default-reviewers: fixed problems with new diff format for more advanced default reviewer rules.
marcink
r4385:cf2c34da stable
@@ -1,2072 +1,2072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull-request.
93 93 This is also used by the default reviewers logic
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives the proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
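# note: changed_lines maps each filename to the *old* line numbers of deleted or
# modified lines; those are the line numbers the per-line annotation of the
# ancestor commit below is keyed by, which is how original authors are counted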
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 commits = target_scm.compare(
158 158 target_ref, source_ref, source_scm, merge=True,
159 159 pre_load=["author"])
160 160
161 161 for commit in commits:
162 162 user = User.get_from_cs_author(commit.author)
163 163 if user and user not in commit_authors:
164 164 commit_authors.append(user)
165 165
166 166 # lines
167 167 if get_authors:
168 168 target_commit = source_repo.get_commit(ancestor_id)
169 169
170 170 for fname, lines in changed_lines.items():
171 171 try:
172 172 node = target_commit.get_node(fname)
173 173 except Exception:
174 174 continue
175 175
176 176 if not isinstance(node, FileNode):
177 177 continue
178 178
179 179 for annotation in node.annotate:
180 180 line_no, commit_id, get_commit_func, line_text = annotation
181 181 if line_no in lines:
182 182 if commit_id not in _commit_cache:
183 183 _commit_cache[commit_id] = get_commit_func()
184 184 commit = _commit_cache[commit_id]
185 185 author = commit.author
186 186 email = commit.author_email
187 187 user = User.get_from_cs_author(author)
188 188 if user:
189 user_counts[user] = user_counts.get(user, 0) + 1
189 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
190 190 author_counts[author] = author_counts.get(author, 0) + 1
191 191 email_counts[email] = email_counts.get(email, 0) + 1
192 192
193 193 return {
194 194 'commits': commits,
195 195 'files': all_files_changes,
196 196 'stats': stats,
197 197 'ancestor': ancestor_id,
198 198 # original authors of modified files
199 199 'original_authors': {
200 200 'users': user_counts,
201 201 'authors': author_counts,
202 202 'emails': email_counts,
203 203 },
204 204 'commit_authors': commit_authors
205 205 }
206 206
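# A minimal usage sketch for get_diff_info (the `repo`/`fork` names and the call
# site are illustrative, not taken from this module): the default-reviewer preview
# could gather per-author line counts roughly like this:
#
#     info = get_diff_info(
#         source_repo=fork, source_ref=source_commit_id,
#         target_repo=repo, target_ref=target_commit_id,
#         get_authors=True)
#     info['original_authors']['users']  # {user_id: changed-line count, ...}
#     info['commit_authors']             # [User, ...] authors of incoming commits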
207 207
208 208 class PullRequestModel(BaseModel):
209 209
210 210 cls = PullRequest
211 211
212 212 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
213 213
214 214 UPDATE_STATUS_MESSAGES = {
215 215 UpdateFailureReason.NONE: lazy_ugettext(
216 216 'Pull request update successful.'),
217 217 UpdateFailureReason.UNKNOWN: lazy_ugettext(
218 218 'Pull request update failed because of an unknown error.'),
219 219 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
220 220 'No update needed because the source and target have not changed.'),
221 221 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
222 222 'Pull request cannot be updated because the reference type is '
223 223 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
224 224 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
225 225 'This pull request cannot be updated because the target '
226 226 'reference is missing.'),
227 227 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
228 228 'This pull request cannot be updated because the source '
229 229 'reference is missing.'),
230 230 }
231 231 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
232 232 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233 233
234 234 def __get_pull_request(self, pull_request):
235 235 return self._get_instance((
236 236 PullRequest, PullRequestVersion), pull_request)
237 237
238 238 def _check_perms(self, perms, pull_request, user, api=False):
239 239 if not api:
240 240 return h.HasRepoPermissionAny(*perms)(
241 241 user=user, repo_name=pull_request.target_repo.repo_name)
242 242 else:
243 243 return h.HasRepoPermissionAnyApi(*perms)(
244 244 user=user, repo_name=pull_request.target_repo.repo_name)
245 245
246 246 def check_user_read(self, pull_request, user, api=False):
247 247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 248 return self._check_perms(_perms, pull_request, user, api)
249 249
250 250 def check_user_merge(self, pull_request, user, api=False):
251 251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 252 return self._check_perms(_perms, pull_request, user, api)
253 253
254 254 def check_user_update(self, pull_request, user, api=False):
255 255 owner = user.user_id == pull_request.user_id
256 256 return self.check_user_merge(pull_request, user, api) or owner
257 257
258 258 def check_user_delete(self, pull_request, user):
259 259 owner = user.user_id == pull_request.user_id
260 260 _perms = ('repository.admin',)
261 261 return self._check_perms(_perms, pull_request, user) or owner
262 262
263 263 def check_user_change_status(self, pull_request, user, api=False):
264 264 reviewer = user.user_id in [x.user_id for x in
265 265 pull_request.reviewers]
266 266 return self.check_user_update(pull_request, user, api) or reviewer
267 267
268 268 def check_user_comment(self, pull_request, user):
269 269 owner = user.user_id == pull_request.user_id
270 270 return self.check_user_read(pull_request, user) or owner
271 271
272 272 def get(self, pull_request):
273 273 return self.__get_pull_request(pull_request)
274 274
275 275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 276 statuses=None, opened_by=None, order_by=None,
277 277 order_dir='desc', only_created=False):
278 278 repo = None
279 279 if repo_name:
280 280 repo = self._get_repo(repo_name)
281 281
282 282 q = PullRequest.query()
283 283
284 284 if search_q:
285 285 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 286 q = q.join(User)
287 287 q = q.filter(or_(
288 288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 289 User.username.ilike(like_expression),
290 290 PullRequest.title.ilike(like_expression),
291 291 PullRequest.description.ilike(like_expression),
292 292 ))
293 293
294 294 # source or target
295 295 if repo and source:
296 296 q = q.filter(PullRequest.source_repo == repo)
297 297 elif repo:
298 298 q = q.filter(PullRequest.target_repo == repo)
299 299
300 300 # closed,opened
301 301 if statuses:
302 302 q = q.filter(PullRequest.status.in_(statuses))
303 303
304 304 # opened by filter
305 305 if opened_by:
306 306 q = q.filter(PullRequest.user_id.in_(opened_by))
307 307
308 308 # only get those that are in "created" state
309 309 if only_created:
310 310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311 311
312 312 if order_by:
313 313 order_map = {
314 314 'name_raw': PullRequest.pull_request_id,
315 315 'id': PullRequest.pull_request_id,
316 316 'title': PullRequest.title,
317 317 'updated_on_raw': PullRequest.updated_on,
318 318 'target_repo': PullRequest.target_repo_id
319 319 }
320 320 if order_dir == 'asc':
321 321 q = q.order_by(order_map[order_by].asc())
322 322 else:
323 323 q = q.order_by(order_map[order_by].desc())
324 324
325 325 return q
326 326
327 327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 328 opened_by=None):
329 329 """
330 330 Count the number of pull requests for a specific repository.
331 331
332 332 :param repo_name: target or source repo
333 333 :param search_q: filter by text
334 334 :param source: boolean flag to specify if repo_name refers to source
335 335 :param statuses: list of pull request statuses
336 336 :param opened_by: author user of the pull request
337 337 :returns: int number of pull requests
338 338 """
339 339 q = self._prepare_get_all_query(
340 340 repo_name, search_q=search_q, source=source, statuses=statuses,
341 341 opened_by=opened_by)
342 342
343 343 return q.count()
344 344
345 345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 347 """
348 348 Get all pull requests for a specific repository.
349 349
350 350 :param repo_name: target or source repo
351 351 :param search_q: filter by text
352 352 :param source: boolean flag to specify if repo_name refers to source
353 353 :param statuses: list of pull request statuses
354 354 :param opened_by: author user of the pull request
355 355 :param offset: pagination offset
356 356 :param length: length of returned list
357 357 :param order_by: order of the returned list
358 358 :param order_dir: 'asc' or 'desc' ordering direction
359 359 :returns: list of pull requests
360 360 """
361 361 q = self._prepare_get_all_query(
362 362 repo_name, search_q=search_q, source=source, statuses=statuses,
363 363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364 364
365 365 if length:
366 366 pull_requests = q.limit(length).offset(offset).all()
367 367 else:
368 368 pull_requests = q.all()
369 369
370 370 return pull_requests
371 371
372 372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 373 opened_by=None):
374 374 """
375 375 Count the number of pull requests for a specific repository that are
376 376 awaiting review.
377 377
378 378 :param repo_name: target or source repo
379 379 :param search_q: filter by text
380 380 :param source: boolean flag to specify if repo_name refers to source
381 381 :param statuses: list of pull request statuses
382 382 :param opened_by: author user of the pull request
383 383 :returns: int number of pull requests
384 384 """
385 385 pull_requests = self.get_awaiting_review(
386 386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387 387
388 388 return len(pull_requests)
389 389
390 390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 391 opened_by=None, offset=0, length=None,
392 392 order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests for a specific repository that are awaiting
395 395 review.
396 396
397 397 :param repo_name: target or source repo
398 398 :param search_q: filter by text
399 399 :param source: boolean flag to specify if repo_name refers to source
400 400 :param statuses: list of pull request statuses
401 401 :param opened_by: author user of the pull request
402 402 :param offset: pagination offset
403 403 :param length: length of returned list
404 404 :param order_by: order of the returned list
405 405 :param order_dir: 'asc' or 'desc' ordering direction
406 406 :returns: list of pull requests
407 407 """
408 408 pull_requests = self.get_all(
409 409 repo_name, search_q=search_q, source=source, statuses=statuses,
410 410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411 411
412 412 _filtered_pull_requests = []
413 413 for pr in pull_requests:
414 414 status = pr.calculated_review_status()
415 415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 416 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 417 _filtered_pull_requests.append(pr)
418 418 if length:
419 419 return _filtered_pull_requests[offset:offset+length]
420 420 else:
421 421 return _filtered_pull_requests
422 422
423 423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 424 opened_by=None, user_id=None):
425 425 """
426 426 Count the number of pull requests for a specific repository that are
427 427 awaiting review from a specific user.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param source: boolean flag to specify if repo_name refers to source
432 432 :param statuses: list of pull request statuses
433 433 :param opened_by: author user of the pull request
434 434 :param user_id: reviewer user of the pull request
435 435 :returns: int number of pull requests
436 436 """
437 437 pull_requests = self.get_awaiting_my_review(
438 438 repo_name, search_q=search_q, source=source, statuses=statuses,
439 439 opened_by=opened_by, user_id=user_id)
440 440
441 441 return len(pull_requests)
442 442
443 443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 444 opened_by=None, user_id=None, offset=0,
445 445 length=None, order_by=None, order_dir='desc'):
446 446 """
447 447 Get all pull requests for a specific repository that are awaiting
448 448 review from a specific user.
449 449
450 450 :param repo_name: target or source repo
451 451 :param search_q: filter by text
452 452 :param source: boolean flag to specify if repo_name refers to source
453 453 :param statuses: list of pull request statuses
454 454 :param opened_by: author user of the pull request
455 455 :param user_id: reviewer user of the pull request
456 456 :param offset: pagination offset
457 457 :param length: length of returned list
458 458 :param order_by: order of the returned list
459 459 :param order_dir: 'asc' or 'desc' ordering direction
460 460 :returns: list of pull requests
461 461 """
462 462 pull_requests = self.get_all(
463 463 repo_name, search_q=search_q, source=source, statuses=statuses,
464 464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465 465
466 466 _my = PullRequestModel().get_not_reviewed(user_id)
467 467 my_participation = []
468 468 for pr in pull_requests:
469 469 if pr in _my:
470 470 my_participation.append(pr)
471 471 _filtered_pull_requests = my_participation
472 472 if length:
473 473 return _filtered_pull_requests[offset:offset+length]
474 474 else:
475 475 return _filtered_pull_requests
476 476
477 477 def get_not_reviewed(self, user_id):
478 478 return [
479 479 x.pull_request for x in PullRequestReviewers.query().filter(
480 480 PullRequestReviewers.user_id == user_id).all()
481 481 ]
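# note: despite its name, this returns every pull request on which the given
# user is assigned as a reviewer, regardless of the current review status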
482 482
483 483 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
484 484 order_by=None, order_dir='desc'):
485 485 q = PullRequest.query()
486 486 if user_id:
487 487 reviewers_subquery = Session().query(
488 488 PullRequestReviewers.pull_request_id).filter(
489 489 PullRequestReviewers.user_id == user_id).subquery()
490 490 user_filter = or_(
491 491 PullRequest.user_id == user_id,
492 492 PullRequest.pull_request_id.in_(reviewers_subquery)
493 493 )
494 494 q = PullRequest.query().filter(user_filter)
495 495
496 496 # closed,opened
497 497 if statuses:
498 498 q = q.filter(PullRequest.status.in_(statuses))
499 499
500 500 if query:
501 501 like_expression = u'%{}%'.format(safe_unicode(query))
502 502 q = q.join(User)
503 503 q = q.filter(or_(
504 504 cast(PullRequest.pull_request_id, String).ilike(like_expression),
505 505 User.username.ilike(like_expression),
506 506 PullRequest.title.ilike(like_expression),
507 507 PullRequest.description.ilike(like_expression),
508 508 ))
509 509 if order_by:
510 510 order_map = {
511 511 'name_raw': PullRequest.pull_request_id,
512 512 'title': PullRequest.title,
513 513 'updated_on_raw': PullRequest.updated_on,
514 514 'target_repo': PullRequest.target_repo_id
515 515 }
516 516 if order_dir == 'asc':
517 517 q = q.order_by(order_map[order_by].asc())
518 518 else:
519 519 q = q.order_by(order_map[order_by].desc())
520 520
521 521 return q
522 522
523 523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 525 return q.count()
526 526
527 527 def get_im_participating_in(
528 528 self, user_id=None, statuses=None, query='', offset=0,
529 529 length=None, order_by=None, order_dir='desc'):
530 530 """
531 531 Get all pull requests that I'm participating in, or that I have opened
532 532 """
533 533
534 534 q = self._prepare_participating_query(
535 535 user_id, statuses=statuses, query=query, order_by=order_by,
536 536 order_dir=order_dir)
537 537
538 538 if length:
539 539 pull_requests = q.limit(length).offset(offset).all()
540 540 else:
541 541 pull_requests = q.all()
542 542
543 543 return pull_requests
544 544
545 545 def get_versions(self, pull_request):
546 546 """
547 547 returns versions of the pull request sorted by version ID ascending
548 548 """
549 549 return PullRequestVersion.query()\
550 550 .filter(PullRequestVersion.pull_request == pull_request)\
551 551 .order_by(PullRequestVersion.pull_request_version_id.asc())\
552 552 .all()
553 553
554 554 def get_pr_version(self, pull_request_id, version=None):
555 555 at_version = None
556 556
557 557 if version and version == 'latest':
558 558 pull_request_ver = PullRequest.get(pull_request_id)
559 559 pull_request_obj = pull_request_ver
560 560 _org_pull_request_obj = pull_request_obj
561 561 at_version = 'latest'
562 562 elif version:
563 563 pull_request_ver = PullRequestVersion.get_or_404(version)
564 564 pull_request_obj = pull_request_ver
565 565 _org_pull_request_obj = pull_request_ver.pull_request
566 566 at_version = pull_request_ver.pull_request_version_id
567 567 else:
568 568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 569 pull_request_id)
570 570
571 571 pull_request_display_obj = PullRequest.get_pr_display_object(
572 572 pull_request_obj, _org_pull_request_obj)
573 573
574 574 return _org_pull_request_obj, pull_request_obj, \
575 575 pull_request_display_obj, at_version
576 576
577 577 def create(self, created_by, source_repo, source_ref, target_repo,
578 578 target_ref, revisions, reviewers, title, description=None,
579 579 common_ancestor_id=None,
580 580 description_renderer=None,
581 581 reviewer_data=None, translator=None, auth_user=None):
582 582 translator = translator or get_current_request().translate
583 583
584 584 created_by_user = self._get_user(created_by)
585 585 auth_user = auth_user or created_by_user.AuthUser()
586 586 source_repo = self._get_repo(source_repo)
587 587 target_repo = self._get_repo(target_repo)
588 588
589 589 pull_request = PullRequest()
590 590 pull_request.source_repo = source_repo
591 591 pull_request.source_ref = source_ref
592 592 pull_request.target_repo = target_repo
593 593 pull_request.target_ref = target_ref
594 594 pull_request.revisions = revisions
595 595 pull_request.title = title
596 596 pull_request.description = description
597 597 pull_request.description_renderer = description_renderer
598 598 pull_request.author = created_by_user
599 599 pull_request.reviewer_data = reviewer_data
600 600 pull_request.pull_request_state = pull_request.STATE_CREATING
601 601 pull_request.common_ancestor_id = common_ancestor_id
602 602
603 603 Session().add(pull_request)
604 604 Session().flush()
605 605
606 606 reviewer_ids = set()
607 607 # members / reviewers
608 608 for reviewer_object in reviewers:
609 609 user_id, reasons, mandatory, rules = reviewer_object
610 610 user = self._get_user(user_id)
611 611
612 612 # skip duplicates
613 613 if user.user_id in reviewer_ids:
614 614 continue
615 615
616 616 reviewer_ids.add(user.user_id)
617 617
618 618 reviewer = PullRequestReviewers()
619 619 reviewer.user = user
620 620 reviewer.pull_request = pull_request
621 621 reviewer.reasons = reasons
622 622 reviewer.mandatory = mandatory
623 623
624 624 # NOTE(marcink): pick only first rule for now
625 625 rule_id = list(rules)[0] if rules else None
626 626 rule = RepoReviewRule.get(rule_id) if rule_id else None
627 627 if rule:
628 628 review_group = rule.user_group_vote_rule(user_id)
629 629 # we check if this particular reviewer is a member of a voting group
630 630 if review_group:
631 631 # NOTE(marcink):
632 632 # it can be that the user is a member of more groups, but we pick the first one,
633 633 # same as the default reviewers algo
634 634 review_group = review_group[0]
635 635
636 636 rule_data = {
637 637 'rule_name':
638 638 rule.review_rule_name,
639 639 'rule_user_group_entry_id':
640 640 review_group.repo_review_rule_users_group_id,
641 641 'rule_user_group_name':
642 642 review_group.users_group.users_group_name,
643 643 'rule_user_group_members':
644 644 [x.user.username for x in review_group.users_group.members],
645 645 'rule_user_group_members_id':
646 646 [x.user.user_id for x in review_group.users_group.members],
647 647 }
648 648 # e.g {'vote_rule': -1, 'mandatory': True}
649 649 rule_data.update(review_group.rule_data())
650 650
651 651 reviewer.rule_data = rule_data
652 652
653 653 Session().add(reviewer)
654 654 Session().flush()
655 655
656 656 # Set approval status to "Under Review" for all commits which are
657 657 # part of this pull request.
658 658 ChangesetStatusModel().set_status(
659 659 repo=target_repo,
660 660 status=ChangesetStatus.STATUS_UNDER_REVIEW,
661 661 user=created_by_user,
662 662 pull_request=pull_request
663 663 )
664 664 # we commit early at this point. The queries above do some row-locking,
665 665 # and because of that we need to commit and finish the transaction
666 666 # before the validate call below, which for large repos could take long,
667 667 # resulting in long row locks
668 668 Session().commit()
669 669
670 670 # prepare workspace, and run initial merge simulation. Set state during that
671 671 # operation
672 672 pull_request = PullRequest.get(pull_request.pull_request_id)
673 673
674 674 # set state to merging for the merge simulation, and if it finishes, to created,
675 675 # to mark that the simulation is working fine
676 676 with pull_request.set_state(PullRequest.STATE_MERGING,
677 677 final_state=PullRequest.STATE_CREATED) as state_obj:
678 678 MergeCheck.validate(
679 679 pull_request, auth_user=auth_user, translator=translator)
680 680
681 681 self.notify_reviewers(pull_request, reviewer_ids)
682 682 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
683 683
684 684 creation_data = pull_request.get_api_data(with_merge_state=False)
685 685 self._log_audit_action(
686 686 'repo.pull_request.create', {'data': creation_data},
687 687 auth_user, pull_request)
688 688
689 689 return pull_request
690 690
691 691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
692 692 pull_request = self.__get_pull_request(pull_request)
693 693 target_scm = pull_request.target_repo.scm_instance()
694 694 if action == 'create':
695 695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
696 696 elif action == 'merge':
697 697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
698 698 elif action == 'close':
699 699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
700 700 elif action == 'review_status_change':
701 701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
702 702 elif action == 'update':
703 703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
704 704 elif action == 'comment':
705 705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
706 706 else:
707 707 return
708 708
709 709 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
710 710 pull_request, action, trigger_hook)
711 711 trigger_hook(
712 712 username=user.username,
713 713 repo_name=pull_request.target_repo.repo_name,
714 714 repo_type=target_scm.alias,
715 715 pull_request=pull_request,
716 716 data=data)
717 717
718 718 def _get_commit_ids(self, pull_request):
719 719 """
720 720 Return the commit ids of the merged pull request.
721 721
722 722 This method does not yet deal correctly with the lack of autoupdates
723 723 nor with implicit target updates.
724 724 For example: if a commit in the source repo is already in the target, it
725 725 will be reported anyway.
726 726 """
727 727 merge_rev = pull_request.merge_rev
728 728 if merge_rev is None:
729 729 raise ValueError('This pull request was not merged yet')
730 730
731 731 commit_ids = list(pull_request.revisions)
732 732 if merge_rev not in commit_ids:
733 733 commit_ids.append(merge_rev)
734 734
735 735 return commit_ids
736 736
737 737 def merge_repo(self, pull_request, user, extras):
738 738 log.debug("Merging pull request %s", pull_request.pull_request_id)
739 739 extras['user_agent'] = 'internal-merge'
740 740 merge_state = self._merge_pull_request(pull_request, user, extras)
741 741 if merge_state.executed:
742 742 log.debug("Merge was successful, updating the pull request comments.")
743 743 self._comment_and_close_pr(pull_request, user, merge_state)
744 744
745 745 self._log_audit_action(
746 746 'repo.pull_request.merge',
747 747 {'merge_state': merge_state.__dict__},
748 748 user, pull_request)
749 749
750 750 else:
751 751 log.warn("Merge failed, not updating the pull request.")
752 752 return merge_state
753 753
754 754 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
755 755 target_vcs = pull_request.target_repo.scm_instance()
756 756 source_vcs = pull_request.source_repo.scm_instance()
757 757
758 758 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
759 759 pr_id=pull_request.pull_request_id,
760 760 pr_title=pull_request.title,
761 761 source_repo=source_vcs.name,
762 762 source_ref_name=pull_request.source_ref_parts.name,
763 763 target_repo=target_vcs.name,
764 764 target_ref_name=pull_request.target_ref_parts.name,
765 765 )
766 766
767 767 workspace_id = self._workspace_id(pull_request)
768 768 repo_id = pull_request.target_repo.repo_id
769 769 use_rebase = self._use_rebase_for_merging(pull_request)
770 770 close_branch = self._close_branch_before_merging(pull_request)
771 771 user_name = self._user_name_for_merging(pull_request, user)
772 772
773 773 target_ref = self._refresh_reference(
774 774 pull_request.target_ref_parts, target_vcs)
775 775
776 776 callback_daemon, extras = prepare_callback_daemon(
777 777 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
778 778 host=vcs_settings.HOOKS_HOST,
779 779 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
780 780
781 781 with callback_daemon:
782 782 # TODO: johbo: Implement a clean way to run a config_override
783 783 # for a single call.
784 784 target_vcs.config.set(
785 785 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
786 786
787 787 merge_state = target_vcs.merge(
788 788 repo_id, workspace_id, target_ref, source_vcs,
789 789 pull_request.source_ref_parts,
790 790 user_name=user_name, user_email=user.email,
791 791 message=message, use_rebase=use_rebase,
792 792 close_branch=close_branch)
793 793 return merge_state
794 794
795 795 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
796 796 pull_request.merge_rev = merge_state.merge_ref.commit_id
797 797 pull_request.updated_on = datetime.datetime.now()
798 798 close_msg = close_msg or 'Pull request merged and closed'
799 799
800 800 CommentsModel().create(
801 801 text=safe_unicode(close_msg),
802 802 repo=pull_request.target_repo.repo_id,
803 803 user=user.user_id,
804 804 pull_request=pull_request.pull_request_id,
805 805 f_path=None,
806 806 line_no=None,
807 807 closing_pr=True
808 808 )
809 809
810 810 Session().add(pull_request)
811 811 Session().flush()
812 812 # TODO: paris: replace invalidation with less radical solution
813 813 ScmModel().mark_for_invalidation(
814 814 pull_request.target_repo.repo_name)
815 815 self.trigger_pull_request_hook(pull_request, user, 'merge')
816 816
817 817 def has_valid_update_type(self, pull_request):
818 818 source_ref_type = pull_request.source_ref_parts.type
819 819 return source_ref_type in self.REF_TYPES
820 820
821 821 def get_flow_commits(self, pull_request):
822 822
823 823 # source repo
824 824 source_ref_name = pull_request.source_ref_parts.name
825 825 source_ref_type = pull_request.source_ref_parts.type
826 826 source_ref_id = pull_request.source_ref_parts.commit_id
827 827 source_repo = pull_request.source_repo.scm_instance()
828 828
829 829 try:
830 830 if source_ref_type in self.REF_TYPES:
831 831 source_commit = source_repo.get_commit(source_ref_name)
832 832 else:
833 833 source_commit = source_repo.get_commit(source_ref_id)
834 834 except CommitDoesNotExistError:
835 835 raise SourceRefMissing()
836 836
837 837 # target repo
838 838 target_ref_name = pull_request.target_ref_parts.name
839 839 target_ref_type = pull_request.target_ref_parts.type
840 840 target_ref_id = pull_request.target_ref_parts.commit_id
841 841 target_repo = pull_request.target_repo.scm_instance()
842 842
843 843 try:
844 844 if target_ref_type in self.REF_TYPES:
845 845 target_commit = target_repo.get_commit(target_ref_name)
846 846 else:
847 847 target_commit = target_repo.get_commit(target_ref_id)
848 848 except CommitDoesNotExistError:
849 849 raise TargetRefMissing()
850 850
851 851 return source_commit, target_commit
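# refs of a known type (branch/bookmark/tag) are resolved by name, so they point
# at the current head of that ref; anything else falls back to the stored commit id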
852 852
853 853 def update_commits(self, pull_request, updating_user):
854 854 """
855 855 Get the updated list of commits for the pull request
856 856 and return the new pull request version and the list
857 857 of commits processed by this update action
858 858
859 859 updating_user is the user_object who triggered the update
860 860 """
861 861 pull_request = self.__get_pull_request(pull_request)
862 862 source_ref_type = pull_request.source_ref_parts.type
863 863 source_ref_name = pull_request.source_ref_parts.name
864 864 source_ref_id = pull_request.source_ref_parts.commit_id
865 865
866 866 target_ref_type = pull_request.target_ref_parts.type
867 867 target_ref_name = pull_request.target_ref_parts.name
868 868 target_ref_id = pull_request.target_ref_parts.commit_id
869 869
870 870 if not self.has_valid_update_type(pull_request):
871 871 log.debug("Skipping update of pull request %s due to ref type: %s",
872 872 pull_request, source_ref_type)
873 873 return UpdateResponse(
874 874 executed=False,
875 875 reason=UpdateFailureReason.WRONG_REF_TYPE,
876 876 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
877 877 source_changed=False, target_changed=False)
878 878
879 879 try:
880 880 source_commit, target_commit = self.get_flow_commits(pull_request)
881 881 except SourceRefMissing:
882 882 return UpdateResponse(
883 883 executed=False,
884 884 reason=UpdateFailureReason.MISSING_SOURCE_REF,
885 885 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
886 886 source_changed=False, target_changed=False)
887 887 except TargetRefMissing:
888 888 return UpdateResponse(
889 889 executed=False,
890 890 reason=UpdateFailureReason.MISSING_TARGET_REF,
891 891 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
892 892 source_changed=False, target_changed=False)
893 893
894 894 source_changed = source_ref_id != source_commit.raw_id
895 895 target_changed = target_ref_id != target_commit.raw_id
896 896
897 897 if not (source_changed or target_changed):
898 898 log.debug("Nothing changed in pull request %s", pull_request)
899 899 return UpdateResponse(
900 900 executed=False,
901 901 reason=UpdateFailureReason.NO_CHANGE,
902 902 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
903 903 source_changed=source_changed, target_changed=target_changed)
904 904
905 905 change_in_found = 'target repo' if target_changed else 'source repo'
906 906 log.debug('Updating pull request because of change in %s detected',
907 907 change_in_found)
908 908
909 909 # Finally, there is a need for an update; in case of a source change
910 910 # we create a new version, otherwise just an update
911 911 if source_changed:
912 912 pull_request_version = self._create_version_from_snapshot(pull_request)
913 913 self._link_comments_to_version(pull_request_version)
914 914 else:
915 915 try:
916 916 ver = pull_request.versions[-1]
917 917 except IndexError:
918 918 ver = None
919 919
920 920 pull_request.pull_request_version_id = \
921 921 ver.pull_request_version_id if ver else None
922 922 pull_request_version = pull_request
923 923
924 924 source_repo = pull_request.source_repo.scm_instance()
925 925 target_repo = pull_request.target_repo.scm_instance()
926 926
927 927 # re-compute commit ids
928 928 old_commit_ids = pull_request.revisions
929 929 pre_load = ["author", "date", "message", "branch"]
930 930 commit_ranges = target_repo.compare(
931 931 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
932 932 pre_load=pre_load)
933 933
934 934 target_ref = target_commit.raw_id
935 935 source_ref = source_commit.raw_id
936 936 ancestor_commit_id = target_repo.get_common_ancestor(
937 937 target_ref, source_ref, source_repo)
938 938
939 939 if not ancestor_commit_id:
940 940 raise ValueError(
941 941 'cannot calculate diff info without a common ancestor. '
942 942 'Make sure both repositories are related, and have a common forking commit.')
943 943
944 944 pull_request.common_ancestor_id = ancestor_commit_id
945 945
946 946 pull_request.source_ref = '%s:%s:%s' % (
947 947 source_ref_type, source_ref_name, source_commit.raw_id)
948 948 pull_request.target_ref = '%s:%s:%s' % (
949 949 target_ref_type, target_ref_name, ancestor_commit_id)
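# both refs are stored as a 'type:name:commit_id' string, e.g. (illustrative)
# 'branch:default:1a2b3c...'; note the target ref is pinned to the common ancestor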
950 950
951 951 pull_request.revisions = [
952 952 commit.raw_id for commit in reversed(commit_ranges)]
953 953 pull_request.updated_on = datetime.datetime.now()
954 954 Session().add(pull_request)
955 955 new_commit_ids = pull_request.revisions
956 956
957 957 old_diff_data, new_diff_data = self._generate_update_diffs(
958 958 pull_request, pull_request_version)
959 959
960 960 # calculate commit and file changes
961 961 commit_changes = self._calculate_commit_id_changes(
962 962 old_commit_ids, new_commit_ids)
963 963 file_changes = self._calculate_file_changes(
964 964 old_diff_data, new_diff_data)
965 965
966 966 # set comments as outdated if DIFFS changed
967 967 CommentsModel().outdate_comments(
968 968 pull_request, old_diff_data=old_diff_data,
969 969 new_diff_data=new_diff_data)
970 970
971 971 valid_commit_changes = (commit_changes.added or commit_changes.removed)
972 972 file_node_changes = (
973 973 file_changes.added or file_changes.modified or file_changes.removed)
974 974 pr_has_changes = valid_commit_changes or file_node_changes
975 975
976 976 # Add an automatic comment to the pull request, in case
977 977 # anything has changed
978 978 if pr_has_changes:
979 979 update_comment = CommentsModel().create(
980 980 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
981 981 repo=pull_request.target_repo,
982 982 user=pull_request.author,
983 983 pull_request=pull_request,
984 984 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
985 985
986 986 # Update status to "Under Review" for added commits
987 987 for commit_id in commit_changes.added:
988 988 ChangesetStatusModel().set_status(
989 989 repo=pull_request.source_repo,
990 990 status=ChangesetStatus.STATUS_UNDER_REVIEW,
991 991 comment=update_comment,
992 992 user=pull_request.author,
993 993 pull_request=pull_request,
994 994 revision=commit_id)
995 995
996 996 # send update email to users
997 997 try:
998 998 self.notify_users(pull_request=pull_request, updating_user=updating_user,
999 999 ancestor_commit_id=ancestor_commit_id,
1000 1000 commit_changes=commit_changes,
1001 1001 file_changes=file_changes)
1002 1002 except Exception:
1003 1003 log.exception('Failed to send email notification to users')
1004 1004
1005 1005 log.debug(
1006 1006 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1007 1007 'removed_ids: %s', pull_request.pull_request_id,
1008 1008 commit_changes.added, commit_changes.common, commit_changes.removed)
1009 1009 log.debug(
1010 1010 'Updated pull request with the following file changes: %s',
1011 1011 file_changes)
1012 1012
1013 1013 log.info(
1014 1014 "Updated pull request %s from commit %s to commit %s, "
1015 1015 "stored new version %s of this pull request.",
1016 1016 pull_request.pull_request_id, source_ref_id,
1017 1017 pull_request.source_ref_parts.commit_id,
1018 1018 pull_request_version.pull_request_version_id)
1019 1019 Session().commit()
1020 1020 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1021 1021
1022 1022 return UpdateResponse(
1023 1023 executed=True, reason=UpdateFailureReason.NONE,
1024 1024 old=pull_request, new=pull_request_version,
1025 1025 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1026 1026 source_changed=source_changed, target_changed=target_changed)
1027 1027
1028 1028 def _create_version_from_snapshot(self, pull_request):
1029 1029 version = PullRequestVersion()
1030 1030 version.title = pull_request.title
1031 1031 version.description = pull_request.description
1032 1032 version.status = pull_request.status
1033 1033 version.pull_request_state = pull_request.pull_request_state
1034 1034 version.created_on = datetime.datetime.now()
1035 1035 version.updated_on = pull_request.updated_on
1036 1036 version.user_id = pull_request.user_id
1037 1037 version.source_repo = pull_request.source_repo
1038 1038 version.source_ref = pull_request.source_ref
1039 1039 version.target_repo = pull_request.target_repo
1040 1040 version.target_ref = pull_request.target_ref
1041 1041
1042 1042 version._last_merge_source_rev = pull_request._last_merge_source_rev
1043 1043 version._last_merge_target_rev = pull_request._last_merge_target_rev
1044 1044 version.last_merge_status = pull_request.last_merge_status
1045 1045 version.last_merge_metadata = pull_request.last_merge_metadata
1046 1046 version.shadow_merge_ref = pull_request.shadow_merge_ref
1047 1047 version.merge_rev = pull_request.merge_rev
1048 1048 version.reviewer_data = pull_request.reviewer_data
1049 1049
1050 1050 version.revisions = pull_request.revisions
1051 1051 version.common_ancestor_id = pull_request.common_ancestor_id
1052 1052 version.pull_request = pull_request
1053 1053 Session().add(version)
1054 1054 Session().flush()
1055 1055
1056 1056 return version
1057 1057
1058 1058 def _generate_update_diffs(self, pull_request, pull_request_version):
1059 1059
1060 1060 diff_context = (
1061 1061 self.DIFF_CONTEXT +
1062 1062 CommentsModel.needed_extra_diff_context())
1063 1063 hide_whitespace_changes = False
1064 1064 source_repo = pull_request_version.source_repo
1065 1065 source_ref_id = pull_request_version.source_ref_parts.commit_id
1066 1066 target_ref_id = pull_request_version.target_ref_parts.commit_id
1067 1067 old_diff = self._get_diff_from_pr_or_version(
1068 1068 source_repo, source_ref_id, target_ref_id,
1069 1069 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1070 1070
1071 1071 source_repo = pull_request.source_repo
1072 1072 source_ref_id = pull_request.source_ref_parts.commit_id
1073 1073 target_ref_id = pull_request.target_ref_parts.commit_id
1074 1074
1075 1075 new_diff = self._get_diff_from_pr_or_version(
1076 1076 source_repo, source_ref_id, target_ref_id,
1077 1077 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1078 1078
1079 1079 old_diff_data = diffs.DiffProcessor(old_diff)
1080 1080 old_diff_data.prepare()
1081 1081 new_diff_data = diffs.DiffProcessor(new_diff)
1082 1082 new_diff_data.prepare()
1083 1083
1084 1084 return old_diff_data, new_diff_data
1085 1085
1086 1086 def _link_comments_to_version(self, pull_request_version):
1087 1087 """
1088 1088 Link all unlinked comments of this pull request to the given version.
1089 1089
1090 1090 :param pull_request_version: The `PullRequestVersion` to which
1091 1091 the comments shall be linked.
1092 1092
1093 1093 """
1094 1094 pull_request = pull_request_version.pull_request
1095 1095 comments = ChangesetComment.query()\
1096 1096 .filter(
1097 1097 # TODO: johbo: Should we query for the repo at all here?
1098 1098 # Pending decision on how comments of PRs are to be related
1099 1099 # to either the source repo, the target repo or no repo at all.
1100 1100 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1101 1101 ChangesetComment.pull_request == pull_request,
1102 1102 ChangesetComment.pull_request_version == None)\
1103 1103 .order_by(ChangesetComment.comment_id.asc())
1104 1104
1105 1105 # TODO: johbo: Find out why this breaks if it is done in a bulk
1106 1106 # operation.
1107 1107 for comment in comments:
1108 1108 comment.pull_request_version_id = (
1109 1109 pull_request_version.pull_request_version_id)
1110 1110 Session().add(comment)
1111 1111
1112 1112 def _calculate_commit_id_changes(self, old_ids, new_ids):
1113 1113 added = [x for x in new_ids if x not in old_ids]
1114 1114 common = [x for x in new_ids if x in old_ids]
1115 1115 removed = [x for x in old_ids if x not in new_ids]
1116 1116 total = new_ids
1117 1117 return ChangeTuple(added, common, removed, total)
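# e.g. (illustrative) old_ids=['a', 'b'], new_ids=['b', 'c'] yields
# added=['c'], common=['b'], removed=['a'], total=['b', 'c']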
1118 1118
1119 1119 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1120 1120
1121 1121 old_files = OrderedDict()
1122 1122 for diff_data in old_diff_data.parsed_diff:
1123 1123 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1124 1124
1125 1125 added_files = []
1126 1126 modified_files = []
1127 1127 removed_files = []
1128 1128 for diff_data in new_diff_data.parsed_diff:
1129 1129 new_filename = diff_data['filename']
1130 1130 new_hash = md5_safe(diff_data['raw_diff'])
1131 1131
1132 1132 old_hash = old_files.get(new_filename)
1133 1133 if not old_hash:
1134 1134 # file is not present in the old diff, we have to figure out the
1135 1135 # ADD/REMOVE operation from the parsed diff
1136 1136 operations_dict = diff_data['stats']['ops']
1137 1137 if diffs.DEL_FILENODE in operations_dict:
1138 1138 removed_files.append(new_filename)
1139 1139 else:
1140 1140 added_files.append(new_filename)
1141 1141 else:
1142 1142 if new_hash != old_hash:
1143 1143 modified_files.append(new_filename)
1144 1144 # now remove a file from old, since we have seen it already
1145 1145 del old_files[new_filename]
1146 1146
1147 1147 # removed files are those present in the old diff, but not in the NEW one;
1148 1148 # since we remove old files that are present in the new diff, any left-overs
1149 1149 # should be the removed files
1150 1150 removed_files.extend(old_files.keys())
1151 1151
1152 1152 return FileChangeTuple(added_files, modified_files, removed_files)
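# the comparison above keys files by name and compares md5 hashes of their raw
# diffs: an unseen name counts as added (or removed, if the parsed diff reports a
# DEL operation), a changed hash as modified, and names left over only in the old
# diff as removed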
1153 1153
1154 1154 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1155 1155 """
1156 1156 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1157 1157 so it always looks the same regardless of which default
1158 1158 renderer the system is using.
1159 1159
1160 1160 :param ancestor_commit_id: ancestor raw_id
1161 1161 :param changes: changes named tuple
1162 1162 :param file_changes: file changes named tuple
1163 1163
1164 1164 """
1165 1165 new_status = ChangesetStatus.get_status_lbl(
1166 1166 ChangesetStatus.STATUS_UNDER_REVIEW)
1167 1167
1168 1168 changed_files = (
1169 1169 file_changes.added + file_changes.modified + file_changes.removed)
1170 1170
1171 1171 params = {
1172 1172 'under_review_label': new_status,
1173 1173 'added_commits': changes.added,
1174 1174 'removed_commits': changes.removed,
1175 1175 'changed_files': changed_files,
1176 1176 'added_files': file_changes.added,
1177 1177 'modified_files': file_changes.modified,
1178 1178 'removed_files': file_changes.removed,
1179 1179 'ancestor_commit_id': ancestor_commit_id
1180 1180 }
1181 1181 renderer = RstTemplateRenderer()
1182 1182 return renderer.render('pull_request_update.mako', **params)
1183 1183
1184 1184 def edit(self, pull_request, title, description, description_renderer, user):
1185 1185 pull_request = self.__get_pull_request(pull_request)
1186 1186 old_data = pull_request.get_api_data(with_merge_state=False)
1187 1187 if pull_request.is_closed():
1188 1188 raise ValueError('This pull request is closed')
1189 1189 if title:
1190 1190 pull_request.title = title
1191 1191 pull_request.description = description
1192 1192 pull_request.updated_on = datetime.datetime.now()
1193 1193 pull_request.description_renderer = description_renderer
1194 1194 Session().add(pull_request)
1195 1195 self._log_audit_action(
1196 1196 'repo.pull_request.edit', {'old_data': old_data},
1197 1197 user, pull_request)
1198 1198
1199 1199 def update_reviewers(self, pull_request, reviewer_data, user):
1200 1200 """
1201 1201 Update the reviewers in the pull request
1202 1202
1203 1203 :param pull_request: the pr to update
1204 1204 :param reviewer_data: list of tuples
1205 1205 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1206 1206 """
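# illustrative reviewer_data entry (reason strings are hypothetical):
#   (2, ['added manually', 'source code owner'], False, [])
# user ids given as an int or username string are normalized to a user_id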
1207 1207 pull_request = self.__get_pull_request(pull_request)
1208 1208 if pull_request.is_closed():
1209 1209 raise ValueError('This pull request is closed')
1210 1210
1211 1211 reviewers = {}
1212 1212 for user_id, reasons, mandatory, rules in reviewer_data:
1213 1213 if isinstance(user_id, (int, compat.string_types)):
1214 1214 user_id = self._get_user(user_id).user_id
1215 1215 reviewers[user_id] = {
1216 1216 'reasons': reasons, 'mandatory': mandatory}
1217 1217
1218 1218 reviewers_ids = set(reviewers.keys())
1219 1219 current_reviewers = PullRequestReviewers.query()\
1220 1220 .filter(PullRequestReviewers.pull_request ==
1221 1221 pull_request).all()
1222 1222 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1223 1223
1224 1224 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1225 1225 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1226 1226
1227 1227 log.debug("Adding %s reviewers", ids_to_add)
1228 1228 log.debug("Removing %s reviewers", ids_to_remove)
1229 1229 changed = False
1230 1230 added_audit_reviewers = []
1231 1231 removed_audit_reviewers = []
1232 1232
1233 1233 for uid in ids_to_add:
1234 1234 changed = True
1235 1235 _usr = self._get_user(uid)
1236 1236 reviewer = PullRequestReviewers()
1237 1237 reviewer.user = _usr
1238 1238 reviewer.pull_request = pull_request
1239 1239 reviewer.reasons = reviewers[uid]['reasons']
1240 1240 # NOTE(marcink): mandatory shouldn't be changed now
1241 1241 # reviewer.mandatory = reviewers[uid]['reasons']
1242 1242 Session().add(reviewer)
1243 1243 added_audit_reviewers.append(reviewer.get_dict())
1244 1244
1245 1245 for uid in ids_to_remove:
1246 1246 changed = True
1247 1247 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1248 1248 # that prevents and fixes cases where the same reviewer was added twice.
1249 1249 # this CAN happen due to the lack of DB checks
1250 1250 reviewers = PullRequestReviewers.query()\
1251 1251 .filter(PullRequestReviewers.user_id == uid,
1252 1252 PullRequestReviewers.pull_request == pull_request)\
1253 1253 .all()
1254 1254
1255 1255 for obj in reviewers:
1256 1256 removed_audit_reviewers.append(obj.get_dict())
1257 1257 Session().delete(obj)
1258 1258
1259 1259 if changed:
1260 1260 Session().expire_all()
1261 1261 pull_request.updated_on = datetime.datetime.now()
1262 1262 Session().add(pull_request)
1263 1263
1264 1264 # finally store audit logs
1265 1265 for user_data in added_audit_reviewers:
1266 1266 self._log_audit_action(
1267 1267 'repo.pull_request.reviewer.add', {'data': user_data},
1268 1268 user, pull_request)
1269 1269 for user_data in removed_audit_reviewers:
1270 1270 self._log_audit_action(
1271 1271 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1272 1272 user, pull_request)
1273 1273
1274 1274 self.notify_reviewers(pull_request, ids_to_add)
1275 1275 return ids_to_add, ids_to_remove
1276 1276
1277 1277 def get_url(self, pull_request, request=None, permalink=False):
1278 1278 if not request:
1279 1279 request = get_current_request()
1280 1280
1281 1281 if permalink:
1282 1282 return request.route_url(
1283 1283 'pull_requests_global',
1284 1284 pull_request_id=pull_request.pull_request_id,)
1285 1285 else:
1286 1286 return request.route_url('pullrequest_show',
1287 1287 repo_name=safe_str(pull_request.target_repo.repo_name),
1288 1288 pull_request_id=pull_request.pull_request_id,)
1289 1289
1290 1290 def get_shadow_clone_url(self, pull_request, request=None):
1291 1291 """
1292 1292 Returns qualified url pointing to the shadow repository. If this pull
1293 1293 request is closed there is no shadow repository and ``None`` will be
1294 1294 returned.
1295 1295 """
1296 1296 if pull_request.is_closed():
1297 1297 return None
1298 1298 else:
1299 1299 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1300 1300 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
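# i.e. the pull-request URL with '/repository' appended, used as the clone URL
# of the shadow repository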
1301 1301
1302 1302 def notify_reviewers(self, pull_request, reviewers_ids):
1303 1303 # notification to reviewers
1304 1304 if not reviewers_ids:
1305 1305 return
1306 1306
1307 1307 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1308 1308
1309 1309 pull_request_obj = pull_request
1310 1310 # get the current participants of this pull request
1311 1311 recipients = reviewers_ids
1312 1312 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1313 1313
1314 1314 pr_source_repo = pull_request_obj.source_repo
1315 1315 pr_target_repo = pull_request_obj.target_repo
1316 1316
1317 1317 pr_url = h.route_url('pullrequest_show',
1318 1318 repo_name=pr_target_repo.repo_name,
1319 1319 pull_request_id=pull_request_obj.pull_request_id,)
1320 1320
1321 1321 # set some variables for email notification
1322 1322 pr_target_repo_url = h.route_url(
1323 1323 'repo_summary', repo_name=pr_target_repo.repo_name)
1324 1324
1325 1325 pr_source_repo_url = h.route_url(
1326 1326 'repo_summary', repo_name=pr_source_repo.repo_name)
1327 1327
1328 1328 # pull request specifics
1329 1329 pull_request_commits = [
1330 1330 (x.raw_id, x.message)
1331 1331 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1332 1332
1333 1333 kwargs = {
1334 1334 'user': pull_request.author,
1335 1335 'pull_request': pull_request_obj,
1336 1336 'pull_request_commits': pull_request_commits,
1337 1337
1338 1338 'pull_request_target_repo': pr_target_repo,
1339 1339 'pull_request_target_repo_url': pr_target_repo_url,
1340 1340
1341 1341 'pull_request_source_repo': pr_source_repo,
1342 1342 'pull_request_source_repo_url': pr_source_repo_url,
1343 1343
1344 1344 'pull_request_url': pr_url,
1345 1345 }
1346 1346
1347 1347 # pre-generate the subject for notification itself
1348 1348 (subject,
1349 1349 _h, _e, # we don't care about those
1350 1350 body_plaintext) = EmailNotificationModel().render_email(
1351 1351 notification_type, **kwargs)
1352 1352
1353 1353 # create notification objects, and emails
1354 1354 NotificationModel().create(
1355 1355 created_by=pull_request.author,
1356 1356 notification_subject=subject,
1357 1357 notification_body=body_plaintext,
1358 1358 notification_type=notification_type,
1359 1359 recipients=recipients,
1360 1360 email_kwargs=kwargs,
1361 1361 )
1362 1362
1363 1363 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1364 1364 commit_changes, file_changes):
1365 1365
1366 1366 updating_user_id = updating_user.user_id
1367 1367 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1368 1368 # NOTE(marcink): send notification to all other users except the
1369 1369 # person who updated the PR
1370 1370 recipients = reviewers.difference(set([updating_user_id]))
1371 1371
1372 1372 log.debug('Notify following recipients about pull-request update %s', recipients)
1373 1373
1374 1374 pull_request_obj = pull_request
1375 1375
1376 1376 # send email about the update
1377 1377 changed_files = (
1378 1378 file_changes.added + file_changes.modified + file_changes.removed)
1379 1379
1380 1380 pr_source_repo = pull_request_obj.source_repo
1381 1381 pr_target_repo = pull_request_obj.target_repo
1382 1382
1383 1383 pr_url = h.route_url('pullrequest_show',
1384 1384 repo_name=pr_target_repo.repo_name,
1385 1385 pull_request_id=pull_request_obj.pull_request_id,)
1386 1386
1387 1387 # set some variables for email notification
1388 1388 pr_target_repo_url = h.route_url(
1389 1389 'repo_summary', repo_name=pr_target_repo.repo_name)
1390 1390
1391 1391 pr_source_repo_url = h.route_url(
1392 1392 'repo_summary', repo_name=pr_source_repo.repo_name)
1393 1393
1394 1394 email_kwargs = {
1395 1395 'date': datetime.datetime.now(),
1396 1396 'updating_user': updating_user,
1397 1397
1398 1398 'pull_request': pull_request_obj,
1399 1399
1400 1400 'pull_request_target_repo': pr_target_repo,
1401 1401 'pull_request_target_repo_url': pr_target_repo_url,
1402 1402
1403 1403 'pull_request_source_repo': pr_source_repo,
1404 1404 'pull_request_source_repo_url': pr_source_repo_url,
1405 1405
1406 1406 'pull_request_url': pr_url,
1407 1407
1408 1408 'ancestor_commit_id': ancestor_commit_id,
1409 1409 'added_commits': commit_changes.added,
1410 1410 'removed_commits': commit_changes.removed,
1411 1411 'changed_files': changed_files,
1412 1412 'added_files': file_changes.added,
1413 1413 'modified_files': file_changes.modified,
1414 1414 'removed_files': file_changes.removed,
1415 1415 }
1416 1416
1417 1417 (subject,
1418 1418 _h, _e, # we don't care about those
1419 1419 body_plaintext) = EmailNotificationModel().render_email(
1420 1420 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1421 1421
1422 1422 # create notification objects, and emails
1423 1423 NotificationModel().create(
1424 1424 created_by=updating_user,
1425 1425 notification_subject=subject,
1426 1426 notification_body=body_plaintext,
1427 1427 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1428 1428 recipients=recipients,
1429 1429 email_kwargs=email_kwargs,
1430 1430 )
1431 1431
1432 1432 def delete(self, pull_request, user=None):
1433 1433 if not user:
1434 1434 user = getattr(get_current_rhodecode_user(), 'username', None)
1435 1435
1436 1436 pull_request = self.__get_pull_request(pull_request)
1437 1437 old_data = pull_request.get_api_data(with_merge_state=False)
1438 1438 self._cleanup_merge_workspace(pull_request)
1439 1439 self._log_audit_action(
1440 1440 'repo.pull_request.delete', {'old_data': old_data},
1441 1441 user, pull_request)
1442 1442 Session().delete(pull_request)
1443 1443
1444 1444 def close_pull_request(self, pull_request, user):
1445 1445 pull_request = self.__get_pull_request(pull_request)
1446 1446 self._cleanup_merge_workspace(pull_request)
1447 1447 pull_request.status = PullRequest.STATUS_CLOSED
1448 1448 pull_request.updated_on = datetime.datetime.now()
1449 1449 Session().add(pull_request)
1450 1450 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1451 1451
1452 1452 pr_data = pull_request.get_api_data(with_merge_state=False)
1453 1453 self._log_audit_action(
1454 1454 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1455 1455
1456 1456 def close_pull_request_with_comment(
1457 1457 self, pull_request, user, repo, message=None, auth_user=None):
1458 1458
1459 1459 pull_request_review_status = pull_request.calculated_review_status()
1460 1460
1461 1461 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1462 1462 # approved only if we have voting consent
1463 1463 status = ChangesetStatus.STATUS_APPROVED
1464 1464 else:
1465 1465 status = ChangesetStatus.STATUS_REJECTED
1466 1466 status_lbl = ChangesetStatus.get_status_lbl(status)
1467 1467
1468 1468 default_message = (
1469 1469 'Closing with status change {transition_icon} {status}.'
1470 1470 ).format(transition_icon='>', status=status_lbl)
1471 1471 text = message or default_message
1472 1472
1473 1473 # create a comment, and link it to new status
1474 1474 comment = CommentsModel().create(
1475 1475 text=text,
1476 1476 repo=repo.repo_id,
1477 1477 user=user.user_id,
1478 1478 pull_request=pull_request.pull_request_id,
1479 1479 status_change=status_lbl,
1480 1480 status_change_type=status,
1481 1481 closing_pr=True,
1482 1482 auth_user=auth_user,
1483 1483 )
1484 1484
1485 1485 # calculate old status before we change it
1486 1486 old_calculated_status = pull_request.calculated_review_status()
1487 1487 ChangesetStatusModel().set_status(
1488 1488 repo.repo_id,
1489 1489 status,
1490 1490 user.user_id,
1491 1491 comment=comment,
1492 1492 pull_request=pull_request.pull_request_id
1493 1493 )
1494 1494
1495 1495 Session().flush()
1496 1496
1497 1497 self.trigger_pull_request_hook(pull_request, user, 'comment',
1498 1498 data={'comment': comment})
1499 1499
1500 1500 # we now calculate the status of the pull request again, and based on
1501 1501 # that calculation trigger a status change. This can happen when a
1502 1502 # non-reviewer admin closes a PR: their vote doesn't change the status,
1503 1503 # while a reviewer's vote might.
1504 1504 calculated_status = pull_request.calculated_review_status()
1505 1505 if old_calculated_status != calculated_status:
1506 1506 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1507 1507 data={'status': calculated_status})
1508 1508
1509 1509 # finally close the PR
1510 1510 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1511 1511
1512 1512 return comment, status
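    # Illustrative usage sketch (the variable names below are hypothetical,
    # not from the original code): closing a pull request with an automatic
    # status comment.
    #
    #   comment, status = PullRequestModel().close_pull_request_with_comment(
    #       pull_request, user=current_user, repo=target_repo,
    #       message='Superseded by another pull request')
    #   Session().commit()
    #
    # The returned status is STATUS_APPROVED only when the calculated review
    # status already had voting consent; otherwise it is STATUS_REJECTED.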
1513 1513
1514 1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1515 1515 _ = translator or get_current_request().translate
1516 1516
1517 1517 if not self._is_merge_enabled(pull_request):
1518 1518 return None, False, _('Server-side pull request merging is disabled.')
1519 1519
1520 1520 if pull_request.is_closed():
1521 1521 return None, False, _('This pull request is closed.')
1522 1522
1523 1523 merge_possible, msg = self._check_repo_requirements(
1524 1524 target=pull_request.target_repo, source=pull_request.source_repo,
1525 1525 translator=_)
1526 1526 if not merge_possible:
1527 1527 return None, merge_possible, msg
1528 1528
1529 1529 try:
1530 1530 merge_response = self._try_merge(
1531 1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1532 1532 log.debug("Merge response: %s", merge_response)
1533 1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1534 1534 except NotImplementedError:
1535 1535 return None, False, _('Pull request merging is not supported.')
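    # Illustrative sketch (not part of the original code): merge_status()
    # returns a 3-tuple, so callers typically unpack it like this
    # (variable names are hypothetical):
    #
    #   merge_response, possible, msg = PullRequestModel().merge_status(
    #       pull_request, translator=_)
    #   if not possible:
    #       log.debug('merge not possible: %s', msg)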
1536 1536
1537 1537 def _check_repo_requirements(self, target, source, translator):
1538 1538 """
1539 1539 Check if `target` and `source` have compatible requirements.
1540 1540
1541 1541 Currently this is just checking for largefiles.
1542 1542 """
1543 1543 _ = translator
1544 1544 target_has_largefiles = self._has_largefiles(target)
1545 1545 source_has_largefiles = self._has_largefiles(source)
1546 1546 merge_possible = True
1547 1547 message = u''
1548 1548
1549 1549 if target_has_largefiles != source_has_largefiles:
1550 1550 merge_possible = False
1551 1551 if source_has_largefiles:
1552 1552 message = _(
1553 1553 'Target repository large files support is disabled.')
1554 1554 else:
1555 1555 message = _(
1556 1556 'Source repository large files support is disabled.')
1557 1557
1558 1558 return merge_possible, message
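    # Illustrative outcomes of the largefiles compatibility check above
    # (a sketch, not an exhaustive specification):
    #
    #   target largefiles | source largefiles | merge_possible
    #   ------------------+-------------------+---------------
    #   enabled           | enabled           | True
    #   disabled          | disabled          | True
    #   disabled          | enabled           | False ('Target repository ... disabled.')
    #   enabled           | disabled          | False ('Source repository ... disabled.')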
1559 1559
1560 1560 def _has_largefiles(self, repo):
1561 1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1562 1562 'extensions', 'largefiles')
1563 1563 return largefiles_ui and largefiles_ui[0].active
1564 1564
1565 1565 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1566 1566 """
1567 1567 Try to merge the pull request and return the merge status.
1568 1568 """
1569 1569 log.debug(
1570 1570 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1571 1571 pull_request.pull_request_id, force_shadow_repo_refresh)
1572 1572 target_vcs = pull_request.target_repo.scm_instance()
1573 1573 # Refresh the target reference.
1574 1574 try:
1575 1575 target_ref = self._refresh_reference(
1576 1576 pull_request.target_ref_parts, target_vcs)
1577 1577 except CommitDoesNotExistError:
1578 1578 merge_state = MergeResponse(
1579 1579 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1580 1580 metadata={'target_ref': pull_request.target_ref_parts})
1581 1581 return merge_state
1582 1582
1583 1583 target_locked = pull_request.target_repo.locked
1584 1584 if target_locked and target_locked[0]:
1585 1585 locked_by = 'user:{}'.format(target_locked[0])
1586 1586 log.debug("The target repository is locked by %s.", locked_by)
1587 1587 merge_state = MergeResponse(
1588 1588 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1589 1589 metadata={'locked_by': locked_by})
1590 1590 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1591 1591 pull_request, target_ref):
1592 1592 log.debug("Refreshing the merge status of the repository.")
1593 1593 merge_state = self._refresh_merge_state(
1594 1594 pull_request, target_vcs, target_ref)
1595 1595 else:
1596 1596 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1597 1597 metadata = {
1598 1598 'unresolved_files': '',
1599 1599 'target_ref': pull_request.target_ref_parts,
1600 1600 'source_ref': pull_request.source_ref_parts,
1601 1601 }
1602 1602 if pull_request.last_merge_metadata:
1603 1603 metadata.update(pull_request.last_merge_metadata)
1604 1604
1605 1605 if not possible and target_ref.type == 'branch':
1606 1606 # NOTE(marcink): case for mercurial multiple heads on branch
1607 1607 heads = target_vcs._heads(target_ref.name)
1608 1608 if len(heads) != 1:
1609 1609 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1610 1610 metadata.update({
1611 1611 'heads': heads
1612 1612 })
1613 1613
1614 1614 merge_state = MergeResponse(
1615 1615 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1616 1616
1617 1617 return merge_state
1618 1618
1619 1619 def _refresh_reference(self, reference, vcs_repository):
1620 1620 if reference.type in self.UPDATABLE_REF_TYPES:
1621 1621 name_or_id = reference.name
1622 1622 else:
1623 1623 name_or_id = reference.commit_id
1624 1624
1625 1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1626 1626 refreshed_reference = Reference(
1627 1627 reference.type, reference.name, refreshed_commit.raw_id)
1628 1628 return refreshed_reference
1629 1629
1630 1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
1631 1631 return not (
1632 1632 pull_request.revisions and
1633 1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1634 1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1635 1635
1636 1636 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1637 1637 workspace_id = self._workspace_id(pull_request)
1638 1638 source_vcs = pull_request.source_repo.scm_instance()
1639 1639 repo_id = pull_request.target_repo.repo_id
1640 1640 use_rebase = self._use_rebase_for_merging(pull_request)
1641 1641 close_branch = self._close_branch_before_merging(pull_request)
1642 1642 merge_state = target_vcs.merge(
1643 1643 repo_id, workspace_id,
1644 1644 target_reference, source_vcs, pull_request.source_ref_parts,
1645 1645 dry_run=True, use_rebase=use_rebase,
1646 1646 close_branch=close_branch)
1647 1647
1648 1648 # Do not store the response if there was an unknown error.
1649 1649 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1650 1650 pull_request._last_merge_source_rev = \
1651 1651 pull_request.source_ref_parts.commit_id
1652 1652 pull_request._last_merge_target_rev = target_reference.commit_id
1653 1653 pull_request.last_merge_status = merge_state.failure_reason
1654 1654 pull_request.last_merge_metadata = merge_state.metadata
1655 1655
1656 1656 pull_request.shadow_merge_ref = merge_state.merge_ref
1657 1657 Session().add(pull_request)
1658 1658 Session().commit()
1659 1659
1660 1660 return merge_state
1661 1661
1662 1662 def _workspace_id(self, pull_request):
1663 1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1664 1664 return workspace_id
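    # For example, pull request 42 yields the workspace id 'pr-42'; this id
    # is what the merge and cleanup calls above use to locate the shadow
    # merge workspace for that pull request.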
1665 1665
1666 1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1667 1667 bookmark=None, translator=None):
1668 1668 from rhodecode.model.repo import RepoModel
1669 1669
1670 1670 all_refs, selected_ref = \
1671 1671 self._get_repo_pullrequest_sources(
1672 1672 repo.scm_instance(), commit_id=commit_id,
1673 1673 branch=branch, bookmark=bookmark, translator=translator)
1674 1674
1675 1675 refs_select2 = []
1676 1676 for element in all_refs:
1677 1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1678 1678 refs_select2.append({'text': element[1], 'children': children})
1679 1679
1680 1680 return {
1681 1681 'user': {
1682 1682 'user_id': repo.user.user_id,
1683 1683 'username': repo.user.username,
1684 1684 'firstname': repo.user.first_name,
1685 1685 'lastname': repo.user.last_name,
1686 1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1687 1687 },
1688 1688 'name': repo.repo_name,
1689 1689 'link': RepoModel().get_url(repo),
1690 1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1691 1691 'refs': {
1692 1692 'all_refs': all_refs,
1693 1693 'selected_ref': selected_ref,
1694 1694 'select2_refs': refs_select2
1695 1695 }
1696 1696 }
1697 1697
1698 1698 def generate_pullrequest_title(self, source, source_ref, target):
1699 1699 return u'{source}#{at_ref} to {target}'.format(
1700 1700 source=source,
1701 1701 at_ref=source_ref,
1702 1702 target=target,
1703 1703 )
1704 1704
1705 1705 def _cleanup_merge_workspace(self, pull_request):
1706 1706 # Merging related cleanup
1707 1707 repo_id = pull_request.target_repo.repo_id
1708 1708 target_scm = pull_request.target_repo.scm_instance()
1709 1709 workspace_id = self._workspace_id(pull_request)
1710 1710
1711 1711 try:
1712 1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1713 1713 except NotImplementedError:
1714 1714 pass
1715 1715
1716 1716 def _get_repo_pullrequest_sources(
1717 1717 self, repo, commit_id=None, branch=None, bookmark=None,
1718 1718 translator=None):
1719 1719 """
1720 1720 Return a structure with the repo's interesting commits, suitable for
1721 1721 the selectors in the pullrequest controller
1722 1722
1723 1723 :param commit_id: a commit that must be in the list somehow
1724 1724 and selected by default
1725 1725 :param branch: a branch that must be in the list and selected
1726 1726 by default - even if closed
1727 1727 :param bookmark: a bookmark that must be in the list and selected by default
1728 1728 """
1729 1729 _ = translator or get_current_request().translate
1730 1730
1731 1731 commit_id = safe_str(commit_id) if commit_id else None
1732 1732 branch = safe_unicode(branch) if branch else None
1733 1733 bookmark = safe_unicode(bookmark) if bookmark else None
1734 1734
1735 1735 selected = None
1736 1736
1737 1737 # order matters: first source that has commit_id in it will be selected
1738 1738 sources = []
1739 1739 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1740 1740 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1741 1741
1742 1742 if commit_id:
1743 1743 ref_commit = (h.short_id(commit_id), commit_id)
1744 1744 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1745 1745
1746 1746 sources.append(
1747 1747 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1748 1748 )
1749 1749
1750 1750 groups = []
1751 1751
1752 1752 for group_key, ref_list, group_name, match in sources:
1753 1753 group_refs = []
1754 1754 for ref_name, ref_id in ref_list:
1755 1755 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1756 1756 group_refs.append((ref_key, ref_name))
1757 1757
1758 1758 if not selected:
1759 1759 if set([commit_id, match]) & set([ref_id, ref_name]):
1760 1760 selected = ref_key
1761 1761
1762 1762 if group_refs:
1763 1763 groups.append((group_refs, group_name))
1764 1764
1765 1765 if not selected:
1766 1766 ref = commit_id or branch or bookmark
1767 1767 if ref:
1768 1768 raise CommitDoesNotExistError(
1769 1769 u'No commit refs could be found matching: {}'.format(ref))
1770 1770 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1771 1771 selected = u'branch:{}:{}'.format(
1772 1772 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1773 1773 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1774 1774 )
1775 1775 elif repo.commit_ids:
1776 1776 # make the user select in this case
1777 1777 selected = None
1778 1778 else:
1779 1779 raise EmptyRepositoryError()
1780 1780 return groups, selected
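    # Illustrative shape of the returned structure (all values below are
    # hypothetical):
    #
    #   groups = [
    #       ([(u'book:feature-x:abc123', u'feature-x')], u'Bookmarks'),
    #       ([(u'branch:default:def456', u'default')], u'Branches'),
    #   ]
    #   selected = u'branch:default:def456'
    #
    # i.e. each ref key is '<type>:<name>:<commit_id>' and `selected` is the
    # key that should be pre-selected in the pull-request ref selector.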
1781 1781
1782 1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1783 1783 hide_whitespace_changes, diff_context):
1784 1784
1785 1785 return self._get_diff_from_pr_or_version(
1786 1786 source_repo, source_ref_id, target_ref_id,
1787 1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1788 1788
1789 1789 def _get_diff_from_pr_or_version(
1790 1790 self, source_repo, source_ref_id, target_ref_id,
1791 1791 hide_whitespace_changes, diff_context):
1792 1792
1793 1793 target_commit = source_repo.get_commit(
1794 1794 commit_id=safe_str(target_ref_id))
1795 1795 source_commit = source_repo.get_commit(
1796 1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1797 1797 if isinstance(source_repo, Repository):
1798 1798 vcs_repo = source_repo.scm_instance()
1799 1799 else:
1800 1800 vcs_repo = source_repo
1801 1801
1802 1802 # TODO: johbo: In the context of an update, we cannot reach
1803 1803 # the old commit anymore with our normal mechanisms. It needs
1804 1804 # some sort of special support in the vcs layer to avoid this
1805 1805 # workaround.
1806 1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1807 1807 vcs_repo.alias == 'git'):
1808 1808 source_commit.raw_id = safe_str(source_ref_id)
1809 1809
1810 1810 log.debug('calculating diff between '
1811 1811 'source_ref:%s and target_ref:%s for repo `%s`',
1812 1812 target_ref_id, source_ref_id,
1813 1813 safe_unicode(vcs_repo.path))
1814 1814
1815 1815 vcs_diff = vcs_repo.get_diff(
1816 1816 commit1=target_commit, commit2=source_commit,
1817 1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1818 1818 return vcs_diff
1819 1819
1820 1820 def _is_merge_enabled(self, pull_request):
1821 1821 return self._get_general_setting(
1822 1822 pull_request, 'rhodecode_pr_merge_enabled')
1823 1823
1824 1824 def _use_rebase_for_merging(self, pull_request):
1825 1825 repo_type = pull_request.target_repo.repo_type
1826 1826 if repo_type == 'hg':
1827 1827 return self._get_general_setting(
1828 1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1829 1829 elif repo_type == 'git':
1830 1830 return self._get_general_setting(
1831 1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1832 1832
1833 1833 return False
1834 1834
1835 1835 def _user_name_for_merging(self, pull_request, user):
1836 1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1837 1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1838 1838 user_name_attr = env_user_name_attr
1839 1839 else:
1840 1840 user_name_attr = 'short_contact'
1841 1841
1842 1842 user_name = getattr(user, user_name_attr)
1843 1843 return user_name
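    # Illustrative: exporting RC_MERGE_USER_NAME_ATTR=username in the server
    # environment makes merges be recorded under `user.username` instead of
    # the default `user.short_contact`; the named attribute must exist on the
    # user object, otherwise the default is used.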
1844 1844
1845 1845 def _close_branch_before_merging(self, pull_request):
1846 1846 repo_type = pull_request.target_repo.repo_type
1847 1847 if repo_type == 'hg':
1848 1848 return self._get_general_setting(
1849 1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1850 1850 elif repo_type == 'git':
1851 1851 return self._get_general_setting(
1852 1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1853 1853
1854 1854 return False
1855 1855
1856 1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1857 1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1858 1858 settings = settings_model.get_general_settings()
1859 1859 return settings.get(settings_key, default)
1860 1860
1861 1861 def _log_audit_action(self, action, action_data, user, pull_request):
1862 1862 audit_logger.store(
1863 1863 action=action,
1864 1864 action_data=action_data,
1865 1865 user=user,
1866 1866 repo=pull_request.target_repo)
1867 1867
1868 1868 def get_reviewer_functions(self):
1869 1869 """
1870 1870 Fetches the functions used to validate and fetch default reviewers.
1871 1871 If available we use the EE package, otherwise we fall back to the CE
1872 1872 package functions.
1873 1873 """
1874 1874 try:
1875 1875 from rc_reviewers.utils import get_default_reviewers_data
1876 1876 from rc_reviewers.utils import validate_default_reviewers
1877 1877 except ImportError:
1878 1878 from rhodecode.apps.repository.utils import get_default_reviewers_data
1879 1879 from rhodecode.apps.repository.utils import validate_default_reviewers
1880 1880
1881 1881 return get_default_reviewers_data, validate_default_reviewers
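    # Illustrative usage sketch (arguments omitted; see the CE/EE
    # implementations for the exact signatures):
    #
    #   get_default_reviewers_data, validate_default_reviewers = \
    #       PullRequestModel().get_reviewer_functions()
    #   reviewer_data = get_default_reviewers_data(...)  # hypothetical call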
1882 1882
1883 1883
1884 1884 class MergeCheck(object):
1885 1885 """
1886 1886 Performs merge checks and returns a check object which stores information
1887 1887 about merge errors and merge conditions.
1888 1888 """
1889 1889 TODO_CHECK = 'todo'
1890 1890 PERM_CHECK = 'perm'
1891 1891 REVIEW_CHECK = 'review'
1892 1892 MERGE_CHECK = 'merge'
1893 1893 WIP_CHECK = 'wip'
1894 1894
1895 1895 def __init__(self):
1896 1896 self.review_status = None
1897 1897 self.merge_possible = None
1898 1898 self.merge_msg = ''
1899 1899 self.merge_response = None
1900 1900 self.failed = None
1901 1901 self.errors = []
1902 1902 self.error_details = OrderedDict()
1903 1903 self.source_commit = AttributeDict()
1904 1904 self.target_commit = AttributeDict()
1905 1905
1906 1906 def __repr__(self):
1907 1907 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1908 1908 self.merge_possible, self.failed, self.errors)
1909 1909
1910 1910 def push_error(self, error_type, message, error_key, details):
1911 1911 self.failed = True
1912 1912 self.errors.append([error_type, message])
1913 1913 self.error_details[error_key] = dict(
1914 1914 details=details,
1915 1915 error_type=error_type,
1916 1916 message=message
1917 1917 )
1918 1918
1919 1919 @classmethod
1920 1920 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1921 1921 force_shadow_repo_refresh=False):
1922 1922 _ = translator
1923 1923 merge_check = cls()
1924 1924
1925 1925 # title has WIP:
1926 1926 if pull_request.work_in_progress:
1927 1927 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1928 1928
1929 1929 msg = _('WIP marker in title prevents from accidental merge.')
1930 1930 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1931 1931 if fail_early:
1932 1932 return merge_check
1933 1933
1934 1934 # permissions to merge
1935 1935 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
1936 1936 if not user_allowed_to_merge:
1937 1937 log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
1938 1938
1939 1939 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1940 1940 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1941 1941 if fail_early:
1942 1942 return merge_check
1943 1943
1944 1944 # permission to merge into the target branch
1945 1945 target_commit_id = pull_request.target_ref_parts.commit_id
1946 1946 if pull_request.target_ref_parts.type == 'branch':
1947 1947 branch_name = pull_request.target_ref_parts.name
1948 1948 else:
1949 1949 # for mercurial we can always figure out the branch from the commit
1950 1950 # in case of bookmark
1951 1951 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1952 1952 branch_name = target_commit.branch
1953 1953
1954 1954 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1955 1955 pull_request.target_repo.repo_name, branch_name)
1956 1956 if branch_perm and branch_perm == 'branch.none':
1957 1957 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1958 1958 branch_name, rule)
1959 1959 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1960 1960 if fail_early:
1961 1961 return merge_check
1962 1962
1963 1963 # review status, must be always present
1964 1964 review_status = pull_request.calculated_review_status()
1965 1965 merge_check.review_status = review_status
1966 1966
1967 1967 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1968 1968 if not status_approved:
1969 1969 log.debug("MergeCheck: cannot merge, approval is pending.")
1970 1970
1971 1971 msg = _('Pull request reviewer approval is pending.')
1972 1972
1973 1973 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1974 1974
1975 1975 if fail_early:
1976 1976 return merge_check
1977 1977
1978 1978 # left over TODOs
1979 1979 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1980 1980 if todos:
1981 1981 log.debug("MergeCheck: cannot merge, {} "
1982 1982 "unresolved TODOs left.".format(len(todos)))
1983 1983
1984 1984 if len(todos) == 1:
1985 1985 msg = _('Cannot merge, {} TODO still not resolved.').format(
1986 1986 len(todos))
1987 1987 else:
1988 1988 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1989 1989 len(todos))
1990 1990
1991 1991 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1992 1992
1993 1993 if fail_early:
1994 1994 return merge_check
1995 1995
1996 1996 # merge possible, here is the filesystem simulation + shadow repo
1997 1997 merge_response, merge_status, msg = PullRequestModel().merge_status(
1998 1998 pull_request, translator=translator,
1999 1999 force_shadow_repo_refresh=force_shadow_repo_refresh)
2000 2000
2001 2001 merge_check.merge_possible = merge_status
2002 2002 merge_check.merge_msg = msg
2003 2003 merge_check.merge_response = merge_response
2004 2004
2005 2005 source_ref_id = pull_request.source_ref_parts.commit_id
2006 2006 target_ref_id = pull_request.target_ref_parts.commit_id
2007 2007
2008 2008 try:
2009 2009 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2010 2010 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2011 2011 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2012 2012 merge_check.source_commit.current_raw_id = source_commit.raw_id
2013 2013 merge_check.source_commit.previous_raw_id = source_ref_id
2014 2014
2015 2015 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2016 2016 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2017 2017 merge_check.target_commit.current_raw_id = target_commit.raw_id
2018 2018 merge_check.target_commit.previous_raw_id = target_ref_id
2019 2019 except (SourceRefMissing, TargetRefMissing):
2020 2020 pass
2021 2021
2022 2022 if not merge_status:
2023 2023 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2024 2024 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2025 2025
2026 2026 if fail_early:
2027 2027 return merge_check
2028 2028
2029 2029 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2030 2030 return merge_check
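    # Illustrative usage sketch (not part of the original code):
    #
    #   check = MergeCheck.validate(
    #       pull_request, auth_user=auth_user, translator=_, fail_early=True)
    #   if check.failed:
    #       for error_type, message in check.errors:
    #           log.debug('merge blocked (%s): %s', error_type, message)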
2031 2031
2032 2032 @classmethod
2033 2033 def get_merge_conditions(cls, pull_request, translator):
2034 2034 _ = translator
2035 2035 merge_details = {}
2036 2036
2037 2037 model = PullRequestModel()
2038 2038 use_rebase = model._use_rebase_for_merging(pull_request)
2039 2039
2040 2040 if use_rebase:
2041 2041 merge_details['merge_strategy'] = dict(
2042 2042 details={},
2043 2043 message=_('Merge strategy: rebase')
2044 2044 )
2045 2045 else:
2046 2046 merge_details['merge_strategy'] = dict(
2047 2047 details={},
2048 2048 message=_('Merge strategy: explicit merge commit')
2049 2049 )
2050 2050
2051 2051 close_branch = model._close_branch_before_merging(pull_request)
2052 2052 if close_branch:
2053 2053 repo_type = pull_request.target_repo.repo_type
2054 2054 close_msg = ''
2055 2055 if repo_type == 'hg':
2056 2056 close_msg = _('Source branch will be closed after merge.')
2057 2057 elif repo_type == 'git':
2058 2058 close_msg = _('Source branch will be deleted after merge.')
2059 2059
2060 2060 merge_details['close_branch'] = dict(
2061 2061 details={},
2062 2062 message=close_msg
2063 2063 )
2064 2064
2065 2065 return merge_details
2066 2066
2067 2067
2068 2068 ChangeTuple = collections.namedtuple(
2069 2069 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2070 2070
2071 2071 FileChangeTuple = collections.namedtuple(
2072 2072 'FileChangeTuple', ['added', 'modified', 'removed'])
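# Illustrative values (hypothetical) for the tuples passed to notify_users()
# above as `commit_changes` and `file_changes`:
#
#   ChangeTuple(added=['abc1'], common=['def2'], removed=[], total=['abc1', 'def2'])
#   FileChangeTuple(added=['docs/index.rst'], modified=['setup.py'], removed=[])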