reviewers: use the common function to obtain reviewers, with filtering by role.
marcink
r4514:96651613 stable
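The change below replaces direct iteration over the reviewers relationship with the shared accessor get_pull_request_reviewers(), which can also filter rows by role. A short sketch of the pattern, using only names that appear in this diff (PullRequest.get_pull_request_reviewers() and PullRequestReviewers.ROLE_REVIEWER); treat it as an illustration of the idea, not the actual implementation:

from rhodecode.model.db import PullRequestReviewers  # imported the same way in pull_request.py below

def comment_recipients(pull_request_obj):
    # after this change: go through the common helper instead of reading the
    # raw reviewers relationship, so role handling lives in one place
    return [x.user for x in pull_request_obj.get_pull_request_reviewers()]

def reviewer_user_ids(pull_request_obj):
    # only rows with the reviewer role, mirroring is_user_reviewer() further below
    return [
        x.user_id
        for x in pull_request_obj.get_pull_request_reviewers(
            PullRequestReviewers.ROLE_REVIEWER)
        if x.user
    ]

Callers that also want observers simply call the helper without a role argument, as the new recipients line in comment.py does.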
@@ -1,821 +1,821 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 comments model for RhodeCode
23 23 """
24 24 import datetime
25 25
26 26 import logging
27 27 import traceback
28 28 import collections
29 29
30 30 from pyramid.threadlocal import get_current_registry, get_current_request
31 31 from sqlalchemy.sql.expression import null
32 32 from sqlalchemy.sql.functions import coalesce
33 33
34 34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 35 from rhodecode.lib import audit_logger
36 36 from rhodecode.lib.exceptions import CommentVersionMismatch
37 37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 38 from rhodecode.model import BaseModel
39 39 from rhodecode.model.db import (
40 40 ChangesetComment,
41 41 User,
42 42 Notification,
43 43 PullRequest,
44 44 AttributeDict,
45 45 ChangesetCommentHistory,
46 46 )
47 47 from rhodecode.model.notification import NotificationModel
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.settings import VcsSettingsModel
50 50 from rhodecode.model.notification import EmailNotificationModel
51 51 from rhodecode.model.validation_schema.schemas import comment_schema
52 52
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class CommentsModel(BaseModel):
58 58
59 59 cls = ChangesetComment
60 60
61 61 DIFF_CONTEXT_BEFORE = 3
62 62 DIFF_CONTEXT_AFTER = 3
63 63
64 64 def __get_commit_comment(self, changeset_comment):
65 65 return self._get_instance(ChangesetComment, changeset_comment)
66 66
67 67 def __get_pull_request(self, pull_request):
68 68 return self._get_instance(PullRequest, pull_request)
69 69
70 70 def _extract_mentions(self, s):
71 71 user_objects = []
72 72 for username in extract_mentioned_users(s):
73 73 user_obj = User.get_by_username(username, case_insensitive=True)
74 74 if user_obj:
75 75 user_objects.append(user_obj)
76 76 return user_objects
77 77
78 78 def _get_renderer(self, global_renderer='rst', request=None):
79 79 request = request or get_current_request()
80 80
81 81 try:
82 82 global_renderer = request.call_context.visual.default_renderer
83 83 except AttributeError:
84 84 log.debug("Renderer not set, falling back "
85 85 "to default renderer '%s'", global_renderer)
86 86 except Exception:
87 87 log.error(traceback.format_exc())
88 88 return global_renderer
89 89
90 90 def aggregate_comments(self, comments, versions, show_version, inline=False):
91 91 # group comments by version; build 'at', cumulative 'until', 'outdated' and 'display' lists
92 92
93 93 comment_groups = collections.defaultdict(list)
94 94 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
95 95
96 96 def yield_comments(pos):
97 97 for co in comment_groups[pos]:
98 98 yield co
99 99
100 100 comment_versions = collections.defaultdict(
101 101 lambda: collections.defaultdict(list))
102 102 prev_prvid = -1
103 103 # fake last entry with None, to aggregate on the "latest" version, which
104 104 # doesn't have a pull_request_version_id
105 105 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
106 106 prvid = ver.pull_request_version_id
107 107 if prev_prvid == -1:
108 108 prev_prvid = prvid
109 109
110 110 for co in yield_comments(prvid):
111 111 comment_versions[prvid]['at'].append(co)
112 112
113 113 # save until
114 114 current = comment_versions[prvid]['at']
115 115 prev_until = comment_versions[prev_prvid]['until']
116 116 cur_until = prev_until + current
117 117 comment_versions[prvid]['until'].extend(cur_until)
118 118
119 119 # save outdated
120 120 if inline:
121 121 outdated = [x for x in cur_until
122 122 if x.outdated_at_version(show_version)]
123 123 else:
124 124 outdated = [x for x in cur_until
125 125 if x.older_than_version(show_version)]
126 126 display = [x for x in cur_until if x not in outdated]
127 127
128 128 comment_versions[prvid]['outdated'] = outdated
129 129 comment_versions[prvid]['display'] = display
130 130
131 131 prev_prvid = prvid
132 132
133 133 return comment_versions
134 134
135 135 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
136 136 qry = Session().query(ChangesetComment) \
137 137 .filter(ChangesetComment.repo == repo)
138 138
139 139 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
140 140 qry = qry.filter(ChangesetComment.comment_type == comment_type)
141 141
142 142 if user:
143 143 user = self._get_user(user)
144 144 if user:
145 145 qry = qry.filter(ChangesetComment.user_id == user.user_id)
146 146
147 147 if commit_id:
148 148 qry = qry.filter(ChangesetComment.revision == commit_id)
149 149
150 150 qry = qry.order_by(ChangesetComment.created_on)
151 151 return qry.all()
152 152
153 153 def get_repository_unresolved_todos(self, repo):
154 154 todos = Session().query(ChangesetComment) \
155 155 .filter(ChangesetComment.repo == repo) \
156 156 .filter(ChangesetComment.resolved_by == None) \
157 157 .filter(ChangesetComment.comment_type
158 158 == ChangesetComment.COMMENT_TYPE_TODO)
159 159 todos = todos.all()
160 160
161 161 return todos
162 162
163 163 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True):
164 164
165 165 todos = Session().query(ChangesetComment) \
166 166 .filter(ChangesetComment.pull_request == pull_request) \
167 167 .filter(ChangesetComment.resolved_by == None) \
168 168 .filter(ChangesetComment.comment_type
169 169 == ChangesetComment.COMMENT_TYPE_TODO)
170 170
171 171 if not show_outdated:
172 172 todos = todos.filter(
173 173 coalesce(ChangesetComment.display_state, '') !=
174 174 ChangesetComment.COMMENT_OUTDATED)
175 175
176 176 todos = todos.all()
177 177
178 178 return todos
179 179
180 180 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True):
181 181
182 182 todos = Session().query(ChangesetComment) \
183 183 .filter(ChangesetComment.pull_request == pull_request) \
184 184 .filter(ChangesetComment.resolved_by != None) \
185 185 .filter(ChangesetComment.comment_type
186 186 == ChangesetComment.COMMENT_TYPE_TODO)
187 187
188 188 if not show_outdated:
189 189 todos = todos.filter(
190 190 coalesce(ChangesetComment.display_state, '') !=
191 191 ChangesetComment.COMMENT_OUTDATED)
192 192
193 193 todos = todos.all()
194 194
195 195 return todos
196 196
197 197 def get_commit_unresolved_todos(self, commit_id, show_outdated=True):
198 198
199 199 todos = Session().query(ChangesetComment) \
200 200 .filter(ChangesetComment.revision == commit_id) \
201 201 .filter(ChangesetComment.resolved_by == None) \
202 202 .filter(ChangesetComment.comment_type
203 203 == ChangesetComment.COMMENT_TYPE_TODO)
204 204
205 205 if not show_outdated:
206 206 todos = todos.filter(
207 207 coalesce(ChangesetComment.display_state, '') !=
208 208 ChangesetComment.COMMENT_OUTDATED)
209 209
210 210 todos = todos.all()
211 211
212 212 return todos
213 213
214 214 def get_commit_resolved_todos(self, commit_id, show_outdated=True):
215 215
216 216 todos = Session().query(ChangesetComment) \
217 217 .filter(ChangesetComment.revision == commit_id) \
218 218 .filter(ChangesetComment.resolved_by != None) \
219 219 .filter(ChangesetComment.comment_type
220 220 == ChangesetComment.COMMENT_TYPE_TODO)
221 221
222 222 if not show_outdated:
223 223 todos = todos.filter(
224 224 coalesce(ChangesetComment.display_state, '') !=
225 225 ChangesetComment.COMMENT_OUTDATED)
226 226
227 227 todos = todos.all()
228 228
229 229 return todos
230 230
231 231 def get_commit_inline_comments(self, commit_id):
232 232 inline_comments = Session().query(ChangesetComment) \
233 233 .filter(ChangesetComment.line_no != None) \
234 234 .filter(ChangesetComment.f_path != None) \
235 235 .filter(ChangesetComment.revision == commit_id)
236 236 inline_comments = inline_comments.all()
237 237 return inline_comments
238 238
239 239 def _log_audit_action(self, action, action_data, auth_user, comment):
240 240 audit_logger.store(
241 241 action=action,
242 242 action_data=action_data,
243 243 user=auth_user,
244 244 repo=comment.repo)
245 245
246 246 def create(self, text, repo, user, commit_id=None, pull_request=None,
247 247 f_path=None, line_no=None, status_change=None,
248 248 status_change_type=None, comment_type=None,
249 249 resolves_comment_id=None, closing_pr=False, send_email=True,
250 250 renderer=None, auth_user=None, extra_recipients=None):
251 251 """
252 252 Creates new comment for commit or pull request.
253 253 If status_change is not None, this comment is associated with a
254 254 status change of a commit, or of a commit associated with the pull request
255 255
256 256 :param text:
257 257 :param repo:
258 258 :param user:
259 259 :param commit_id:
260 260 :param pull_request:
261 261 :param f_path:
262 262 :param line_no:
263 263 :param status_change: Label for status change
264 264 :param comment_type: Type of comment
265 265 :param resolves_comment_id: id of comment which this one will resolve
266 266 :param status_change_type: type of status change
267 267 :param closing_pr:
268 268 :param send_email:
269 269 :param renderer: pick renderer for this comment
270 270 :param auth_user: current authenticated user calling this method
271 271 :param extra_recipients: list of extra users to be added to recipients
272 272 """
273 273
274 274 if not text:
275 275 log.warning('Missing text for comment, skipping...')
276 276 return
277 277 request = get_current_request()
278 278 _ = request.translate
279 279
280 280 if not renderer:
281 281 renderer = self._get_renderer(request=request)
282 282
283 283 repo = self._get_repo(repo)
284 284 user = self._get_user(user)
285 285 auth_user = auth_user or user
286 286
287 287 schema = comment_schema.CommentSchema()
288 288 validated_kwargs = schema.deserialize(dict(
289 289 comment_body=text,
290 290 comment_type=comment_type,
291 291 comment_file=f_path,
292 292 comment_line=line_no,
293 293 renderer_type=renderer,
294 294 status_change=status_change_type,
295 295 resolves_comment_id=resolves_comment_id,
296 296 repo=repo.repo_id,
297 297 user=user.user_id,
298 298 ))
299 299
300 300 comment = ChangesetComment()
301 301 comment.renderer = validated_kwargs['renderer_type']
302 302 comment.text = validated_kwargs['comment_body']
303 303 comment.f_path = validated_kwargs['comment_file']
304 304 comment.line_no = validated_kwargs['comment_line']
305 305 comment.comment_type = validated_kwargs['comment_type']
306 306
307 307 comment.repo = repo
308 308 comment.author = user
309 309 resolved_comment = self.__get_commit_comment(
310 310 validated_kwargs['resolves_comment_id'])
311 311 # check if the comment actually belongs to this PR
312 312 if resolved_comment and resolved_comment.pull_request and \
313 313 resolved_comment.pull_request != pull_request:
314 314 log.warning('Comment tried to resolve unrelated todo comment: %s',
315 315 resolved_comment)
316 316 # comment not bound to this pull request, forbid
317 317 resolved_comment = None
318 318
319 319 elif resolved_comment and resolved_comment.repo and \
320 320 resolved_comment.repo != repo:
321 321 log.warning('Comment tried to resolve unrelated todo comment: %s',
322 322 resolved_comment)
323 323 # comment not bound to this repo, forbid
324 324 resolved_comment = None
325 325
326 326 comment.resolved_comment = resolved_comment
327 327
328 328 pull_request_id = pull_request
329 329
330 330 commit_obj = None
331 331 pull_request_obj = None
332 332
333 333 if commit_id:
334 334 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
335 335 # do a lookup, so we don't pass something bad here
336 336 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
337 337 comment.revision = commit_obj.raw_id
338 338
339 339 elif pull_request_id:
340 340 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
341 341 pull_request_obj = self.__get_pull_request(pull_request_id)
342 342 comment.pull_request = pull_request_obj
343 343 else:
344 344 raise Exception('Please specify commit or pull_request_id')
345 345
346 346 Session().add(comment)
347 347 Session().flush()
348 348 kwargs = {
349 349 'user': user,
350 350 'renderer_type': renderer,
351 351 'repo_name': repo.repo_name,
352 352 'status_change': status_change,
353 353 'status_change_type': status_change_type,
354 354 'comment_body': text,
355 355 'comment_file': f_path,
356 356 'comment_line': line_no,
357 357 'comment_type': comment_type or 'note',
358 358 'comment_id': comment.comment_id
359 359 }
360 360
361 361 if commit_obj:
362 362 recipients = ChangesetComment.get_users(
363 363 revision=commit_obj.raw_id)
364 364 # add commit author if it's in RhodeCode system
365 365 cs_author = User.get_from_cs_author(commit_obj.author)
366 366 if not cs_author:
367 367 # use repo owner if we cannot extract the author correctly
368 368 cs_author = repo.user
369 369 recipients += [cs_author]
370 370
371 371 commit_comment_url = self.get_url(comment, request=request)
372 372 commit_comment_reply_url = self.get_url(
373 373 comment, request=request,
374 374 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
375 375
376 376 target_repo_url = h.link_to(
377 377 repo.repo_name,
378 378 h.route_url('repo_summary', repo_name=repo.repo_name))
379 379
380 380 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
381 381 commit_id=commit_id)
382 382
383 383 # commit specifics
384 384 kwargs.update({
385 385 'commit': commit_obj,
386 386 'commit_message': commit_obj.message,
387 387 'commit_target_repo_url': target_repo_url,
388 388 'commit_comment_url': commit_comment_url,
389 389 'commit_comment_reply_url': commit_comment_reply_url,
390 390 'commit_url': commit_url,
391 391 'thread_ids': [commit_url, commit_comment_url],
392 392 })
393 393
394 394 elif pull_request_obj:
395 395 # get the current participants of this pull request
396 396 recipients = ChangesetComment.get_users(
397 397 pull_request_id=pull_request_obj.pull_request_id)
398 398 # add pull request author
399 399 recipients += [pull_request_obj.author]
400 400
401 401 # add the reviewers to notification
402 recipients += [x.user for x in pull_request_obj.reviewers]
402 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
403 403
404 404 pr_target_repo = pull_request_obj.target_repo
405 405 pr_source_repo = pull_request_obj.source_repo
406 406
407 407 pr_comment_url = self.get_url(comment, request=request)
408 408 pr_comment_reply_url = self.get_url(
409 409 comment, request=request,
410 410 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
411 411
412 412 pr_url = h.route_url(
413 413 'pullrequest_show',
414 414 repo_name=pr_target_repo.repo_name,
415 415 pull_request_id=pull_request_obj.pull_request_id, )
416 416
417 417 # set some variables for email notification
418 418 pr_target_repo_url = h.route_url(
419 419 'repo_summary', repo_name=pr_target_repo.repo_name)
420 420
421 421 pr_source_repo_url = h.route_url(
422 422 'repo_summary', repo_name=pr_source_repo.repo_name)
423 423
424 424 # pull request specifics
425 425 kwargs.update({
426 426 'pull_request': pull_request_obj,
427 427 'pr_id': pull_request_obj.pull_request_id,
428 428 'pull_request_url': pr_url,
429 429 'pull_request_target_repo': pr_target_repo,
430 430 'pull_request_target_repo_url': pr_target_repo_url,
431 431 'pull_request_source_repo': pr_source_repo,
432 432 'pull_request_source_repo_url': pr_source_repo_url,
433 433 'pr_comment_url': pr_comment_url,
434 434 'pr_comment_reply_url': pr_comment_reply_url,
435 435 'pr_closing': closing_pr,
436 436 'thread_ids': [pr_url, pr_comment_url],
437 437 })
438 438
439 439 if send_email:
440 440 recipients += [self._get_user(u) for u in (extra_recipients or [])]
441 441 # pre-generate the subject for notification itself
442 442 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
443 443 notification_type, **kwargs)
444 444
445 445 mention_recipients = set(
446 446 self._extract_mentions(text)).difference(recipients)
447 447
448 448 # create notification objects, and emails
449 449 NotificationModel().create(
450 450 created_by=user,
451 451 notification_subject=subject,
452 452 notification_body=body_plaintext,
453 453 notification_type=notification_type,
454 454 recipients=recipients,
455 455 mention_recipients=mention_recipients,
456 456 email_kwargs=kwargs,
457 457 )
458 458
459 459 Session().flush()
460 460 if comment.pull_request:
461 461 action = 'repo.pull_request.comment.create'
462 462 else:
463 463 action = 'repo.commit.comment.create'
464 464
465 465 comment_data = comment.get_api_data()
466 466
467 467 self._log_audit_action(
468 468 action, {'data': comment_data}, auth_user, comment)
469 469
470 470 return comment
471 471
472 472 def edit(self, comment_id, text, auth_user, version):
473 473 """
474 474 Change existing comment for commit or pull request.
475 475
476 476 :param comment_id:
477 477 :param text:
478 478 :param auth_user: current authenticated user calling this method
479 479 :param version: last comment version
480 480 """
481 481 if not text:
482 482 log.warning('Missing text for comment, skipping...')
483 483 return
484 484
485 485 comment = ChangesetComment.get(comment_id)
486 486 old_comment_text = comment.text
487 487 comment.text = text
488 488 comment.modified_at = datetime.datetime.now()
489 489 version = safe_int(version)
490 490
491 491 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
492 492 # would return 3 here
493 493 comment_version = ChangesetCommentHistory.get_version(comment_id)
494 494
495 495 if isinstance(version, (int, long)) and (comment_version - version) != 1:
496 496 log.warning(
497 497 'Version mismatch comment_version {} submitted {}, skipping'.format(
498 498 comment_version-1, # -1 since note above
499 499 version
500 500 )
501 501 )
502 502 raise CommentVersionMismatch()
503 503
504 504 comment_history = ChangesetCommentHistory()
505 505 comment_history.comment_id = comment_id
506 506 comment_history.version = comment_version
507 507 comment_history.created_by_user_id = auth_user.user_id
508 508 comment_history.text = old_comment_text
509 509 # TODO add email notification
510 510 Session().add(comment_history)
511 511 Session().add(comment)
512 512 Session().flush()
513 513
514 514 if comment.pull_request:
515 515 action = 'repo.pull_request.comment.edit'
516 516 else:
517 517 action = 'repo.commit.comment.edit'
518 518
519 519 comment_data = comment.get_api_data()
520 520 comment_data['old_comment_text'] = old_comment_text
521 521 self._log_audit_action(
522 522 action, {'data': comment_data}, auth_user, comment)
523 523
524 524 return comment_history
525 525
526 526 def delete(self, comment, auth_user):
527 527 """
528 528 Deletes given comment
529 529 """
530 530 comment = self.__get_commit_comment(comment)
531 531 old_data = comment.get_api_data()
532 532 Session().delete(comment)
533 533
534 534 if comment.pull_request:
535 535 action = 'repo.pull_request.comment.delete'
536 536 else:
537 537 action = 'repo.commit.comment.delete'
538 538
539 539 self._log_audit_action(
540 540 action, {'old_data': old_data}, auth_user, comment)
541 541
542 542 return comment
543 543
544 544 def get_all_comments(self, repo_id, revision=None, pull_request=None, count_only=False):
545 545 q = ChangesetComment.query()\
546 546 .filter(ChangesetComment.repo_id == repo_id)
547 547 if revision:
548 548 q = q.filter(ChangesetComment.revision == revision)
549 549 elif pull_request:
550 550 pull_request = self.__get_pull_request(pull_request)
551 551 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
552 552 else:
553 553 raise Exception('Please specify commit or pull_request')
554 554 q = q.order_by(ChangesetComment.created_on)
555 555 if count_only:
556 556 return q.count()
557 557
558 558 return q.all()
559 559
560 560 def get_url(self, comment, request=None, permalink=False, anchor=None):
561 561 if not request:
562 562 request = get_current_request()
563 563
564 564 comment = self.__get_commit_comment(comment)
565 565 if anchor is None:
566 566 anchor = 'comment-{}'.format(comment.comment_id)
567 567
568 568 if comment.pull_request:
569 569 pull_request = comment.pull_request
570 570 if permalink:
571 571 return request.route_url(
572 572 'pull_requests_global',
573 573 pull_request_id=pull_request.pull_request_id,
574 574 _anchor=anchor)
575 575 else:
576 576 return request.route_url(
577 577 'pullrequest_show',
578 578 repo_name=safe_str(pull_request.target_repo.repo_name),
579 579 pull_request_id=pull_request.pull_request_id,
580 580 _anchor=anchor)
581 581
582 582 else:
583 583 repo = comment.repo
584 584 commit_id = comment.revision
585 585
586 586 if permalink:
587 587 return request.route_url(
588 588 'repo_commit', repo_name=safe_str(repo.repo_id),
589 589 commit_id=commit_id,
590 590 _anchor=anchor)
591 591
592 592 else:
593 593 return request.route_url(
594 594 'repo_commit', repo_name=safe_str(repo.repo_name),
595 595 commit_id=commit_id,
596 596 _anchor=anchor)
597 597
598 598 def get_comments(self, repo_id, revision=None, pull_request=None):
599 599 """
600 600 Gets main comments based on revision or pull_request_id
601 601
602 602 :param repo_id:
603 603 :param revision:
604 604 :param pull_request:
605 605 """
606 606
607 607 q = ChangesetComment.query()\
608 608 .filter(ChangesetComment.repo_id == repo_id)\
609 609 .filter(ChangesetComment.line_no == None)\
610 610 .filter(ChangesetComment.f_path == None)
611 611 if revision:
612 612 q = q.filter(ChangesetComment.revision == revision)
613 613 elif pull_request:
614 614 pull_request = self.__get_pull_request(pull_request)
615 615 q = q.filter(ChangesetComment.pull_request == pull_request)
616 616 else:
617 617 raise Exception('Please specify commit or pull_request')
618 618 q = q.order_by(ChangesetComment.created_on)
619 619 return q.all()
620 620
621 621 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
622 622 q = self._get_inline_comments_query(repo_id, revision, pull_request)
623 623 return self._group_comments_by_path_and_line_number(q)
624 624
625 625 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
626 626 version=None):
627 627 inline_comms = []
628 628 for fname, per_line_comments in inline_comments.iteritems():
629 629 for lno, comments in per_line_comments.iteritems():
630 630 for comm in comments:
631 631 if not comm.outdated_at_version(version) and skip_outdated:
632 632 inline_comms.append(comm)
633 633
634 634 return inline_comms
635 635
636 636 def get_outdated_comments(self, repo_id, pull_request):
637 637 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
638 638 # of a pull request.
639 639 q = self._all_inline_comments_of_pull_request(pull_request)
640 640 q = q.filter(
641 641 ChangesetComment.display_state ==
642 642 ChangesetComment.COMMENT_OUTDATED
643 643 ).order_by(ChangesetComment.comment_id.asc())
644 644
645 645 return self._group_comments_by_path_and_line_number(q)
646 646
647 647 def _get_inline_comments_query(self, repo_id, revision, pull_request):
648 648 # TODO: johbo: Split this into two methods: One for PR and one for
649 649 # commit.
650 650 if revision:
651 651 q = Session().query(ChangesetComment).filter(
652 652 ChangesetComment.repo_id == repo_id,
653 653 ChangesetComment.line_no != null(),
654 654 ChangesetComment.f_path != null(),
655 655 ChangesetComment.revision == revision)
656 656
657 657 elif pull_request:
658 658 pull_request = self.__get_pull_request(pull_request)
659 659 if not CommentsModel.use_outdated_comments(pull_request):
660 660 q = self._visible_inline_comments_of_pull_request(pull_request)
661 661 else:
662 662 q = self._all_inline_comments_of_pull_request(pull_request)
663 663
664 664 else:
665 665 raise Exception('Please specify commit or pull_request_id')
666 666 q = q.order_by(ChangesetComment.comment_id.asc())
667 667 return q
668 668
669 669 def _group_comments_by_path_and_line_number(self, q):
670 670 comments = q.all()
671 671 paths = collections.defaultdict(lambda: collections.defaultdict(list))
672 672 for co in comments:
673 673 paths[co.f_path][co.line_no].append(co)
674 674 return paths
675 675
676 676 @classmethod
677 677 def needed_extra_diff_context(cls):
678 678 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
679 679
680 680 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
681 681 if not CommentsModel.use_outdated_comments(pull_request):
682 682 return
683 683
684 684 comments = self._visible_inline_comments_of_pull_request(pull_request)
685 685 comments_to_outdate = comments.all()
686 686
687 687 for comment in comments_to_outdate:
688 688 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
689 689
690 690 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
691 691 diff_line = _parse_comment_line_number(comment.line_no)
692 692
693 693 try:
694 694 old_context = old_diff_proc.get_context_of_line(
695 695 path=comment.f_path, diff_line=diff_line)
696 696 new_context = new_diff_proc.get_context_of_line(
697 697 path=comment.f_path, diff_line=diff_line)
698 698 except (diffs.LineNotInDiffException,
699 699 diffs.FileNotInDiffException):
700 700 comment.display_state = ChangesetComment.COMMENT_OUTDATED
701 701 return
702 702
703 703 if old_context == new_context:
704 704 return
705 705
706 706 if self._should_relocate_diff_line(diff_line):
707 707 new_diff_lines = new_diff_proc.find_context(
708 708 path=comment.f_path, context=old_context,
709 709 offset=self.DIFF_CONTEXT_BEFORE)
710 710 if not new_diff_lines:
711 711 comment.display_state = ChangesetComment.COMMENT_OUTDATED
712 712 else:
713 713 new_diff_line = self._choose_closest_diff_line(
714 714 diff_line, new_diff_lines)
715 715 comment.line_no = _diff_to_comment_line_number(new_diff_line)
716 716 else:
717 717 comment.display_state = ChangesetComment.COMMENT_OUTDATED
718 718
719 719 def _should_relocate_diff_line(self, diff_line):
720 720 """
721 721 Checks if relocation shall be tried for the given `diff_line`.
722 722
723 723 If a comment points into the first lines, we can have a situation where,
724 724 after an update, another line has been added on top. In this case
725 725 we would still find the context and move the comment around, which
726 726 would be wrong.
727 727 """
728 728 should_relocate = (
729 729 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
730 730 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
731 731 return should_relocate
732 732
733 733 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
734 734 candidate = new_diff_lines[0]
735 735 best_delta = _diff_line_delta(diff_line, candidate)
736 736 for new_diff_line in new_diff_lines[1:]:
737 737 delta = _diff_line_delta(diff_line, new_diff_line)
738 738 if delta < best_delta:
739 739 candidate = new_diff_line
740 740 best_delta = delta
741 741 return candidate
742 742
743 743 def _visible_inline_comments_of_pull_request(self, pull_request):
744 744 comments = self._all_inline_comments_of_pull_request(pull_request)
745 745 comments = comments.filter(
746 746 coalesce(ChangesetComment.display_state, '') !=
747 747 ChangesetComment.COMMENT_OUTDATED)
748 748 return comments
749 749
750 750 def _all_inline_comments_of_pull_request(self, pull_request):
751 751 comments = Session().query(ChangesetComment)\
752 752 .filter(ChangesetComment.line_no != None)\
753 753 .filter(ChangesetComment.f_path != None)\
754 754 .filter(ChangesetComment.pull_request == pull_request)
755 755 return comments
756 756
757 757 def _all_general_comments_of_pull_request(self, pull_request):
758 758 comments = Session().query(ChangesetComment)\
759 759 .filter(ChangesetComment.line_no == None)\
760 760 .filter(ChangesetComment.f_path == None)\
761 761 .filter(ChangesetComment.pull_request == pull_request)
762 762
763 763 return comments
764 764
765 765 @staticmethod
766 766 def use_outdated_comments(pull_request):
767 767 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
768 768 settings = settings_model.get_general_settings()
769 769 return settings.get('rhodecode_use_outdated_comments', False)
770 770
771 771 def trigger_commit_comment_hook(self, repo, user, action, data=None):
772 772 repo = self._get_repo(repo)
773 773 target_scm = repo.scm_instance()
774 774 if action == 'create':
775 775 trigger_hook = hooks_utils.trigger_comment_commit_hooks
776 776 elif action == 'edit':
777 777 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
778 778 else:
779 779 return
780 780
781 781 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
782 782 repo, action, trigger_hook)
783 783 trigger_hook(
784 784 username=user.username,
785 785 repo_name=repo.repo_name,
786 786 repo_type=target_scm.alias,
787 787 repo=repo,
788 788 data=data)
789 789
790 790
791 791 def _parse_comment_line_number(line_no):
792 792 """
793 793 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
794 794 """
795 795 old_line = None
796 796 new_line = None
797 797 if line_no.startswith('o'):
798 798 old_line = int(line_no[1:])
799 799 elif line_no.startswith('n'):
800 800 new_line = int(line_no[1:])
801 801 else:
802 802 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
803 803 return diffs.DiffLineNumber(old_line, new_line)
804 804
805 805
806 806 def _diff_to_comment_line_number(diff_line):
807 807 if diff_line.new is not None:
808 808 return u'n{}'.format(diff_line.new)
809 809 elif diff_line.old is not None:
810 810 return u'o{}'.format(diff_line.old)
811 811 return u''
812 812
813 813
814 814 def _diff_line_delta(a, b):
815 815 if None not in (a.new, b.new):
816 816 return abs(a.new - b.new)
817 817 elif None not in (a.old, b.old):
818 818 return abs(a.old - b.old)
819 819 else:
820 820 raise ValueError(
821 821 "Cannot compute delta between {} and {}".format(a, b))
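The helpers at the end of comment.py above encode an inline comment's position as 'o<line>' (old side of the diff) or 'n<line>' (new side), convert between that string form and diffs.DiffLineNumber, and pick the closest candidate line when a comment has to be relocated after a pull request update. A small self-contained sketch of that scheme; DiffLineNumber is re-declared here as a plain namedtuple stand-in for rhodecode.lib.diffs.DiffLineNumber, everything else mirrors the functions shown above:

import collections

# stand-in for rhodecode.lib.diffs.DiffLineNumber (old, new)
DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])

def parse_comment_line_number(line_no):
    # 'o12' -> DiffLineNumber(old=12, new=None), 'n7' -> DiffLineNumber(old=None, new=7)
    if line_no.startswith('o'):
        return DiffLineNumber(int(line_no[1:]), None)
    if line_no.startswith('n'):
        return DiffLineNumber(None, int(line_no[1:]))
    raise ValueError("Comment lines have to start with either 'o' or 'n'.")

def diff_line_delta(a, b):
    # distance between two positions, preferring the "new" side when both define it
    if None not in (a.new, b.new):
        return abs(a.new - b.new)
    if None not in (a.old, b.old):
        return abs(a.old - b.old)
    raise ValueError('Cannot compute delta between {} and {}'.format(a, b))

# a comment sitting on new-file line 14, with two candidate context matches
comment_pos = parse_comment_line_number('n14')
candidates = [DiffLineNumber(None, 11), DiffLineNumber(None, 15)]
closest = min(candidates, key=lambda c: diff_line_delta(comment_pos, c))
print(closest)  # DiffLineNumber(old=None, new=15), i.e. the comment moves to 'n15'

This is the same choice _choose_closest_diff_line() makes; _outdate_one_comment() only attempts relocation when _should_relocate_diff_line() reports the comment sits beyond the first DIFF_CONTEXT_BEFORE lines, otherwise it simply marks the comment as outdated.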
@@ -1,2235 +1,2235 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull request.
93 93 This is also used for the default reviewers logic
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # target commit becomes the source ref as it is the last commit
113 113 # for diff generation this logic gives proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 _compare_data = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message"]
161 161 )
162 162
163 163 for commit in _compare_data:
164 164 # NOTE(marcink): we serialize here so we don't produce more vcsserver calls on the data returned
165 165 # from this function, which is later passed through JSON serialization
166 166 serialized_commit = dict(
167 167 author=commit.author,
168 168 date=commit.date,
169 169 message=commit.message,
170 170 )
171 171 commits.append(serialized_commit)
172 172 user = User.get_from_cs_author(serialized_commit['author'])
173 173 if user and user not in commit_authors:
174 174 commit_authors.append(user)
175 175
176 176 # lines
177 177 if get_authors:
178 178 log.debug('Calculating authors of changed files')
179 179 target_commit = source_repo.get_commit(ancestor_id)
180 180
181 181 for fname, lines in changed_lines.items():
182 182
183 183 try:
184 184 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 185 except Exception:
186 186 log.exception("Failed to load node with path %s", fname)
187 187 continue
188 188
189 189 if not isinstance(node, FileNode):
190 190 continue
191 191
192 192 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 193 if node.is_binary:
194 194 author = node.last_commit.author
195 195 email = node.last_commit.author_email
196 196
197 197 user = User.get_from_cs_author(author)
198 198 if user:
199 199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 200 author_counts[author] = author_counts.get(author, 0) + 1
201 201 email_counts[email] = email_counts.get(email, 0) + 1
202 202
203 203 continue
204 204
205 205 for annotation in node.annotate:
206 206 line_no, commit_id, get_commit_func, line_text = annotation
207 207 if line_no in lines:
208 208 if commit_id not in _commit_cache:
209 209 _commit_cache[commit_id] = get_commit_func()
210 210 commit = _commit_cache[commit_id]
211 211 author = commit.author
212 212 email = commit.author_email
213 213 user = User.get_from_cs_author(author)
214 214 if user:
215 215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 216 author_counts[author] = author_counts.get(author, 0) + 1
217 217 email_counts[email] = email_counts.get(email, 0) + 1
218 218
219 219 log.debug('Default reviewers processing finished')
220 220
221 221 return {
222 222 'commits': commits,
223 223 'files': all_files_changes,
224 224 'stats': stats,
225 225 'ancestor': ancestor_id,
226 226 # original authors of modified files
227 227 'original_authors': {
228 228 'users': user_counts,
229 229 'authors': author_counts,
230 230 'emails': email_counts,
231 231 },
232 232 'commit_authors': commit_authors
233 233 }
234 234
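# --- illustrative sketch, not part of the original pull_request.py ---
# A hedged example of how a caller might consume the dict returned by
# get_diff_info() above. The keys used here ('stats', 'files',
# 'original_authors', 'commit_authors') are exactly the ones built in that
# function; how the default-reviewers logic actually ranks candidates is an
# assumption, not something shown in this diff.

def summarize_diff_info(diff_info):
    lines_added, lines_deleted = diff_info['stats']
    # 'users' maps user_id -> number of changed lines attributed to that user
    ranked = sorted(
        diff_info['original_authors']['users'].items(),
        key=lambda item: item[1], reverse=True)
    return {
        'files_touched': len(diff_info['files']),
        'lines_added': lines_added,
        'lines_deleted': lines_deleted,
        # people who authored the changed lines, most-touched first
        'candidate_reviewer_ids': [user_id for user_id, _count in ranked],
        # authors of the incoming commits (User objects)
        'commit_authors': diff_info['commit_authors'],
    }
# --- end of illustrative sketch ---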
235 235
236 236 class PullRequestModel(BaseModel):
237 237
238 238 cls = PullRequest
239 239
240 240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241 241
242 242 UPDATE_STATUS_MESSAGES = {
243 243 UpdateFailureReason.NONE: lazy_ugettext(
244 244 'Pull request update successful.'),
245 245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 246 'Pull request update failed because of an unknown error.'),
247 247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 248 'No update needed because the source and target have not changed.'),
249 249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 250 'Pull request cannot be updated because the reference type is '
251 251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 253 'This pull request cannot be updated because the target '
254 254 'reference is missing.'),
255 255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 256 'This pull request cannot be updated because the source '
257 257 'reference is missing.'),
258 258 }
259 259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261 261
262 262 def __get_pull_request(self, pull_request):
263 263 return self._get_instance((
264 264 PullRequest, PullRequestVersion), pull_request)
265 265
266 266 def _check_perms(self, perms, pull_request, user, api=False):
267 267 if not api:
268 268 return h.HasRepoPermissionAny(*perms)(
269 269 user=user, repo_name=pull_request.target_repo.repo_name)
270 270 else:
271 271 return h.HasRepoPermissionAnyApi(*perms)(
272 272 user=user, repo_name=pull_request.target_repo.repo_name)
273 273
274 274 def check_user_read(self, pull_request, user, api=False):
275 275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 276 return self._check_perms(_perms, pull_request, user, api)
277 277
278 278 def check_user_merge(self, pull_request, user, api=False):
279 279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 280 return self._check_perms(_perms, pull_request, user, api)
281 281
282 282 def check_user_update(self, pull_request, user, api=False):
283 283 owner = user.user_id == pull_request.user_id
284 284 return self.check_user_merge(pull_request, user, api) or owner
285 285
286 286 def check_user_delete(self, pull_request, user):
287 287 owner = user.user_id == pull_request.user_id
288 288 _perms = ('repository.admin',)
289 289 return self._check_perms(_perms, pull_request, user) or owner
290 290
291 291 def is_user_reviewer(self, pull_request, user):
292 292 return user.user_id in [
293 293 x.user_id for x in
294 294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 295 if x.user
296 296 ]
297 297
298 298 def check_user_change_status(self, pull_request, user, api=False):
299 299 return self.check_user_update(pull_request, user, api) \
300 300 or self.is_user_reviewer(pull_request, user)
301 301
302 302 def check_user_comment(self, pull_request, user):
303 303 owner = user.user_id == pull_request.user_id
304 304 return self.check_user_read(pull_request, user) or owner
305 305
306 306 def get(self, pull_request):
307 307 return self.__get_pull_request(pull_request)
308 308
309 309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 310 statuses=None, opened_by=None, order_by=None,
311 311 order_dir='desc', only_created=False):
312 312 repo = None
313 313 if repo_name:
314 314 repo = self._get_repo(repo_name)
315 315
316 316 q = PullRequest.query()
317 317
318 318 if search_q:
319 319 like_expression = u'%{}%'.format(safe_unicode(search_q))
320 320 q = q.join(User)
321 321 q = q.filter(or_(
322 322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 323 User.username.ilike(like_expression),
324 324 PullRequest.title.ilike(like_expression),
325 325 PullRequest.description.ilike(like_expression),
326 326 ))
327 327
328 328 # source or target
329 329 if repo and source:
330 330 q = q.filter(PullRequest.source_repo == repo)
331 331 elif repo:
332 332 q = q.filter(PullRequest.target_repo == repo)
333 333
334 334 # closed,opened
335 335 if statuses:
336 336 q = q.filter(PullRequest.status.in_(statuses))
337 337
338 338 # opened by filter
339 339 if opened_by:
340 340 q = q.filter(PullRequest.user_id.in_(opened_by))
341 341
342 342 # only get those that are in "created" state
343 343 if only_created:
344 344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345 345
346 346 if order_by:
347 347 order_map = {
348 348 'name_raw': PullRequest.pull_request_id,
349 349 'id': PullRequest.pull_request_id,
350 350 'title': PullRequest.title,
351 351 'updated_on_raw': PullRequest.updated_on,
352 352 'target_repo': PullRequest.target_repo_id
353 353 }
354 354 if order_dir == 'asc':
355 355 q = q.order_by(order_map[order_by].asc())
356 356 else:
357 357 q = q.order_by(order_map[order_by].desc())
358 358
359 359 return q
360 360
361 361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 362 opened_by=None):
363 363 """
364 364 Count the number of pull requests for a specific repository.
365 365
366 366 :param repo_name: target or source repo
367 367 :param search_q: filter by text
368 368 :param source: boolean flag to specify if repo_name refers to source
369 369 :param statuses: list of pull request statuses
370 370 :param opened_by: author user of the pull request
371 371 :returns: int number of pull requests
372 372 """
373 373 q = self._prepare_get_all_query(
374 374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 375 opened_by=opened_by)
376 376
377 377 return q.count()
378 378
379 379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 381 """
382 382 Get all pull requests for a specific repository.
383 383
384 384 :param repo_name: target or source repo
385 385 :param search_q: filter by text
386 386 :param source: boolean flag to specify if repo_name refers to source
387 387 :param statuses: list of pull request statuses
388 388 :param opened_by: author user of the pull request
389 389 :param offset: pagination offset
390 390 :param length: length of returned list
391 391 :param order_by: order of the returned list
392 392 :param order_dir: 'asc' or 'desc' ordering direction
393 393 :returns: list of pull requests
394 394 """
395 395 q = self._prepare_get_all_query(
396 396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398 398
399 399 if length:
400 400 pull_requests = q.limit(length).offset(offset).all()
401 401 else:
402 402 pull_requests = q.all()
403 403
404 404 return pull_requests
405 405
406 406 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
407 407 opened_by=None):
408 408 """
409 409 Count the number of pull requests for a specific repository that are
410 410 awaiting review.
411 411
412 412 :param repo_name: target or source repo
413 413 :param search_q: filter by text
414 414 :param source: boolean flag to specify if repo_name refers to source
415 415 :param statuses: list of pull request statuses
416 416 :param opened_by: author user of the pull request
417 417 :returns: int number of pull requests
418 418 """
419 419 pull_requests = self.get_awaiting_review(
420 420 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
421 421
422 422 return len(pull_requests)
423 423
424 424 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
425 425 opened_by=None, offset=0, length=None,
426 426 order_by=None, order_dir='desc'):
427 427 """
428 428 Get all pull requests for a specific repository that are awaiting
429 429 review.
430 430
431 431 :param repo_name: target or source repo
432 432 :param search_q: filter by text
433 433 :param source: boolean flag to specify if repo_name refers to source
434 434 :param statuses: list of pull request statuses
435 435 :param opened_by: author user of the pull request
436 436 :param offset: pagination offset
437 437 :param length: length of returned list
438 438 :param order_by: order of the returned list
439 439 :param order_dir: 'asc' or 'desc' ordering direction
440 440 :returns: list of pull requests
441 441 """
442 442 pull_requests = self.get_all(
443 443 repo_name, search_q=search_q, source=source, statuses=statuses,
444 444 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
445 445
446 446 _filtered_pull_requests = []
447 447 for pr in pull_requests:
448 448 status = pr.calculated_review_status()
449 449 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
450 450 ChangesetStatus.STATUS_UNDER_REVIEW]:
451 451 _filtered_pull_requests.append(pr)
452 452 if length:
453 453 return _filtered_pull_requests[offset:offset+length]
454 454 else:
455 455 return _filtered_pull_requests
456 456
457 457 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
458 458 opened_by=None, user_id=None):
459 459 """
460 460 Count the number of pull requests for a specific repository that are
461 461 awaiting review from a specific user.
462 462
463 463 :param repo_name: target or source repo
464 464 :param search_q: filter by text
465 465 :param source: boolean flag to specify if repo_name refers to source
466 466 :param statuses: list of pull request statuses
467 467 :param opened_by: author user of the pull request
468 468 :param user_id: reviewer user of the pull request
469 469 :returns: int number of pull requests
470 470 """
471 471 pull_requests = self.get_awaiting_my_review(
472 472 repo_name, search_q=search_q, source=source, statuses=statuses,
473 473 opened_by=opened_by, user_id=user_id)
474 474
475 475 return len(pull_requests)
476 476
477 477 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
478 478 opened_by=None, user_id=None, offset=0,
479 479 length=None, order_by=None, order_dir='desc'):
480 480 """
481 481 Get all pull requests for a specific repository that are awaiting
482 482 review from a specific user.
483 483
484 484 :param repo_name: target or source repo
485 485 :param search_q: filter by text
486 486 :param source: boolean flag to specify if repo_name refers to source
487 487 :param statuses: list of pull request statuses
488 488 :param opened_by: author user of the pull request
489 489 :param user_id: reviewer user of the pull request
490 490 :param offset: pagination offset
491 491 :param length: length of returned list
492 492 :param order_by: order of the returned list
493 493 :param order_dir: 'asc' or 'desc' ordering direction
494 494 :returns: list of pull requests
495 495 """
496 496 pull_requests = self.get_all(
497 497 repo_name, search_q=search_q, source=source, statuses=statuses,
498 498 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
499 499
500 500 _my = PullRequestModel().get_not_reviewed(user_id)
501 501 my_participation = []
502 502 for pr in pull_requests:
503 503 if pr in _my:
504 504 my_participation.append(pr)
505 505 _filtered_pull_requests = my_participation
506 506 if length:
507 507 return _filtered_pull_requests[offset:offset+length]
508 508 else:
509 509 return _filtered_pull_requests
510 510
511 511 def get_not_reviewed(self, user_id):
512 512 return [
513 513 x.pull_request for x in PullRequestReviewers.query().filter(
514 514 PullRequestReviewers.user_id == user_id).all()
515 515 ]
516 516
517 517 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
518 518 order_by=None, order_dir='desc'):
519 519 q = PullRequest.query()
520 520 if user_id:
521 521 reviewers_subquery = Session().query(
522 522 PullRequestReviewers.pull_request_id).filter(
523 523 PullRequestReviewers.user_id == user_id).subquery()
524 524 user_filter = or_(
525 525 PullRequest.user_id == user_id,
526 526 PullRequest.pull_request_id.in_(reviewers_subquery)
527 527 )
528 528 q = PullRequest.query().filter(user_filter)
529 529
530 530 # closed,opened
531 531 if statuses:
532 532 q = q.filter(PullRequest.status.in_(statuses))
533 533
534 534 if query:
535 535 like_expression = u'%{}%'.format(safe_unicode(query))
536 536 q = q.join(User)
537 537 q = q.filter(or_(
538 538 cast(PullRequest.pull_request_id, String).ilike(like_expression),
539 539 User.username.ilike(like_expression),
540 540 PullRequest.title.ilike(like_expression),
541 541 PullRequest.description.ilike(like_expression),
542 542 ))
543 543 if order_by:
544 544 order_map = {
545 545 'name_raw': PullRequest.pull_request_id,
546 546 'title': PullRequest.title,
547 547 'updated_on_raw': PullRequest.updated_on,
548 548 'target_repo': PullRequest.target_repo_id
549 549 }
550 550 if order_dir == 'asc':
551 551 q = q.order_by(order_map[order_by].asc())
552 552 else:
553 553 q = q.order_by(order_map[order_by].desc())
554 554
555 555 return q
556 556
557 557 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
558 558 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
559 559 return q.count()
560 560
561 561 def get_im_participating_in(
562 562 self, user_id=None, statuses=None, query='', offset=0,
563 563 length=None, order_by=None, order_dir='desc'):
564 564 """
565 565 Get all pull requests that I'm participating in, or have opened
566 566 """
567 567
568 568 q = self._prepare_participating_query(
569 569 user_id, statuses=statuses, query=query, order_by=order_by,
570 570 order_dir=order_dir)
571 571
572 572 if length:
573 573 pull_requests = q.limit(length).offset(offset).all()
574 574 else:
575 575 pull_requests = q.all()
576 576
577 577 return pull_requests
578 578
579 579 def get_versions(self, pull_request):
580 580 """
581 581 returns versions of the pull request sorted by ID ascending
582 582 """
583 583 return PullRequestVersion.query()\
584 584 .filter(PullRequestVersion.pull_request == pull_request)\
585 585 .order_by(PullRequestVersion.pull_request_version_id.asc())\
586 586 .all()
587 587
588 588 def get_pr_version(self, pull_request_id, version=None):
589 589 at_version = None
590 590
591 591 if version and version == 'latest':
592 592 pull_request_ver = PullRequest.get(pull_request_id)
593 593 pull_request_obj = pull_request_ver
594 594 _org_pull_request_obj = pull_request_obj
595 595 at_version = 'latest'
596 596 elif version:
597 597 pull_request_ver = PullRequestVersion.get_or_404(version)
598 598 pull_request_obj = pull_request_ver
599 599 _org_pull_request_obj = pull_request_ver.pull_request
600 600 at_version = pull_request_ver.pull_request_version_id
601 601 else:
602 602 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
603 603 pull_request_id)
604 604
605 605 pull_request_display_obj = PullRequest.get_pr_display_object(
606 606 pull_request_obj, _org_pull_request_obj)
607 607
608 608 return _org_pull_request_obj, pull_request_obj, \
609 609 pull_request_display_obj, at_version
610 610
611 611 def create(self, created_by, source_repo, source_ref, target_repo,
612 612 target_ref, revisions, reviewers, observers, title, description=None,
613 613 common_ancestor_id=None,
614 614 description_renderer=None,
615 615 reviewer_data=None, translator=None, auth_user=None):
616 616 translator = translator or get_current_request().translate
617 617
618 618 created_by_user = self._get_user(created_by)
619 619 auth_user = auth_user or created_by_user.AuthUser()
620 620 source_repo = self._get_repo(source_repo)
621 621 target_repo = self._get_repo(target_repo)
622 622
623 623 pull_request = PullRequest()
624 624 pull_request.source_repo = source_repo
625 625 pull_request.source_ref = source_ref
626 626 pull_request.target_repo = target_repo
627 627 pull_request.target_ref = target_ref
628 628 pull_request.revisions = revisions
629 629 pull_request.title = title
630 630 pull_request.description = description
631 631 pull_request.description_renderer = description_renderer
632 632 pull_request.author = created_by_user
633 633 pull_request.reviewer_data = reviewer_data
634 634 pull_request.pull_request_state = pull_request.STATE_CREATING
635 635 pull_request.common_ancestor_id = common_ancestor_id
636 636
637 637 Session().add(pull_request)
638 638 Session().flush()
639 639
640 640 reviewer_ids = set()
641 641 # members / reviewers
642 642 for reviewer_object in reviewers:
643 643 user_id, reasons, mandatory, role, rules = reviewer_object
644 644 user = self._get_user(user_id)
645 645
646 646 # skip duplicates
647 647 if user.user_id in reviewer_ids:
648 648 continue
649 649
650 650 reviewer_ids.add(user.user_id)
651 651
652 652 reviewer = PullRequestReviewers()
653 653 reviewer.user = user
654 654 reviewer.pull_request = pull_request
655 655 reviewer.reasons = reasons
656 656 reviewer.mandatory = mandatory
657 657 reviewer.role = role
658 658
659 659 # NOTE(marcink): pick only first rule for now
660 660 rule_id = list(rules)[0] if rules else None
661 661 rule = RepoReviewRule.get(rule_id) if rule_id else None
662 662 if rule:
663 663 review_group = rule.user_group_vote_rule(user_id)
664 664                 # we check if this particular reviewer is a member of a voting group
665 665 if review_group:
666 666 # NOTE(marcink):
667 667                     # the user can be a member of more than one group, but we pick the first,
668 668                     # same as the default reviewers algorithm does
669 669 review_group = review_group[0]
670 670
671 671 rule_data = {
672 672 'rule_name':
673 673 rule.review_rule_name,
674 674 'rule_user_group_entry_id':
675 675 review_group.repo_review_rule_users_group_id,
676 676 'rule_user_group_name':
677 677 review_group.users_group.users_group_name,
678 678 'rule_user_group_members':
679 679 [x.user.username for x in review_group.users_group.members],
680 680 'rule_user_group_members_id':
681 681 [x.user.user_id for x in review_group.users_group.members],
682 682 }
683 683 # e.g {'vote_rule': -1, 'mandatory': True}
684 684 rule_data.update(review_group.rule_data())
685 685
686 686 reviewer.rule_data = rule_data
687 687
688 688 Session().add(reviewer)
689 689 Session().flush()
690 690
691 691 for observer_object in observers:
692 692 user_id, reasons, mandatory, role, rules = observer_object
693 693 user = self._get_user(user_id)
694 694
695 695 # skip duplicates from reviewers
696 696 if user.user_id in reviewer_ids:
697 697 continue
698 698
699 699 #reviewer_ids.add(user.user_id)
700 700
701 701 observer = PullRequestReviewers()
702 702 observer.user = user
703 703 observer.pull_request = pull_request
704 704 observer.reasons = reasons
705 705 observer.mandatory = mandatory
706 706 observer.role = role
707 707
708 708 # NOTE(marcink): pick only first rule for now
709 709 rule_id = list(rules)[0] if rules else None
710 710 rule = RepoReviewRule.get(rule_id) if rule_id else None
711 711 if rule:
712 712 # TODO(marcink): do we need this for observers ??
713 713 pass
714 714
715 715 Session().add(observer)
716 716 Session().flush()
717 717
718 718 # Set approval status to "Under Review" for all commits which are
719 719 # part of this pull request.
720 720 ChangesetStatusModel().set_status(
721 721 repo=target_repo,
722 722 status=ChangesetStatus.STATUS_UNDER_REVIEW,
723 723 user=created_by_user,
724 724 pull_request=pull_request
725 725 )
726 726         # we commit early at this point. This has to do with the fact that the
727 727         # queries above do some row-locking, and because of that we need to commit
728 728         # and finish the transaction before the validate call below, which for
729 729         # large repos could take long and result in long row locks
730 730 Session().commit()
731 731
732 732 # prepare workspace, and run initial merge simulation. Set state during that
733 733 # operation
734 734 pull_request = PullRequest.get(pull_request.pull_request_id)
735 735
736 736         # set state to merging for the merge simulation; if it finishes, set it to
737 737         # created to mark that the simulation is working fine
738 738 with pull_request.set_state(PullRequest.STATE_MERGING,
739 739 final_state=PullRequest.STATE_CREATED) as state_obj:
740 740 MergeCheck.validate(
741 741 pull_request, auth_user=auth_user, translator=translator)
742 742
743 743 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
744 744 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
745 745
746 746 creation_data = pull_request.get_api_data(with_merge_state=False)
747 747 self._log_audit_action(
748 748 'repo.pull_request.create', {'data': creation_data},
749 749 auth_user, pull_request)
750 750
751 751 return pull_request
752 752
753 753 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
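        # Dispatch to the matching hooks_utils trigger function based on the action
        # name; unknown actions simply fall through to the final `else` and return.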
754 754 pull_request = self.__get_pull_request(pull_request)
755 755 target_scm = pull_request.target_repo.scm_instance()
756 756 if action == 'create':
757 757 trigger_hook = hooks_utils.trigger_create_pull_request_hook
758 758 elif action == 'merge':
759 759 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
760 760 elif action == 'close':
761 761 trigger_hook = hooks_utils.trigger_close_pull_request_hook
762 762 elif action == 'review_status_change':
763 763 trigger_hook = hooks_utils.trigger_review_pull_request_hook
764 764 elif action == 'update':
765 765 trigger_hook = hooks_utils.trigger_update_pull_request_hook
766 766 elif action == 'comment':
767 767 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
768 768 elif action == 'comment_edit':
769 769 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
770 770 else:
771 771 return
772 772
773 773 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
774 774 pull_request, action, trigger_hook)
775 775 trigger_hook(
776 776 username=user.username,
777 777 repo_name=pull_request.target_repo.repo_name,
778 778 repo_type=target_scm.alias,
779 779 pull_request=pull_request,
780 780 data=data)
781 781
782 782 def _get_commit_ids(self, pull_request):
783 783 """
784 784 Return the commit ids of the merged pull request.
785 785
786 786         This method does not yet deal correctly with the lack of autoupdates,
787 787         nor with implicit target updates.
788 788         For example: if a commit in the source repo is already in the target, it
789 789         will be reported anyway.
790 790 """
791 791 merge_rev = pull_request.merge_rev
792 792 if merge_rev is None:
793 793 raise ValueError('This pull request was not merged yet')
794 794
795 795 commit_ids = list(pull_request.revisions)
796 796 if merge_rev not in commit_ids:
797 797 commit_ids.append(merge_rev)
798 798
799 799 return commit_ids
800 800
801 801 def merge_repo(self, pull_request, user, extras):
802 802 log.debug("Merging pull request %s", pull_request.pull_request_id)
803 803 extras['user_agent'] = 'internal-merge'
804 804 merge_state = self._merge_pull_request(pull_request, user, extras)
805 805 if merge_state.executed:
806 806 log.debug("Merge was successful, updating the pull request comments.")
807 807 self._comment_and_close_pr(pull_request, user, merge_state)
808 808
809 809 self._log_audit_action(
810 810 'repo.pull_request.merge',
811 811 {'merge_state': merge_state.__dict__},
812 812 user, pull_request)
813 813
814 814 else:
815 815 log.warn("Merge failed, not updating the pull request.")
816 816 return merge_state
817 817
818 818 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
819 819 target_vcs = pull_request.target_repo.scm_instance()
820 820 source_vcs = pull_request.source_repo.scm_instance()
821 821
822 822 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
823 823 pr_id=pull_request.pull_request_id,
824 824 pr_title=pull_request.title,
825 825 source_repo=source_vcs.name,
826 826 source_ref_name=pull_request.source_ref_parts.name,
827 827 target_repo=target_vcs.name,
828 828 target_ref_name=pull_request.target_ref_parts.name,
829 829 )
830 830
831 831 workspace_id = self._workspace_id(pull_request)
832 832 repo_id = pull_request.target_repo.repo_id
833 833 use_rebase = self._use_rebase_for_merging(pull_request)
834 834 close_branch = self._close_branch_before_merging(pull_request)
835 835 user_name = self._user_name_for_merging(pull_request, user)
836 836
837 837 target_ref = self._refresh_reference(
838 838 pull_request.target_ref_parts, target_vcs)
839 839
840 840 callback_daemon, extras = prepare_callback_daemon(
841 841 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
842 842 host=vcs_settings.HOOKS_HOST,
843 843 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
844 844
845 845 with callback_daemon:
846 846 # TODO: johbo: Implement a clean way to run a config_override
847 847 # for a single call.
848 848 target_vcs.config.set(
849 849 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
850 850
851 851 merge_state = target_vcs.merge(
852 852 repo_id, workspace_id, target_ref, source_vcs,
853 853 pull_request.source_ref_parts,
854 854 user_name=user_name, user_email=user.email,
855 855 message=message, use_rebase=use_rebase,
856 856 close_branch=close_branch)
857 857 return merge_state
858 858
859 859 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
860 860 pull_request.merge_rev = merge_state.merge_ref.commit_id
861 861 pull_request.updated_on = datetime.datetime.now()
862 862 close_msg = close_msg or 'Pull request merged and closed'
863 863
864 864 CommentsModel().create(
865 865 text=safe_unicode(close_msg),
866 866 repo=pull_request.target_repo.repo_id,
867 867 user=user.user_id,
868 868 pull_request=pull_request.pull_request_id,
869 869 f_path=None,
870 870 line_no=None,
871 871 closing_pr=True
872 872 )
873 873
874 874 Session().add(pull_request)
875 875 Session().flush()
876 876 # TODO: paris: replace invalidation with less radical solution
877 877 ScmModel().mark_for_invalidation(
878 878 pull_request.target_repo.repo_name)
879 879 self.trigger_pull_request_hook(pull_request, user, 'merge')
880 880
881 881 def has_valid_update_type(self, pull_request):
882 882 source_ref_type = pull_request.source_ref_parts.type
883 883 return source_ref_type in self.REF_TYPES
884 884
885 885 def get_flow_commits(self, pull_request):
886 886
887 887 # source repo
888 888 source_ref_name = pull_request.source_ref_parts.name
889 889 source_ref_type = pull_request.source_ref_parts.type
890 890 source_ref_id = pull_request.source_ref_parts.commit_id
891 891 source_repo = pull_request.source_repo.scm_instance()
892 892
893 893 try:
894 894 if source_ref_type in self.REF_TYPES:
895 895 source_commit = source_repo.get_commit(source_ref_name)
896 896 else:
897 897 source_commit = source_repo.get_commit(source_ref_id)
898 898 except CommitDoesNotExistError:
899 899 raise SourceRefMissing()
900 900
901 901 # target repo
902 902 target_ref_name = pull_request.target_ref_parts.name
903 903 target_ref_type = pull_request.target_ref_parts.type
904 904 target_ref_id = pull_request.target_ref_parts.commit_id
905 905 target_repo = pull_request.target_repo.scm_instance()
906 906
907 907 try:
908 908 if target_ref_type in self.REF_TYPES:
909 909 target_commit = target_repo.get_commit(target_ref_name)
910 910 else:
911 911 target_commit = target_repo.get_commit(target_ref_id)
912 912 except CommitDoesNotExistError:
913 913 raise TargetRefMissing()
914 914
915 915 return source_commit, target_commit
916 916
917 917 def update_commits(self, pull_request, updating_user):
918 918 """
919 919 Get the updated list of commits for the pull request
920 920 and return the new pull request version and the list
921 921 of commits processed by this update action
922 922
923 923         updating_user is the user object that triggered the update
924 924 """
925 925 pull_request = self.__get_pull_request(pull_request)
926 926 source_ref_type = pull_request.source_ref_parts.type
927 927 source_ref_name = pull_request.source_ref_parts.name
928 928 source_ref_id = pull_request.source_ref_parts.commit_id
929 929
930 930 target_ref_type = pull_request.target_ref_parts.type
931 931 target_ref_name = pull_request.target_ref_parts.name
932 932 target_ref_id = pull_request.target_ref_parts.commit_id
933 933
934 934 if not self.has_valid_update_type(pull_request):
935 935 log.debug("Skipping update of pull request %s due to ref type: %s",
936 936 pull_request, source_ref_type)
937 937 return UpdateResponse(
938 938 executed=False,
939 939 reason=UpdateFailureReason.WRONG_REF_TYPE,
940 940 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
941 941 source_changed=False, target_changed=False)
942 942
943 943 try:
944 944 source_commit, target_commit = self.get_flow_commits(pull_request)
945 945 except SourceRefMissing:
946 946 return UpdateResponse(
947 947 executed=False,
948 948 reason=UpdateFailureReason.MISSING_SOURCE_REF,
949 949 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
950 950 source_changed=False, target_changed=False)
951 951 except TargetRefMissing:
952 952 return UpdateResponse(
953 953 executed=False,
954 954 reason=UpdateFailureReason.MISSING_TARGET_REF,
955 955 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
956 956 source_changed=False, target_changed=False)
957 957
958 958 source_changed = source_ref_id != source_commit.raw_id
959 959 target_changed = target_ref_id != target_commit.raw_id
960 960
961 961 if not (source_changed or target_changed):
962 962 log.debug("Nothing changed in pull request %s", pull_request)
963 963 return UpdateResponse(
964 964 executed=False,
965 965 reason=UpdateFailureReason.NO_CHANGE,
966 966 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
967 967                 source_changed=source_changed, target_changed=target_changed)
968 968
969 969 change_in_found = 'target repo' if target_changed else 'source repo'
970 970 log.debug('Updating pull request because of change in %s detected',
971 971 change_in_found)
972 972
973 973 # Finally there is a need for an update, in case of source change
974 974 # we create a new version, else just an update
975 975 if source_changed:
976 976 pull_request_version = self._create_version_from_snapshot(pull_request)
977 977 self._link_comments_to_version(pull_request_version)
978 978 else:
979 979 try:
980 980 ver = pull_request.versions[-1]
981 981 except IndexError:
982 982 ver = None
983 983
984 984 pull_request.pull_request_version_id = \
985 985 ver.pull_request_version_id if ver else None
986 986 pull_request_version = pull_request
987 987
988 988 source_repo = pull_request.source_repo.scm_instance()
989 989 target_repo = pull_request.target_repo.scm_instance()
990 990
991 991 # re-compute commit ids
992 992 old_commit_ids = pull_request.revisions
993 993 pre_load = ["author", "date", "message", "branch"]
994 994 commit_ranges = target_repo.compare(
995 995 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
996 996 pre_load=pre_load)
997 997
998 998 target_ref = target_commit.raw_id
999 999 source_ref = source_commit.raw_id
1000 1000 ancestor_commit_id = target_repo.get_common_ancestor(
1001 1001 target_ref, source_ref, source_repo)
1002 1002
1003 1003 if not ancestor_commit_id:
1004 1004 raise ValueError(
1005 1005 'cannot calculate diff info without a common ancestor. '
1006 1006 'Make sure both repositories are related, and have a common forking commit.')
1007 1007
1008 1008 pull_request.common_ancestor_id = ancestor_commit_id
1009 1009
1010 1010 pull_request.source_ref = '%s:%s:%s' % (
1011 1011 source_ref_type, source_ref_name, source_commit.raw_id)
1012 1012 pull_request.target_ref = '%s:%s:%s' % (
1013 1013 target_ref_type, target_ref_name, ancestor_commit_id)
1014 1014
1015 1015 pull_request.revisions = [
1016 1016 commit.raw_id for commit in reversed(commit_ranges)]
1017 1017 pull_request.updated_on = datetime.datetime.now()
1018 1018 Session().add(pull_request)
1019 1019 new_commit_ids = pull_request.revisions
1020 1020
1021 1021 old_diff_data, new_diff_data = self._generate_update_diffs(
1022 1022 pull_request, pull_request_version)
1023 1023
1024 1024 # calculate commit and file changes
1025 1025 commit_changes = self._calculate_commit_id_changes(
1026 1026 old_commit_ids, new_commit_ids)
1027 1027 file_changes = self._calculate_file_changes(
1028 1028 old_diff_data, new_diff_data)
1029 1029
1030 1030 # set comments as outdated if DIFFS changed
1031 1031 CommentsModel().outdate_comments(
1032 1032 pull_request, old_diff_data=old_diff_data,
1033 1033 new_diff_data=new_diff_data)
1034 1034
1035 1035 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1036 1036 file_node_changes = (
1037 1037 file_changes.added or file_changes.modified or file_changes.removed)
1038 1038 pr_has_changes = valid_commit_changes or file_node_changes
1039 1039
1040 1040 # Add an automatic comment to the pull request, in case
1041 1041 # anything has changed
1042 1042 if pr_has_changes:
1043 1043 update_comment = CommentsModel().create(
1044 1044 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1045 1045 repo=pull_request.target_repo,
1046 1046 user=pull_request.author,
1047 1047 pull_request=pull_request,
1048 1048 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1049 1049
1050 1050 # Update status to "Under Review" for added commits
1051 1051 for commit_id in commit_changes.added:
1052 1052 ChangesetStatusModel().set_status(
1053 1053 repo=pull_request.source_repo,
1054 1054 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1055 1055 comment=update_comment,
1056 1056 user=pull_request.author,
1057 1057 pull_request=pull_request,
1058 1058 revision=commit_id)
1059 1059
1060 1060 # send update email to users
1061 1061 try:
1062 1062 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1063 1063 ancestor_commit_id=ancestor_commit_id,
1064 1064 commit_changes=commit_changes,
1065 1065 file_changes=file_changes)
1066 1066 except Exception:
1067 1067 log.exception('Failed to send email notification to users')
1068 1068
1069 1069 log.debug(
1070 1070 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1071 1071 'removed_ids: %s', pull_request.pull_request_id,
1072 1072 commit_changes.added, commit_changes.common, commit_changes.removed)
1073 1073 log.debug(
1074 1074 'Updated pull request with the following file changes: %s',
1075 1075 file_changes)
1076 1076
1077 1077 log.info(
1078 1078 "Updated pull request %s from commit %s to commit %s, "
1079 1079 "stored new version %s of this pull request.",
1080 1080 pull_request.pull_request_id, source_ref_id,
1081 1081 pull_request.source_ref_parts.commit_id,
1082 1082 pull_request_version.pull_request_version_id)
1083 1083 Session().commit()
1084 1084 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1085 1085
1086 1086 return UpdateResponse(
1087 1087 executed=True, reason=UpdateFailureReason.NONE,
1088 1088 old=pull_request, new=pull_request_version,
1089 1089 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1090 1090 source_changed=source_changed, target_changed=target_changed)
1091 1091
1092 1092 def _create_version_from_snapshot(self, pull_request):
1093 1093 version = PullRequestVersion()
1094 1094 version.title = pull_request.title
1095 1095 version.description = pull_request.description
1096 1096 version.status = pull_request.status
1097 1097 version.pull_request_state = pull_request.pull_request_state
1098 1098 version.created_on = datetime.datetime.now()
1099 1099 version.updated_on = pull_request.updated_on
1100 1100 version.user_id = pull_request.user_id
1101 1101 version.source_repo = pull_request.source_repo
1102 1102 version.source_ref = pull_request.source_ref
1103 1103 version.target_repo = pull_request.target_repo
1104 1104 version.target_ref = pull_request.target_ref
1105 1105
1106 1106 version._last_merge_source_rev = pull_request._last_merge_source_rev
1107 1107 version._last_merge_target_rev = pull_request._last_merge_target_rev
1108 1108 version.last_merge_status = pull_request.last_merge_status
1109 1109 version.last_merge_metadata = pull_request.last_merge_metadata
1110 1110 version.shadow_merge_ref = pull_request.shadow_merge_ref
1111 1111 version.merge_rev = pull_request.merge_rev
1112 1112 version.reviewer_data = pull_request.reviewer_data
1113 1113
1114 1114 version.revisions = pull_request.revisions
1115 1115 version.common_ancestor_id = pull_request.common_ancestor_id
1116 1116 version.pull_request = pull_request
1117 1117 Session().add(version)
1118 1118 Session().flush()
1119 1119
1120 1120 return version
1121 1121
1122 1122 def _generate_update_diffs(self, pull_request, pull_request_version):
1123 1123
1124 1124 diff_context = (
1125 1125 self.DIFF_CONTEXT +
1126 1126 CommentsModel.needed_extra_diff_context())
1127 1127 hide_whitespace_changes = False
1128 1128 source_repo = pull_request_version.source_repo
1129 1129 source_ref_id = pull_request_version.source_ref_parts.commit_id
1130 1130 target_ref_id = pull_request_version.target_ref_parts.commit_id
1131 1131 old_diff = self._get_diff_from_pr_or_version(
1132 1132 source_repo, source_ref_id, target_ref_id,
1133 1133 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1134 1134
1135 1135 source_repo = pull_request.source_repo
1136 1136 source_ref_id = pull_request.source_ref_parts.commit_id
1137 1137 target_ref_id = pull_request.target_ref_parts.commit_id
1138 1138
1139 1139 new_diff = self._get_diff_from_pr_or_version(
1140 1140 source_repo, source_ref_id, target_ref_id,
1141 1141 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1142 1142
1143 1143 old_diff_data = diffs.DiffProcessor(old_diff)
1144 1144 old_diff_data.prepare()
1145 1145 new_diff_data = diffs.DiffProcessor(new_diff)
1146 1146 new_diff_data.prepare()
1147 1147
1148 1148 return old_diff_data, new_diff_data
1149 1149
1150 1150 def _link_comments_to_version(self, pull_request_version):
1151 1151 """
1152 1152 Link all unlinked comments of this pull request to the given version.
1153 1153
1154 1154 :param pull_request_version: The `PullRequestVersion` to which
1155 1155 the comments shall be linked.
1156 1156
1157 1157 """
1158 1158 pull_request = pull_request_version.pull_request
1159 1159 comments = ChangesetComment.query()\
1160 1160 .filter(
1161 1161 # TODO: johbo: Should we query for the repo at all here?
1162 1162 # Pending decision on how comments of PRs are to be related
1163 1163 # to either the source repo, the target repo or no repo at all.
1164 1164 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1165 1165 ChangesetComment.pull_request == pull_request,
1166 1166 ChangesetComment.pull_request_version == None)\
1167 1167 .order_by(ChangesetComment.comment_id.asc())
1168 1168
1169 1169 # TODO: johbo: Find out why this breaks if it is done in a bulk
1170 1170 # operation.
1171 1171 for comment in comments:
1172 1172 comment.pull_request_version_id = (
1173 1173 pull_request_version.pull_request_version_id)
1174 1174 Session().add(comment)
1175 1175
1176 1176 def _calculate_commit_id_changes(self, old_ids, new_ids):
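        # Plain list/set-difference comparison on commit id lists. Illustrative
        # example (ids assumed): old_ids=['a', 'b'], new_ids=['b', 'c'] gives
        # added=['c'], common=['b'], removed=['a'], total=['b', 'c'].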
1177 1177 added = [x for x in new_ids if x not in old_ids]
1178 1178 common = [x for x in new_ids if x in old_ids]
1179 1179 removed = [x for x in old_ids if x not in new_ids]
1180 1180 total = new_ids
1181 1181 return ChangeTuple(added, common, removed, total)
1182 1182
1183 1183 def _calculate_file_changes(self, old_diff_data, new_diff_data):
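        # Classify files by comparing per-file md5 hashes of the raw diffs: files
        # only in the new diff are added (or removed, per the diff ops), differing
        # hashes mean modified, and files left over from the old diff are removed.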
1184 1184
1185 1185 old_files = OrderedDict()
1186 1186 for diff_data in old_diff_data.parsed_diff:
1187 1187 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1188 1188
1189 1189 added_files = []
1190 1190 modified_files = []
1191 1191 removed_files = []
1192 1192 for diff_data in new_diff_data.parsed_diff:
1193 1193 new_filename = diff_data['filename']
1194 1194 new_hash = md5_safe(diff_data['raw_diff'])
1195 1195
1196 1196 old_hash = old_files.get(new_filename)
1197 1197 if not old_hash:
1198 1198                 # file is not present in the old diff, we have to figure out the
1199 1199                 # operation (ADD/REMOVE) from the parsed diff
1200 1200 operations_dict = diff_data['stats']['ops']
1201 1201 if diffs.DEL_FILENODE in operations_dict:
1202 1202 removed_files.append(new_filename)
1203 1203 else:
1204 1204 added_files.append(new_filename)
1205 1205 else:
1206 1206 if new_hash != old_hash:
1207 1207 modified_files.append(new_filename)
1208 1208 # now remove a file from old, since we have seen it already
1209 1209 del old_files[new_filename]
1210 1210
1211 1211         # removed files are those present in the old diff but not in the NEW one;
1212 1212         # since we drop old entries that are present in the new diff, any
1213 1213         # left-overs are the removed files
1214 1214 removed_files.extend(old_files.keys())
1215 1215
1216 1216 return FileChangeTuple(added_files, modified_files, removed_files)
1217 1217
1218 1218 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1219 1219 """
1220 1220 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1221 1221         so it always looks the same regardless of which default
1222 1222         renderer the system is using.
1223 1223
1224 1224 :param ancestor_commit_id: ancestor raw_id
1225 1225 :param changes: changes named tuple
1226 1226 :param file_changes: file changes named tuple
1227 1227
1228 1228 """
1229 1229 new_status = ChangesetStatus.get_status_lbl(
1230 1230 ChangesetStatus.STATUS_UNDER_REVIEW)
1231 1231
1232 1232 changed_files = (
1233 1233 file_changes.added + file_changes.modified + file_changes.removed)
1234 1234
1235 1235 params = {
1236 1236 'under_review_label': new_status,
1237 1237 'added_commits': changes.added,
1238 1238 'removed_commits': changes.removed,
1239 1239 'changed_files': changed_files,
1240 1240 'added_files': file_changes.added,
1241 1241 'modified_files': file_changes.modified,
1242 1242 'removed_files': file_changes.removed,
1243 1243 'ancestor_commit_id': ancestor_commit_id
1244 1244 }
1245 1245 renderer = RstTemplateRenderer()
1246 1246 return renderer.render('pull_request_update.mako', **params)
1247 1247
1248 1248 def edit(self, pull_request, title, description, description_renderer, user):
1249 1249 pull_request = self.__get_pull_request(pull_request)
1250 1250 old_data = pull_request.get_api_data(with_merge_state=False)
1251 1251 if pull_request.is_closed():
1252 1252 raise ValueError('This pull request is closed')
1253 1253 if title:
1254 1254 pull_request.title = title
1255 1255 pull_request.description = description
1256 1256 pull_request.updated_on = datetime.datetime.now()
1257 1257 pull_request.description_renderer = description_renderer
1258 1258 Session().add(pull_request)
1259 1259 self._log_audit_action(
1260 1260 'repo.pull_request.edit', {'old_data': old_data},
1261 1261 user, pull_request)
1262 1262
1263 1263 def update_reviewers(self, pull_request, reviewer_data, user):
1264 1264 """
1265 1265 Update the reviewers in the pull request
1266 1266
1267 1267 :param pull_request: the pr to update
1268 1268 :param reviewer_data: list of tuples
1269 1269 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1270 1270         :param user: current user who triggers this action
1271 1271 """
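        # Illustrative reviewer_data entry (values assumed):
        #   (2, ['default reviewer'], True, PullRequestReviewers.ROLE_REVIEWER, [])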
1272 1272
1273 1273 pull_request = self.__get_pull_request(pull_request)
1274 1274 if pull_request.is_closed():
1275 1275 raise ValueError('This pull request is closed')
1276 1276
1277 1277 reviewers = {}
1278 1278 for user_id, reasons, mandatory, role, rules in reviewer_data:
1279 1279 if isinstance(user_id, (int, compat.string_types)):
1280 1280 user_id = self._get_user(user_id).user_id
1281 1281 reviewers[user_id] = {
1282 1282 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1283 1283
1284 1284 reviewers_ids = set(reviewers.keys())
1285 1285 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1286 1286 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1287 1287
1288 1288 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1289 1289
1290 1290 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1291 1291 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1292 1292
1293 1293 log.debug("Adding %s reviewers", ids_to_add)
1294 1294 log.debug("Removing %s reviewers", ids_to_remove)
1295 1295 changed = False
1296 1296 added_audit_reviewers = []
1297 1297 removed_audit_reviewers = []
1298 1298
1299 1299 for uid in ids_to_add:
1300 1300 changed = True
1301 1301 _usr = self._get_user(uid)
1302 1302 reviewer = PullRequestReviewers()
1303 1303 reviewer.user = _usr
1304 1304 reviewer.pull_request = pull_request
1305 1305 reviewer.reasons = reviewers[uid]['reasons']
1306 1306 # NOTE(marcink): mandatory shouldn't be changed now
1307 1307             # reviewer.mandatory = reviewers[uid]['mandatory']
1308 1308 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1309 1309 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1310 1310 Session().add(reviewer)
1311 1311 added_audit_reviewers.append(reviewer.get_dict())
1312 1312
1313 1313 for uid in ids_to_remove:
1314 1314 changed = True
1315 1315 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1316 1316 # This is an edge case that handles previous state of having the same reviewer twice.
1317 1317 # this CAN happen due to the lack of DB checks
1318 1318 reviewers = PullRequestReviewers.query()\
1319 1319 .filter(PullRequestReviewers.user_id == uid,
1320 1320 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1321 1321 PullRequestReviewers.pull_request == pull_request)\
1322 1322 .all()
1323 1323
1324 1324 for obj in reviewers:
1325 1325                 removed_audit_reviewers.append(obj.get_dict())
1326 1326 Session().delete(obj)
1327 1327
1328 1328 if changed:
1329 1329 Session().expire_all()
1330 1330 pull_request.updated_on = datetime.datetime.now()
1331 1331 Session().add(pull_request)
1332 1332
1333 1333 # finally store audit logs
1334 1334 for user_data in added_audit_reviewers:
1335 1335 self._log_audit_action(
1336 1336 'repo.pull_request.reviewer.add', {'data': user_data},
1337 1337 user, pull_request)
1338 1338 for user_data in removed_audit_reviewers:
1339 1339 self._log_audit_action(
1340 1340 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1341 1341 user, pull_request)
1342 1342
1343 1343 self.notify_reviewers(pull_request, ids_to_add, user.get_instance())
1344 1344 return ids_to_add, ids_to_remove
1345 1345
1346 1346 def update_observers(self, pull_request, observer_data, user):
1347 1347 """
1348 1348 Update the observers in the pull request
1349 1349
1350 1350 :param pull_request: the pr to update
1351 1351 :param observer_data: list of tuples
1352 1352 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1353 1353         :param user: current user who triggers this action
1354 1354 """
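        # observer_data entries follow the same tuple layout as reviewer_data in
        # update_reviewers(), e.g. (3, ['observer of repo'], False,
        # PullRequestReviewers.ROLE_OBSERVER, []) -- values illustrative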
1355 1355 pull_request = self.__get_pull_request(pull_request)
1356 1356 if pull_request.is_closed():
1357 1357 raise ValueError('This pull request is closed')
1358 1358
1359 1359 observers = {}
1360 1360 for user_id, reasons, mandatory, role, rules in observer_data:
1361 1361 if isinstance(user_id, (int, compat.string_types)):
1362 1362 user_id = self._get_user(user_id).user_id
1363 1363 observers[user_id] = {
1364 1364                 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1365 1365
1366 1366 observers_ids = set(observers.keys())
1367 1367 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1368 1368 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1369 1369
1370 1370 current_observers_ids = set([x.user.user_id for x in current_observers])
1371 1371
1372 1372 ids_to_add = observers_ids.difference(current_observers_ids)
1373 1373 ids_to_remove = current_observers_ids.difference(observers_ids)
1374 1374
1375 1375         log.debug("Adding %s observers", ids_to_add)
1376 1376         log.debug("Removing %s observers", ids_to_remove)
1377 1377 changed = False
1378 1378 added_audit_observers = []
1379 1379 removed_audit_observers = []
1380 1380
1381 1381 for uid in ids_to_add:
1382 1382 changed = True
1383 1383 _usr = self._get_user(uid)
1384 1384 observer = PullRequestReviewers()
1385 1385 observer.user = _usr
1386 1386 observer.pull_request = pull_request
1387 1387 observer.reasons = observers[uid]['reasons']
1388 1388 # NOTE(marcink): mandatory shouldn't be changed now
1389 1389             # observer.mandatory = observers[uid]['mandatory']
1390 1390
1391 1391 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1392 1392 observer.role = PullRequestReviewers.ROLE_OBSERVER
1393 1393 Session().add(observer)
1394 1394 added_audit_observers.append(observer.get_dict())
1395 1395
1396 1396 for uid in ids_to_remove:
1397 1397 changed = True
1398 1398 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1399 1399 # This is an edge case that handles previous state of having the same reviewer twice.
1400 1400 # this CAN happen due to the lack of DB checks
1401 1401 observers = PullRequestReviewers.query()\
1402 1402 .filter(PullRequestReviewers.user_id == uid,
1403 1403 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1404 1404 PullRequestReviewers.pull_request == pull_request)\
1405 1405 .all()
1406 1406
1407 1407 for obj in observers:
1408 1408                 removed_audit_observers.append(obj.get_dict())
1409 1409 Session().delete(obj)
1410 1410
1411 1411 if changed:
1412 1412 Session().expire_all()
1413 1413 pull_request.updated_on = datetime.datetime.now()
1414 1414 Session().add(pull_request)
1415 1415
1416 1416 # finally store audit logs
1417 1417 for user_data in added_audit_observers:
1418 1418 self._log_audit_action(
1419 1419 'repo.pull_request.observer.add', {'data': user_data},
1420 1420 user, pull_request)
1421 1421 for user_data in removed_audit_observers:
1422 1422 self._log_audit_action(
1423 1423 'repo.pull_request.observer.delete', {'old_data': user_data},
1424 1424 user, pull_request)
1425 1425
1426 1426 self.notify_observers(pull_request, ids_to_add, user.get_instance())
1427 1427 return ids_to_add, ids_to_remove
1428 1428
1429 1429 def get_url(self, pull_request, request=None, permalink=False):
1430 1430 if not request:
1431 1431 request = get_current_request()
1432 1432
1433 1433 if permalink:
1434 1434 return request.route_url(
1435 1435 'pull_requests_global',
1436 1436 pull_request_id=pull_request.pull_request_id,)
1437 1437 else:
1438 1438 return request.route_url('pullrequest_show',
1439 1439 repo_name=safe_str(pull_request.target_repo.repo_name),
1440 1440 pull_request_id=pull_request.pull_request_id,)
1441 1441
1442 1442 def get_shadow_clone_url(self, pull_request, request=None):
1443 1443 """
1444 1444         Returns a qualified URL pointing to the shadow repository. If this pull
1445 1445 request is closed there is no shadow repository and ``None`` will be
1446 1446 returned.
1447 1447 """
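        # The shadow repository URL is the regular pull request URL with
        # '/repository' appended, e.g.
        # https://code.example.com/repo-name/pull-request/1/repository (illustrative).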
1448 1448 if pull_request.is_closed():
1449 1449 return None
1450 1450 else:
1451 1451 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1452 1452 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1453 1453
1454 1454 def _notify_reviewers(self, pull_request, user_ids, role, user):
1455 1455 # notification to reviewers/observers
1456 1456 if not user_ids:
1457 1457 return
1458 1458
1459 1459 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1460 1460
1461 1461 pull_request_obj = pull_request
1462 1462 # get the current participants of this pull request
1463 1463 recipients = user_ids
1464 1464 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1465 1465
1466 1466 pr_source_repo = pull_request_obj.source_repo
1467 1467 pr_target_repo = pull_request_obj.target_repo
1468 1468
1469 1469 pr_url = h.route_url('pullrequest_show',
1470 1470 repo_name=pr_target_repo.repo_name,
1471 1471 pull_request_id=pull_request_obj.pull_request_id,)
1472 1472
1473 1473 # set some variables for email notification
1474 1474 pr_target_repo_url = h.route_url(
1475 1475 'repo_summary', repo_name=pr_target_repo.repo_name)
1476 1476
1477 1477 pr_source_repo_url = h.route_url(
1478 1478 'repo_summary', repo_name=pr_source_repo.repo_name)
1479 1479
1480 1480 # pull request specifics
1481 1481 pull_request_commits = [
1482 1482 (x.raw_id, x.message)
1483 1483 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1484 1484
1485 1485 current_rhodecode_user = user
1486 1486 kwargs = {
1487 1487 'user': current_rhodecode_user,
1488 1488 'pull_request_author': pull_request.author,
1489 1489 'pull_request': pull_request_obj,
1490 1490 'pull_request_commits': pull_request_commits,
1491 1491
1492 1492 'pull_request_target_repo': pr_target_repo,
1493 1493 'pull_request_target_repo_url': pr_target_repo_url,
1494 1494
1495 1495 'pull_request_source_repo': pr_source_repo,
1496 1496 'pull_request_source_repo_url': pr_source_repo_url,
1497 1497
1498 1498 'pull_request_url': pr_url,
1499 1499 'thread_ids': [pr_url],
1500 1500 'user_role': role
1501 1501 }
1502 1502
1503 1503 # pre-generate the subject for notification itself
1504 1504 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1505 1505 notification_type, **kwargs)
1506 1506
1507 1507 # create notification objects, and emails
1508 1508 NotificationModel().create(
1509 1509 created_by=current_rhodecode_user,
1510 1510 notification_subject=subject,
1511 1511 notification_body=body_plaintext,
1512 1512 notification_type=notification_type,
1513 1513 recipients=recipients,
1514 1514 email_kwargs=kwargs,
1515 1515 )
1516 1516
1517 1517 def notify_reviewers(self, pull_request, reviewers_ids, user):
1518 1518 return self._notify_reviewers(pull_request, reviewers_ids,
1519 1519 PullRequestReviewers.ROLE_REVIEWER, user)
1520 1520
1521 1521 def notify_observers(self, pull_request, observers_ids, user):
1522 1522 return self._notify_reviewers(pull_request, observers_ids,
1523 1523 PullRequestReviewers.ROLE_OBSERVER, user)
1524 1524
1525 1525 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1526 1526 commit_changes, file_changes):
1527 1527
1528 1528 updating_user_id = updating_user.user_id
1529 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1529 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1530 1530 # NOTE(marcink): send notification to all other users except to
1531 1531 # person who updated the PR
1532 1532 recipients = reviewers.difference(set([updating_user_id]))
1533 1533
1534 1534 log.debug('Notify following recipients about pull-request update %s', recipients)
1535 1535
1536 1536 pull_request_obj = pull_request
1537 1537
1538 1538 # send email about the update
1539 1539 changed_files = (
1540 1540 file_changes.added + file_changes.modified + file_changes.removed)
1541 1541
1542 1542 pr_source_repo = pull_request_obj.source_repo
1543 1543 pr_target_repo = pull_request_obj.target_repo
1544 1544
1545 1545 pr_url = h.route_url('pullrequest_show',
1546 1546 repo_name=pr_target_repo.repo_name,
1547 1547 pull_request_id=pull_request_obj.pull_request_id,)
1548 1548
1549 1549 # set some variables for email notification
1550 1550 pr_target_repo_url = h.route_url(
1551 1551 'repo_summary', repo_name=pr_target_repo.repo_name)
1552 1552
1553 1553 pr_source_repo_url = h.route_url(
1554 1554 'repo_summary', repo_name=pr_source_repo.repo_name)
1555 1555
1556 1556 email_kwargs = {
1557 1557 'date': datetime.datetime.now(),
1558 1558 'updating_user': updating_user,
1559 1559
1560 1560 'pull_request': pull_request_obj,
1561 1561
1562 1562 'pull_request_target_repo': pr_target_repo,
1563 1563 'pull_request_target_repo_url': pr_target_repo_url,
1564 1564
1565 1565 'pull_request_source_repo': pr_source_repo,
1566 1566 'pull_request_source_repo_url': pr_source_repo_url,
1567 1567
1568 1568 'pull_request_url': pr_url,
1569 1569
1570 1570 'ancestor_commit_id': ancestor_commit_id,
1571 1571 'added_commits': commit_changes.added,
1572 1572 'removed_commits': commit_changes.removed,
1573 1573 'changed_files': changed_files,
1574 1574 'added_files': file_changes.added,
1575 1575 'modified_files': file_changes.modified,
1576 1576 'removed_files': file_changes.removed,
1577 1577 'thread_ids': [pr_url],
1578 1578 }
1579 1579
1580 1580 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1581 1581 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1582 1582
1583 1583 # create notification objects, and emails
1584 1584 NotificationModel().create(
1585 1585 created_by=updating_user,
1586 1586 notification_subject=subject,
1587 1587 notification_body=body_plaintext,
1588 1588 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1589 1589 recipients=recipients,
1590 1590 email_kwargs=email_kwargs,
1591 1591 )
1592 1592
1593 1593 def delete(self, pull_request, user=None):
1594 1594 if not user:
1595 1595 user = getattr(get_current_rhodecode_user(), 'username', None)
1596 1596
1597 1597 pull_request = self.__get_pull_request(pull_request)
1598 1598 old_data = pull_request.get_api_data(with_merge_state=False)
1599 1599 self._cleanup_merge_workspace(pull_request)
1600 1600 self._log_audit_action(
1601 1601 'repo.pull_request.delete', {'old_data': old_data},
1602 1602 user, pull_request)
1603 1603 Session().delete(pull_request)
1604 1604
1605 1605 def close_pull_request(self, pull_request, user):
1606 1606 pull_request = self.__get_pull_request(pull_request)
1607 1607 self._cleanup_merge_workspace(pull_request)
1608 1608 pull_request.status = PullRequest.STATUS_CLOSED
1609 1609 pull_request.updated_on = datetime.datetime.now()
1610 1610 Session().add(pull_request)
1611 1611 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1612 1612
1613 1613 pr_data = pull_request.get_api_data(with_merge_state=False)
1614 1614 self._log_audit_action(
1615 1615 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1616 1616
1617 1617 def close_pull_request_with_comment(
1618 1618 self, pull_request, user, repo, message=None, auth_user=None):
1619 1619
1620 1620 pull_request_review_status = pull_request.calculated_review_status()
1621 1621
1622 1622 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1623 1623 # approved only if we have voting consent
1624 1624 status = ChangesetStatus.STATUS_APPROVED
1625 1625 else:
1626 1626 status = ChangesetStatus.STATUS_REJECTED
1627 1627 status_lbl = ChangesetStatus.get_status_lbl(status)
1628 1628
1629 1629 default_message = (
1630 1630 'Closing with status change {transition_icon} {status}.'
1631 1631 ).format(transition_icon='>', status=status_lbl)
1632 1632 text = message or default_message
1633 1633
1634 1634 # create a comment, and link it to new status
1635 1635 comment = CommentsModel().create(
1636 1636 text=text,
1637 1637 repo=repo.repo_id,
1638 1638 user=user.user_id,
1639 1639 pull_request=pull_request.pull_request_id,
1640 1640 status_change=status_lbl,
1641 1641 status_change_type=status,
1642 1642 closing_pr=True,
1643 1643 auth_user=auth_user,
1644 1644 )
1645 1645
1646 1646 # calculate old status before we change it
1647 1647 old_calculated_status = pull_request.calculated_review_status()
1648 1648 ChangesetStatusModel().set_status(
1649 1649 repo.repo_id,
1650 1650 status,
1651 1651 user.user_id,
1652 1652 comment=comment,
1653 1653 pull_request=pull_request.pull_request_id
1654 1654 )
1655 1655
1656 1656 Session().flush()
1657 1657
1658 1658 self.trigger_pull_request_hook(pull_request, user, 'comment',
1659 1659 data={'comment': comment})
1660 1660
1661 1661 # we now calculate the status of pull request again, and based on that
1662 1662         # calculation trigger status change. This might happen in cases
1663 1663         # where a non-reviewer admin closes a PR, which means their vote doesn't
1664 1664         # change the status, while if they're a reviewer this might change it.
1665 1665 calculated_status = pull_request.calculated_review_status()
1666 1666 if old_calculated_status != calculated_status:
1667 1667 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1668 1668 data={'status': calculated_status})
1669 1669
1670 1670 # finally close the PR
1671 1671 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1672 1672
1673 1673 return comment, status
1674 1674
1675 1675 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1676 1676 _ = translator or get_current_request().translate
1677 1677
1678 1678 if not self._is_merge_enabled(pull_request):
1679 1679 return None, False, _('Server-side pull request merging is disabled.')
1680 1680
1681 1681 if pull_request.is_closed():
1682 1682 return None, False, _('This pull request is closed.')
1683 1683
1684 1684 merge_possible, msg = self._check_repo_requirements(
1685 1685 target=pull_request.target_repo, source=pull_request.source_repo,
1686 1686 translator=_)
1687 1687 if not merge_possible:
1688 1688 return None, merge_possible, msg
1689 1689
1690 1690 try:
1691 1691 merge_response = self._try_merge(
1692 1692 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1693 1693 log.debug("Merge response: %s", merge_response)
1694 1694 return merge_response, merge_response.possible, merge_response.merge_status_message
1695 1695 except NotImplementedError:
1696 1696 return None, False, _('Pull request merging is not supported.')
1697 1697
1698 1698 def _check_repo_requirements(self, target, source, translator):
1699 1699 """
1700 1700 Check if `target` and `source` have compatible requirements.
1701 1701
1702 1702 Currently this is just checking for largefiles.
1703 1703 """
1704 1704 _ = translator
1705 1705 target_has_largefiles = self._has_largefiles(target)
1706 1706 source_has_largefiles = self._has_largefiles(source)
1707 1707 merge_possible = True
1708 1708 message = u''
1709 1709
1710 1710 if target_has_largefiles != source_has_largefiles:
1711 1711 merge_possible = False
1712 1712 if source_has_largefiles:
1713 1713 message = _(
1714 1714 'Target repository large files support is disabled.')
1715 1715 else:
1716 1716 message = _(
1717 1717 'Source repository large files support is disabled.')
1718 1718
1719 1719 return merge_possible, message
1720 1720
1721 1721 def _has_largefiles(self, repo):
1722 1722 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1723 1723 'extensions', 'largefiles')
1724 1724 return largefiles_ui and largefiles_ui[0].active
1725 1725
1726 1726 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1727 1727 """
1728 1728 Try to merge the pull request and return the merge status.
1729 1729 """
1730 1730 log.debug(
1731 1731 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1732 1732 pull_request.pull_request_id, force_shadow_repo_refresh)
1733 1733 target_vcs = pull_request.target_repo.scm_instance()
1734 1734 # Refresh the target reference.
1735 1735 try:
1736 1736 target_ref = self._refresh_reference(
1737 1737 pull_request.target_ref_parts, target_vcs)
1738 1738 except CommitDoesNotExistError:
1739 1739 merge_state = MergeResponse(
1740 1740 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1741 1741 metadata={'target_ref': pull_request.target_ref_parts})
1742 1742 return merge_state
1743 1743
1744 1744 target_locked = pull_request.target_repo.locked
1745 1745 if target_locked and target_locked[0]:
1746 1746 locked_by = 'user:{}'.format(target_locked[0])
1747 1747 log.debug("The target repository is locked by %s.", locked_by)
1748 1748 merge_state = MergeResponse(
1749 1749 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1750 1750 metadata={'locked_by': locked_by})
1751 1751 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1752 1752 pull_request, target_ref):
1753 1753 log.debug("Refreshing the merge status of the repository.")
1754 1754 merge_state = self._refresh_merge_state(
1755 1755 pull_request, target_vcs, target_ref)
1756 1756 else:
1757 1757 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1758 1758 metadata = {
1759 1759 'unresolved_files': '',
1760 1760 'target_ref': pull_request.target_ref_parts,
1761 1761 'source_ref': pull_request.source_ref_parts,
1762 1762 }
1763 1763 if pull_request.last_merge_metadata:
1764 1764 metadata.update(pull_request.last_merge_metadata_parsed)
1765 1765
1766 1766 if not possible and target_ref.type == 'branch':
1767 1767 # NOTE(marcink): case for mercurial multiple heads on branch
1768 1768 heads = target_vcs._heads(target_ref.name)
1769 1769 if len(heads) != 1:
1770 1770 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1771 1771 metadata.update({
1772 1772 'heads': heads
1773 1773 })
1774 1774
1775 1775 merge_state = MergeResponse(
1776 1776 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1777 1777
1778 1778 return merge_state
1779 1779
1780 1780 def _refresh_reference(self, reference, vcs_repository):
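        # For refs whose type is in UPDATABLE_REF_TYPES (typically branch-like refs)
        # resolve by name so newly pushed commits are picked up; otherwise fall back
        # to the recorded commit id.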
1781 1781 if reference.type in self.UPDATABLE_REF_TYPES:
1782 1782 name_or_id = reference.name
1783 1783 else:
1784 1784 name_or_id = reference.commit_id
1785 1785
1786 1786 refreshed_commit = vcs_repository.get_commit(name_or_id)
1787 1787 refreshed_reference = Reference(
1788 1788 reference.type, reference.name, refreshed_commit.raw_id)
1789 1789 return refreshed_reference
1790 1790
1791 1791 def _needs_merge_state_refresh(self, pull_request, target_reference):
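        # A refresh is needed unless the newest source revision and the target
        # reference commit still match the values recorded at the last merge check.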
1792 1792         return not (
1793 1793 pull_request.revisions and
1794 1794 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1795 1795 target_reference.commit_id == pull_request._last_merge_target_rev)
1796 1796
1797 1797 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1798 1798 workspace_id = self._workspace_id(pull_request)
1799 1799 source_vcs = pull_request.source_repo.scm_instance()
1800 1800 repo_id = pull_request.target_repo.repo_id
1801 1801 use_rebase = self._use_rebase_for_merging(pull_request)
1802 1802 close_branch = self._close_branch_before_merging(pull_request)
1803 1803 merge_state = target_vcs.merge(
1804 1804 repo_id, workspace_id,
1805 1805 target_reference, source_vcs, pull_request.source_ref_parts,
1806 1806 dry_run=True, use_rebase=use_rebase,
1807 1807 close_branch=close_branch)
1808 1808
1809 1809 # Do not store the response if there was an unknown error.
1810 1810 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1811 1811 pull_request._last_merge_source_rev = \
1812 1812 pull_request.source_ref_parts.commit_id
1813 1813 pull_request._last_merge_target_rev = target_reference.commit_id
1814 1814 pull_request.last_merge_status = merge_state.failure_reason
1815 1815 pull_request.last_merge_metadata = merge_state.metadata
1816 1816
1817 1817 pull_request.shadow_merge_ref = merge_state.merge_ref
1818 1818 Session().add(pull_request)
1819 1819 Session().commit()
1820 1820
1821 1821 return merge_state
1822 1822
1823 1823 def _workspace_id(self, pull_request):
1824 1824 workspace_id = 'pr-%s' % pull_request.pull_request_id
1825 1825 return workspace_id
1826 1826
1827 1827 def generate_repo_data(self, repo, commit_id=None, branch=None,
1828 1828 bookmark=None, translator=None):
1829 1829 from rhodecode.model.repo import RepoModel
1830 1830
1831 1831 all_refs, selected_ref = \
1832 1832 self._get_repo_pullrequest_sources(
1833 1833 repo.scm_instance(), commit_id=commit_id,
1834 1834 branch=branch, bookmark=bookmark, translator=translator)
1835 1835
1836 1836 refs_select2 = []
1837 1837 for element in all_refs:
1838 1838 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1839 1839 refs_select2.append({'text': element[1], 'children': children})
1840 1840
1841 1841 return {
1842 1842 'user': {
1843 1843 'user_id': repo.user.user_id,
1844 1844 'username': repo.user.username,
1845 1845 'firstname': repo.user.first_name,
1846 1846 'lastname': repo.user.last_name,
1847 1847 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1848 1848 },
1849 1849 'name': repo.repo_name,
1850 1850 'link': RepoModel().get_url(repo),
1851 1851 'description': h.chop_at_smart(repo.description_safe, '\n'),
1852 1852 'refs': {
1853 1853 'all_refs': all_refs,
1854 1854 'selected_ref': selected_ref,
1855 1855 'select2_refs': refs_select2
1856 1856 }
1857 1857 }
1858 1858
1859 1859 def generate_pullrequest_title(self, source, source_ref, target):
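        # e.g. u'my-repo#feature-branch to upstream-repo' (names illustrative)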
1860 1860 return u'{source}#{at_ref} to {target}'.format(
1861 1861 source=source,
1862 1862 at_ref=source_ref,
1863 1863 target=target,
1864 1864 )
1865 1865
1866 1866 def _cleanup_merge_workspace(self, pull_request):
1867 1867 # Merging related cleanup
1868 1868 repo_id = pull_request.target_repo.repo_id
1869 1869 target_scm = pull_request.target_repo.scm_instance()
1870 1870 workspace_id = self._workspace_id(pull_request)
1871 1871
1872 1872 try:
1873 1873 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1874 1874 except NotImplementedError:
1875 1875 pass
1876 1876
1877 1877 def _get_repo_pullrequest_sources(
1878 1878 self, repo, commit_id=None, branch=None, bookmark=None,
1879 1879 translator=None):
1880 1880 """
1881 1881 Return a structure with repo's interesting commits, suitable for
1882 1882 the selectors in pullrequest controller
1883 1883
1884 1884 :param commit_id: a commit that must be in the list somehow
1885 1885 and selected by default
1886 1886 :param branch: a branch that must be in the list and selected
1887 1887 by default - even if closed
1888 1888 :param bookmark: a bookmark that must be in the list and selected
1889 1889 """
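        # The ref keys built below have the form '<type>:<name>:<commit_id>', e.g.
        # u'branch:default:abc123' (commit id illustrative), grouped into
        # Bookmarks / Branches / Commit IDs / Closed Branches sections.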
1890 1890 _ = translator or get_current_request().translate
1891 1891
1892 1892 commit_id = safe_str(commit_id) if commit_id else None
1893 1893 branch = safe_unicode(branch) if branch else None
1894 1894 bookmark = safe_unicode(bookmark) if bookmark else None
1895 1895
1896 1896 selected = None
1897 1897
1898 1898 # order matters: first source that has commit_id in it will be selected
1899 1899 sources = []
1900 1900 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1901 1901 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1902 1902
1903 1903 if commit_id:
1904 1904 ref_commit = (h.short_id(commit_id), commit_id)
1905 1905 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1906 1906
1907 1907 sources.append(
1908 1908 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1909 1909 )
1910 1910
1911 1911 groups = []
1912 1912
1913 1913 for group_key, ref_list, group_name, match in sources:
1914 1914 group_refs = []
1915 1915 for ref_name, ref_id in ref_list:
1916 1916 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1917 1917 group_refs.append((ref_key, ref_name))
1918 1918
1919 1919 if not selected:
1920 1920 if set([commit_id, match]) & set([ref_id, ref_name]):
1921 1921 selected = ref_key
1922 1922
1923 1923 if group_refs:
1924 1924 groups.append((group_refs, group_name))
1925 1925
1926 1926 if not selected:
1927 1927 ref = commit_id or branch or bookmark
1928 1928 if ref:
1929 1929 raise CommitDoesNotExistError(
1930 1930 u'No commit refs could be found matching: {}'.format(ref))
1931 1931 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1932 1932 selected = u'branch:{}:{}'.format(
1933 1933 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1934 1934 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1935 1935 )
1936 1936 elif repo.commit_ids:
1937 1937 # make the user select in this case
1938 1938 selected = None
1939 1939 else:
1940 1940 raise EmptyRepositoryError()
1941 1941 return groups, selected
1942 1942
1943 1943 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1944 1944 hide_whitespace_changes, diff_context):
1945 1945
1946 1946 return self._get_diff_from_pr_or_version(
1947 1947 source_repo, source_ref_id, target_ref_id,
1948 1948 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1949 1949
1950 1950 def _get_diff_from_pr_or_version(
1951 1951 self, source_repo, source_ref_id, target_ref_id,
1952 1952 hide_whitespace_changes, diff_context):
1953 1953
1954 1954 target_commit = source_repo.get_commit(
1955 1955 commit_id=safe_str(target_ref_id))
1956 1956 source_commit = source_repo.get_commit(
1957 1957 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1958 1958 if isinstance(source_repo, Repository):
1959 1959 vcs_repo = source_repo.scm_instance()
1960 1960 else:
1961 1961 vcs_repo = source_repo
1962 1962
1963 1963 # TODO: johbo: In the context of an update, we cannot reach
1964 1964 # the old commit anymore with our normal mechanisms. It needs
1965 1965 # some sort of special support in the vcs layer to avoid this
1966 1966 # workaround.
1967 1967 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1968 1968 vcs_repo.alias == 'git'):
1969 1969 source_commit.raw_id = safe_str(source_ref_id)
1970 1970
1971 1971 log.debug('calculating diff between '
1972 1972                   'target_ref:%s and source_ref:%s for repo `%s`',
1973 1973 target_ref_id, source_ref_id,
1974 1974 safe_unicode(vcs_repo.path))
1975 1975
1976 1976 vcs_diff = vcs_repo.get_diff(
1977 1977 commit1=target_commit, commit2=source_commit,
1978 1978 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1979 1979 return vcs_diff
1980 1980
1981 1981 def _is_merge_enabled(self, pull_request):
1982 1982 return self._get_general_setting(
1983 1983 pull_request, 'rhodecode_pr_merge_enabled')
1984 1984
1985 1985 def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

    def _user_name_for_merging(self, pull_request, user):
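        """
        Resolves which attribute of ``user`` is used as the user name during
        a merge. The attribute can be overridden with the
        ``RC_MERGE_USER_NAME_ATTR`` environment variable and defaults to
        ``short_contact``.
        """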
        env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
        if env_user_name_attr and hasattr(user, env_user_name_attr):
            user_name_attr = env_user_name_attr
        else:
            user_name_attr = 'short_contact'

        user_name = getattr(user, user_name_attr)
        return user_name

    def _close_branch_before_merging(self, pull_request):
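        """
        Reads the vcs settings of the target repo to decide whether the
        source branch should be closed as part of the merge; only hg and git
        expose this setting, anything else returns False.
        """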
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
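        """
        Reads a single general vcs setting of the pull request target repo,
        returning ``default`` when the key is not set.
        """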
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches the functions used for validating and fetching default
        reviewers. If available we use the EE package, otherwise we fall
        back to the CE package functions.
        """
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
            from rc_reviewers.utils import validate_observers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers
            from rhodecode.apps.repository.utils import validate_observers

        return get_default_reviewers_data, validate_default_reviewers, validate_observers


class MergeCheck(object):
    """
    Performs merge checks and returns a check object which stores
    information about merge errors and merge conditions.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
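        """
        Records a failed check: ``error_type`` is 'error' or 'warning',
        ``error_key`` is one of the ``*_CHECK`` constants and ``details``
        carries the check-specific payload.
        """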
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
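        """
        Runs all merge checks for ``pull_request`` in order: WIP marker,
        merge permission, target branch rules, review status, unresolved
        TODOs, and finally the merge simulation itself. When ``fail_early``
        is set the first failed check returns immediately.
        """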
        _ = translator
        merge_check = cls()

        # title carries the WIP: marker
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents an accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, user has no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial, when the target ref is a bookmark, we can always
            # figure out the branch from the commit itself
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
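        """
        Returns informational merge conditions (merge strategy, and optional
        closing/deletion of the source branch) derived from the target repo
        settings.
        """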
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details


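# simple containers used to summarize commit changes (added/common/removed)
# and file changes (added/modified/removed) of a pull request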
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])