pull-requests: add information about changes in source repositories in pull-request show page....
dan
r4317:b8d0e5ed default
@@ -1,1506 +1,1508 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 44 RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 48 ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat.: for OLD PRs we use a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.pull_request_state,
112 112 pr.work_in_progress, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render('pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'state': pr.pull_request_state,
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 @LoginRequired()
142 142 @HasRepoPermissionAnyDecorator(
143 143 'repository.read', 'repository.write', 'repository.admin')
144 144 @view_config(
145 145 route_name='pullrequest_show_all', request_method='GET',
146 146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 147 def pull_request_list(self):
148 148 c = self.load_default_context()
149 149
150 150 req_get = self.request.GET
151 151 c.source = str2bool(req_get.get('source'))
152 152 c.closed = str2bool(req_get.get('closed'))
153 153 c.my = str2bool(req_get.get('my'))
154 154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 156
157 157 c.active = 'open'
158 158 if c.my:
159 159 c.active = 'my'
160 160 if c.closed:
161 161 c.active = 'closed'
162 162 if c.awaiting_review and not c.source:
163 163 c.active = 'awaiting'
164 164 if c.source and not c.awaiting_review:
165 165 c.active = 'source'
166 166 if c.awaiting_my_review:
167 167 c.active = 'awaiting_my'
168 168
169 169 return self._get_template_context(c)
170 170
171 171 @LoginRequired()
172 172 @HasRepoPermissionAnyDecorator(
173 173 'repository.read', 'repository.write', 'repository.admin')
174 174 @view_config(
175 175 route_name='pullrequest_show_all_data', request_method='GET',
176 176 renderer='json_ext', xhr=True)
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 213 source_ref_id, target_ref_id,
214 214 target_commit, source_commit, diff_limit, file_limit,
215 215 fulldiff, hide_whitespace_changes, diff_context):
216 216
217 217 vcs_diff = PullRequestModel().get_diff(
218 218 source_repo, source_ref_id, target_ref_id,
219 219 hide_whitespace_changes, diff_context)
220 220
221 221 diff_processor = diffs.DiffProcessor(
222 222 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 223 file_limit=file_limit, show_full_diff=fulldiff)
224 224
225 225 _parsed = diff_processor.prepare()
226 226
227 227 diffset = codeblocks.DiffSet(
228 228 repo_name=self.db_repo_name,
229 229 source_repo_name=source_repo_name,
230 230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 232 )
233 233 diffset = self.path_filter.render_patchset_filtered(
234 234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235 235
236 236 return diffset
237 237
238 238 def _get_range_diffset(self, source_scm, source_repo,
239 239 commit1, commit2, diff_limit, file_limit,
240 240 fulldiff, hide_whitespace_changes, diff_context):
241 241 vcs_diff = source_scm.get_diff(
242 242 commit1, commit2,
243 243 ignore_whitespace=hide_whitespace_changes,
244 244 context=diff_context)
245 245
246 246 diff_processor = diffs.DiffProcessor(
247 247 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 248 file_limit=file_limit, show_full_diff=fulldiff)
249 249
250 250 _parsed = diff_processor.prepare()
251 251
252 252 diffset = codeblocks.DiffSet(
253 253 repo_name=source_repo.repo_name,
254 254 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 255 target_node_getter=codeblocks.diffset_node_getter(commit2))
256 256
257 257 diffset = self.path_filter.render_patchset_filtered(
258 258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259 259
260 260 return diffset
261 261
262 262 @LoginRequired()
263 263 @HasRepoPermissionAnyDecorator(
264 264 'repository.read', 'repository.write', 'repository.admin')
265 265 @view_config(
266 266 route_name='pullrequest_show', request_method='GET',
267 267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 268 def pull_request_show(self):
269 269 _ = self.request.translate
270 270 c = self.load_default_context()
271 271
272 272 pull_request = PullRequest.get_or_404(
273 273 self.request.matchdict['pull_request_id'])
274 274 pull_request_id = pull_request.pull_request_id
275 275
276 276 c.state_progressing = pull_request.is_state_changing()
277 277
278 278 _new_state = {
279 279 'created': PullRequest.STATE_CREATED,
280 280 }.get(self.request.GET.get('force_state'))
281 281 if c.is_super_admin and _new_state:
282 282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 283 h.flash(
284 284 _('Pull Request state was force changed to `{}`').format(_new_state),
285 285 category='success')
286 286 Session().commit()
287 287
288 288 raise HTTPFound(h.route_path(
289 289 'pullrequest_show', repo_name=self.db_repo_name,
290 290 pull_request_id=pull_request_id))
291 291
292 292 version = self.request.GET.get('version')
293 293 from_version = self.request.GET.get('from_version') or version
294 294 merge_checks = self.request.GET.get('merge_checks')
295 295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296 296
297 297 # fetch global flags of ignore ws or context lines
298 298 diff_context = diffs.get_diff_context(self.request)
299 299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300 300
301 301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302 302
303 303 (pull_request_latest,
304 304 pull_request_at_ver,
305 305 pull_request_display_obj,
306 306 at_version) = PullRequestModel().get_pr_version(
307 307 pull_request_id, version=version)
308 308 pr_closed = pull_request_latest.is_closed()
309 309
310 310 if pr_closed and (version or from_version):
311 311 # do not allow browsing versions of a closed pull request
312 312 raise HTTPFound(h.route_path(
313 313 'pullrequest_show', repo_name=self.db_repo_name,
314 314 pull_request_id=pull_request_id))
315 315
316 316 versions = pull_request_display_obj.versions()
317 317 # used to store per-commit range diffs
318 318 c.changes = collections.OrderedDict()
319 319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320 320
321 321 c.at_version = at_version
322 322 c.at_version_num = (at_version
323 323 if at_version and at_version != 'latest'
324 324 else None)
325 325 c.at_version_pos = ChangesetComment.get_index_from_version(
326 326 c.at_version_num, versions)
327 327
328 328 (prev_pull_request_latest,
329 329 prev_pull_request_at_ver,
330 330 prev_pull_request_display_obj,
331 331 prev_at_version) = PullRequestModel().get_pr_version(
332 332 pull_request_id, version=from_version)
333 333
334 334 c.from_version = prev_at_version
335 335 c.from_version_num = (prev_at_version
336 336 if prev_at_version and prev_at_version != 'latest'
337 337 else None)
338 338 c.from_version_pos = ChangesetComment.get_index_from_version(
339 339 c.from_version_num, versions)
340 340
341 341 # define if we're in COMPARE mode or VIEW at version mode
342 342 compare = at_version != prev_at_version
343 343
344 344 # the repo_name this pull request was opened against,
345 345 # i.e. the target_repo must match
346 346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 347 raise HTTPNotFound()
348 348
349 349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 350 pull_request_at_ver)
351 351
352 352 c.pull_request = pull_request_display_obj
353 353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 354 c.pull_request_latest = pull_request_latest
355 355
356 356 if compare or (at_version and not at_version == 'latest'):
357 357 c.allowed_to_change_status = False
358 358 c.allowed_to_update = False
359 359 c.allowed_to_merge = False
360 360 c.allowed_to_delete = False
361 361 c.allowed_to_comment = False
362 362 c.allowed_to_close = False
363 363 else:
364 364 can_change_status = PullRequestModel().check_user_change_status(
365 365 pull_request_at_ver, self._rhodecode_user)
366 366 c.allowed_to_change_status = can_change_status and not pr_closed
367 367
368 368 c.allowed_to_update = PullRequestModel().check_user_update(
369 369 pull_request_latest, self._rhodecode_user) and not pr_closed
370 370 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 371 pull_request_latest, self._rhodecode_user) and not pr_closed
372 372 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 373 pull_request_latest, self._rhodecode_user) and not pr_closed
374 374 c.allowed_to_comment = not pr_closed
375 375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376 376
377 377 c.forbid_adding_reviewers = False
378 378 c.forbid_author_to_review = False
379 379 c.forbid_commit_author_to_review = False
380 380
381 381 if pull_request_latest.reviewer_data and \
382 382 'rules' in pull_request_latest.reviewer_data:
383 383 rules = pull_request_latest.reviewer_data['rules'] or {}
384 384 try:
385 385 c.forbid_adding_reviewers = rules.get(
386 386 'forbid_adding_reviewers')
387 387 c.forbid_author_to_review = rules.get(
388 388 'forbid_author_to_review')
389 389 c.forbid_commit_author_to_review = rules.get(
390 390 'forbid_commit_author_to_review')
391 391 except Exception:
392 392 pass
393 393
394 394 # check merge capabilities
395 395 _merge_check = MergeCheck.validate(
396 396 pull_request_latest, auth_user=self._rhodecode_user,
397 397 translator=self.request.translate,
398 398 force_shadow_repo_refresh=force_refresh)
399 399
400 400 c.pr_merge_errors = _merge_check.error_details
401 401 c.pr_merge_possible = not _merge_check.failed
402 402 c.pr_merge_message = _merge_check.merge_msg
403 c.pr_merge_source_commit = _merge_check.source_commit
404 c.pr_merge_target_commit = _merge_check.target_commit
403 405
404 406 c.pr_merge_info = MergeCheck.get_merge_conditions(
405 407 pull_request_latest, translator=self.request.translate)
406 408
407 409 c.pull_request_review_status = _merge_check.review_status
408 410 if merge_checks:
409 411 self.request.override_renderer = \
410 412 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
411 413 return self._get_template_context(c)
412 414
413 415 comments_model = CommentsModel()
414 416
415 417 # reviewers and statuses
416 418 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
417 419 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
418 420
419 421 # GENERAL COMMENTS with versions #
420 422 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
421 423 q = q.order_by(ChangesetComment.comment_id.asc())
422 424 general_comments = q
423 425
424 426 # pick comments we want to render at current version
425 427 c.comment_versions = comments_model.aggregate_comments(
426 428 general_comments, versions, c.at_version_num)
427 429 c.comments = c.comment_versions[c.at_version_num]['until']
428 430
429 431 # INLINE COMMENTS with versions #
430 432 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
431 433 q = q.order_by(ChangesetComment.comment_id.asc())
432 434 inline_comments = q
433 435
434 436 c.inline_versions = comments_model.aggregate_comments(
435 437 inline_comments, versions, c.at_version_num, inline=True)
436 438
437 439 # TODOs
438 440 c.unresolved_comments = CommentsModel() \
439 441 .get_pull_request_unresolved_todos(pull_request)
440 442 c.resolved_comments = CommentsModel() \
441 443 .get_pull_request_resolved_todos(pull_request)
442 444
443 445 # inject latest version
444 446 latest_ver = PullRequest.get_pr_display_object(
445 447 pull_request_latest, pull_request_latest)
446 448
447 449 c.versions = versions + [latest_ver]
448 450
449 451 # if a specific version is used, do not show comments
450 452 # newer than that version
451 453 display_inline_comments = collections.defaultdict(
452 454 lambda: collections.defaultdict(list))
453 455 for co in inline_comments:
454 456 if c.at_version_num:
455 457 # pick comments up to the given version, so we
456 458 # don't render comments from higher versions
457 459 should_render = co.pull_request_version_id and \
458 460 co.pull_request_version_id <= c.at_version_num
459 461 else:
460 462 # showing all, for 'latest'
461 463 should_render = True
462 464
463 465 if should_render:
464 466 display_inline_comments[co.f_path][co.line_no].append(co)
465 467
466 468 # load diff data into the template context; if we use compare mode,
467 469 # the diff is calculated based on changes between versions of the PR
468 470
469 471 source_repo = pull_request_at_ver.source_repo
470 472 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
471 473
472 474 target_repo = pull_request_at_ver.target_repo
473 475 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
474 476
475 477 if compare:
476 478 # in compare switch the diff base to latest commit from prev version
477 479 target_ref_id = prev_pull_request_display_obj.revisions[0]
478 480
479 481 # even though commits can be opened via bookmarks/branches/tags, we always
480 482 # convert this to a rev to prevent changes after a bookmark or branch change
481 483 c.source_ref_type = 'rev'
482 484 c.source_ref = source_ref_id
483 485
484 486 c.target_ref_type = 'rev'
485 487 c.target_ref = target_ref_id
486 488
487 489 c.source_repo = source_repo
488 490 c.target_repo = target_repo
489 491
490 492 c.commit_ranges = []
491 493 source_commit = EmptyCommit()
492 494 target_commit = EmptyCommit()
493 495 c.missing_requirements = False
494 496
495 497 source_scm = source_repo.scm_instance()
496 498 target_scm = target_repo.scm_instance()
497 499
498 500 shadow_scm = None
499 501 try:
500 502 shadow_scm = pull_request_latest.get_shadow_repo()
501 503 except Exception:
502 504 log.debug('Failed to get shadow repo', exc_info=True)
503 505 # try first the existing source_repo, and then shadow
504 506 # repo if we can obtain one
505 507 commits_source_repo = source_scm
506 508 if shadow_scm:
507 509 commits_source_repo = shadow_scm
508 510
509 511 c.commits_source_repo = commits_source_repo
510 512 c.ancestor = None # set it to None, to hide it from PR view
511 513
512 514 # empty version means latest, so we keep this to prevent
513 515 # double caching
514 516 version_normalized = version or 'latest'
515 517 from_version_normalized = from_version or 'latest'
516 518
517 519 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
518 520 cache_file_path = diff_cache_exist(
519 521 cache_path, 'pull_request', pull_request_id, version_normalized,
520 522 from_version_normalized, source_ref_id, target_ref_id,
521 523 hide_whitespace_changes, diff_context, c.fulldiff)
522 524
523 525 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
524 526 force_recache = self.get_recache_flag()
525 527
526 528 cached_diff = None
527 529 if caching_enabled:
528 530 cached_diff = load_cached_diff(cache_file_path)
529 531
530 532 has_proper_commit_cache = (
531 533 cached_diff and cached_diff.get('commits')
532 534 and len(cached_diff.get('commits', [])) == 5
533 535 and cached_diff.get('commits')[0]
534 536 and cached_diff.get('commits')[3])
535 537
536 538 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
537 539 diff_commit_cache = \
538 540 (ancestor_commit, commit_cache, missing_requirements,
539 541 source_commit, target_commit) = cached_diff['commits']
540 542 else:
541 543 # NOTE(marcink): we may reach potentially unreachable commits when a PR has
542 544 # merge errors resulting in potentially hidden commits in the shadow repo.
543 545 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
544 546 and _merge_check.merge_response
545 547 maybe_unreachable = maybe_unreachable \
546 548 and _merge_check.merge_response.metadata.get('unresolved_files')
547 549 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
548 550 diff_commit_cache = \
549 551 (ancestor_commit, commit_cache, missing_requirements,
550 552 source_commit, target_commit) = self.get_commits(
551 553 commits_source_repo,
552 554 pull_request_at_ver,
553 555 source_commit,
554 556 source_ref_id,
555 557 source_scm,
556 558 target_commit,
557 559 target_ref_id,
558 560 target_scm, maybe_unreachable=maybe_unreachable)
559 561
560 562 # register our commit range
561 563 for comm in commit_cache.values():
562 564 c.commit_ranges.append(comm)
563 565
564 566 c.missing_requirements = missing_requirements
565 567 c.ancestor_commit = ancestor_commit
566 568 c.statuses = source_repo.statuses(
567 569 [x.raw_id for x in c.commit_ranges])
568 570
569 571 # auto-collapse if we have more commits than the limit
570 572 collapse_limit = diffs.DiffProcessor._collapse_commits_over
571 573 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
572 574 c.compare_mode = compare
573 575
574 576 # diff_limit is the old behavior: it will cut off the whole diff
575 577 # if the limit is applied, otherwise it will just hide the
576 578 # big files from the front-end
577 579 diff_limit = c.visual.cut_off_limit_diff
578 580 file_limit = c.visual.cut_off_limit_file
579 581
580 582 c.missing_commits = False
581 583 if (c.missing_requirements
582 584 or isinstance(source_commit, EmptyCommit)
583 585 or source_commit == target_commit):
584 586
585 587 c.missing_commits = True
586 588 else:
587 589 c.inline_comments = display_inline_comments
588 590
589 591 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
590 592 if not force_recache and has_proper_diff_cache:
591 593 c.diffset = cached_diff['diff']
592 594 (ancestor_commit, commit_cache, missing_requirements,
593 595 source_commit, target_commit) = cached_diff['commits']
594 596 else:
595 597 c.diffset = self._get_diffset(
596 598 c.source_repo.repo_name, commits_source_repo,
597 599 source_ref_id, target_ref_id,
598 600 target_commit, source_commit,
599 601 diff_limit, file_limit, c.fulldiff,
600 602 hide_whitespace_changes, diff_context)
601 603
602 604 # save cached diff
603 605 if caching_enabled:
604 606 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
605 607
606 608 c.limited_diff = c.diffset.limited_diff
607 609
608 610 # calculate removed files that are bound to comments
609 611 comment_deleted_files = [
610 612 fname for fname in display_inline_comments
611 613 if fname not in c.diffset.file_stats]
612 614
613 615 c.deleted_files_comments = collections.defaultdict(dict)
614 616 for fname, per_line_comments in display_inline_comments.items():
615 617 if fname in comment_deleted_files:
616 618 c.deleted_files_comments[fname]['stats'] = 0
617 619 c.deleted_files_comments[fname]['comments'] = list()
618 620 for lno, comments in per_line_comments.items():
619 621 c.deleted_files_comments[fname]['comments'].extend(comments)
620 622
621 623 # maybe calculate the range diff
622 624 if c.range_diff_on:
623 625 # TODO(marcink): set whitespace/context
624 626 context_lcl = 3
625 627 ign_whitespace_lcl = False
626 628
627 629 for commit in c.commit_ranges:
628 630 commit2 = commit
629 631 commit1 = commit.first_parent
630 632
631 633 range_diff_cache_file_path = diff_cache_exist(
632 634 cache_path, 'diff', commit.raw_id,
633 635 ign_whitespace_lcl, context_lcl, c.fulldiff)
634 636
635 637 cached_diff = None
636 638 if caching_enabled:
637 639 cached_diff = load_cached_diff(range_diff_cache_file_path)
638 640
639 641 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
640 642 if not force_recache and has_proper_diff_cache:
641 643 diffset = cached_diff['diff']
642 644 else:
643 645 diffset = self._get_range_diffset(
644 646 commits_source_repo, source_repo,
645 647 commit1, commit2, diff_limit, file_limit,
646 648 c.fulldiff, ign_whitespace_lcl, context_lcl
647 649 )
648 650
649 651 # save cached diff
650 652 if caching_enabled:
651 653 cache_diff(range_diff_cache_file_path, diffset, None)
652 654
653 655 c.changes[commit.raw_id] = diffset
654 656
655 657 # this is a hack to properly display links: when creating a PR, the
656 658 # compare view and others use a different notation, and
657 659 # compare_commits.mako renders links based on the target_repo.
658 660 # We need to swap that here to generate it properly on the HTML side
659 661 c.target_repo = c.source_repo
660 662
661 663 c.commit_statuses = ChangesetStatus.STATUSES
662 664
663 665 c.show_version_changes = not pr_closed
664 666 if c.show_version_changes:
665 667 cur_obj = pull_request_at_ver
666 668 prev_obj = prev_pull_request_at_ver
667 669
668 670 old_commit_ids = prev_obj.revisions
669 671 new_commit_ids = cur_obj.revisions
670 672 commit_changes = PullRequestModel()._calculate_commit_id_changes(
671 673 old_commit_ids, new_commit_ids)
672 674 c.commit_changes_summary = commit_changes
673 675
674 676 # calculate the diff for commits between versions
675 677 c.commit_changes = []
676 678 mark = lambda cs, fw: list(
677 679 h.itertools.izip_longest([], cs, fillvalue=fw))
678 680 for c_type, raw_id in mark(commit_changes.added, 'a') \
679 681 + mark(commit_changes.removed, 'r') \
680 682 + mark(commit_changes.common, 'c'):
681 683
682 684 if raw_id in commit_cache:
683 685 commit = commit_cache[raw_id]
684 686 else:
685 687 try:
686 688 commit = commits_source_repo.get_commit(raw_id)
687 689 except CommitDoesNotExistError:
688 690 # in case extraction fails, still use a "dummy" commit
689 691 # for display in the commit diff
690 692 commit = h.AttributeDict(
691 693 {'raw_id': raw_id,
692 694 'message': 'EMPTY or MISSING COMMIT'})
693 695 c.commit_changes.append([c_type, commit])
694 696
695 697 # current user review statuses for each version
696 698 c.review_versions = {}
697 699 if self._rhodecode_user.user_id in allowed_reviewers:
698 700 for co in general_comments:
699 701 if co.author.user_id == self._rhodecode_user.user_id:
700 702 status = co.status_change
701 703 if status:
702 704 _ver_pr = status[0].comment.pull_request_version_id
703 705 c.review_versions[_ver_pr] = status[0]
704 706
705 707 return self._get_template_context(c)
706 708
707 709 def get_commits(
708 710 self, commits_source_repo, pull_request_at_ver, source_commit,
709 711 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
710 712 maybe_unreachable=False):
711 713
712 714 commit_cache = collections.OrderedDict()
713 715 missing_requirements = False
714 716
715 717 try:
716 718 pre_load = ["author", "date", "message", "branch", "parents"]
717 719
718 720 pull_request_commits = pull_request_at_ver.revisions
719 721 log.debug('Loading %s commits from %s',
720 722 len(pull_request_commits), commits_source_repo)
721 723
722 724 for rev in pull_request_commits:
723 725 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
724 726 maybe_unreachable=maybe_unreachable)
725 727 commit_cache[comm.raw_id] = comm
726 728
727 729 # Order here matters: we first need to get the target, and then
728 730 # the source
729 731 target_commit = commits_source_repo.get_commit(
730 732 commit_id=safe_str(target_ref_id))
731 733
732 734 source_commit = commits_source_repo.get_commit(
733 735 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
734 736 except CommitDoesNotExistError:
735 737 log.warning('Failed to get commit from `{}` repo'.format(
736 738 commits_source_repo), exc_info=True)
737 739 except RepositoryRequirementError:
738 740 log.warning('Failed to get all required data from repo', exc_info=True)
739 741 missing_requirements = True
740 742 ancestor_commit = None
741 743 try:
742 744 ancestor_id = source_scm.get_common_ancestor(
743 745 source_commit.raw_id, target_commit.raw_id, target_scm)
744 746 ancestor_commit = source_scm.get_commit(ancestor_id)
745 747 except Exception:
746 748 ancestor_commit = None
747 749 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
748 750
749 751 def assure_not_empty_repo(self):
750 752 _ = self.request.translate
751 753
752 754 try:
753 755 self.db_repo.scm_instance().get_commit()
754 756 except EmptyRepositoryError:
755 757 h.flash(h.literal(_('There are no commits yet')),
756 758 category='warning')
757 759 raise HTTPFound(
758 760 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
759 761
760 762 @LoginRequired()
761 763 @NotAnonymous()
762 764 @HasRepoPermissionAnyDecorator(
763 765 'repository.read', 'repository.write', 'repository.admin')
764 766 @view_config(
765 767 route_name='pullrequest_new', request_method='GET',
766 768 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
767 769 def pull_request_new(self):
768 770 _ = self.request.translate
769 771 c = self.load_default_context()
770 772
771 773 self.assure_not_empty_repo()
772 774 source_repo = self.db_repo
773 775
774 776 commit_id = self.request.GET.get('commit')
775 777 branch_ref = self.request.GET.get('branch')
776 778 bookmark_ref = self.request.GET.get('bookmark')
777 779
778 780 try:
779 781 source_repo_data = PullRequestModel().generate_repo_data(
780 782 source_repo, commit_id=commit_id,
781 783 branch=branch_ref, bookmark=bookmark_ref,
782 784 translator=self.request.translate)
783 785 except CommitDoesNotExistError as e:
784 786 log.exception(e)
785 787 h.flash(_('Commit does not exist'), 'error')
786 788 raise HTTPFound(
787 789 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
788 790
789 791 default_target_repo = source_repo
790 792
791 793 if source_repo.parent and c.has_origin_repo_read_perm:
792 794 parent_vcs_obj = source_repo.parent.scm_instance()
793 795 if parent_vcs_obj and not parent_vcs_obj.is_empty():
794 796 # change default if we have a parent repo
795 797 default_target_repo = source_repo.parent
796 798
797 799 target_repo_data = PullRequestModel().generate_repo_data(
798 800 default_target_repo, translator=self.request.translate)
799 801
800 802 selected_source_ref = source_repo_data['refs']['selected_ref']
801 803 title_source_ref = ''
802 804 if selected_source_ref:
803 805 title_source_ref = selected_source_ref.split(':', 2)[1]
804 806 c.default_title = PullRequestModel().generate_pullrequest_title(
805 807 source=source_repo.repo_name,
806 808 source_ref=title_source_ref,
807 809 target=default_target_repo.repo_name
808 810 )
809 811
810 812 c.default_repo_data = {
811 813 'source_repo_name': source_repo.repo_name,
812 814 'source_refs_json': json.dumps(source_repo_data),
813 815 'target_repo_name': default_target_repo.repo_name,
814 816 'target_refs_json': json.dumps(target_repo_data),
815 817 }
816 818 c.default_source_ref = selected_source_ref
817 819
818 820 return self._get_template_context(c)
819 821
820 822 @LoginRequired()
821 823 @NotAnonymous()
822 824 @HasRepoPermissionAnyDecorator(
823 825 'repository.read', 'repository.write', 'repository.admin')
824 826 @view_config(
825 827 route_name='pullrequest_repo_refs', request_method='GET',
826 828 renderer='json_ext', xhr=True)
827 829 def pull_request_repo_refs(self):
828 830 self.load_default_context()
829 831 target_repo_name = self.request.matchdict['target_repo_name']
830 832 repo = Repository.get_by_repo_name(target_repo_name)
831 833 if not repo:
832 834 raise HTTPNotFound()
833 835
834 836 target_perm = HasRepoPermissionAny(
835 837 'repository.read', 'repository.write', 'repository.admin')(
836 838 target_repo_name)
837 839 if not target_perm:
838 840 raise HTTPNotFound()
839 841
840 842 return PullRequestModel().generate_repo_data(
841 843 repo, translator=self.request.translate)
842 844
843 845 @LoginRequired()
844 846 @NotAnonymous()
845 847 @HasRepoPermissionAnyDecorator(
846 848 'repository.read', 'repository.write', 'repository.admin')
847 849 @view_config(
848 850 route_name='pullrequest_repo_targets', request_method='GET',
849 851 renderer='json_ext', xhr=True)
850 852 def pullrequest_repo_targets(self):
851 853 _ = self.request.translate
852 854 filter_query = self.request.GET.get('query')
853 855
854 856 # get the parents
855 857 parent_target_repos = []
856 858 if self.db_repo.parent:
857 859 parents_query = Repository.query() \
858 860 .order_by(func.length(Repository.repo_name)) \
859 861 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
860 862
861 863 if filter_query:
862 864 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
863 865 parents_query = parents_query.filter(
864 866 Repository.repo_name.ilike(ilike_expression))
865 867 parents = parents_query.limit(20).all()
866 868
867 869 for parent in parents:
868 870 parent_vcs_obj = parent.scm_instance()
869 871 if parent_vcs_obj and not parent_vcs_obj.is_empty():
870 872 parent_target_repos.append(parent)
871 873
872 874 # get other forks, and repo itself
873 875 query = Repository.query() \
874 876 .order_by(func.length(Repository.repo_name)) \
875 877 .filter(
876 878 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
877 879 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
878 880 ) \
879 881 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
880 882
881 883 if filter_query:
882 884 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
883 885 query = query.filter(Repository.repo_name.ilike(ilike_expression))
884 886
885 887 limit = max(20 - len(parent_target_repos), 5) # not less than 5
886 888 target_repos = query.limit(limit).all()
887 889
888 890 all_target_repos = target_repos + parent_target_repos
889 891
890 892 repos = []
891 893 # This checks permissions to the repositories
892 894 for obj in ScmModel().get_repos(all_target_repos):
893 895 repos.append({
894 896 'id': obj['name'],
895 897 'text': obj['name'],
896 898 'type': 'repo',
897 899 'repo_id': obj['dbrepo']['repo_id'],
898 900 'repo_type': obj['dbrepo']['repo_type'],
899 901 'private': obj['dbrepo']['private'],
900 902
901 903 })
902 904
903 905 data = {
904 906 'more': False,
905 907 'results': [{
906 908 'text': _('Repositories'),
907 909 'children': repos
908 910 }] if repos else []
909 911 }
910 912 return data
911 913
912 914 @LoginRequired()
913 915 @NotAnonymous()
914 916 @HasRepoPermissionAnyDecorator(
915 917 'repository.read', 'repository.write', 'repository.admin')
916 918 @CSRFRequired()
917 919 @view_config(
918 920 route_name='pullrequest_create', request_method='POST',
919 921 renderer=None)
920 922 def pull_request_create(self):
921 923 _ = self.request.translate
922 924 self.assure_not_empty_repo()
923 925 self.load_default_context()
924 926
925 927 controls = peppercorn.parse(self.request.POST.items())
926 928
927 929 try:
928 930 form = PullRequestForm(
929 931 self.request.translate, self.db_repo.repo_id)()
930 932 _form = form.to_python(controls)
931 933 except formencode.Invalid as errors:
932 934 if errors.error_dict.get('revisions'):
933 935 msg = 'Revisions: %s' % errors.error_dict['revisions']
934 936 elif errors.error_dict.get('pullrequest_title'):
935 937 msg = errors.error_dict.get('pullrequest_title')
936 938 else:
937 939 msg = _('Error creating pull request: {}').format(errors)
938 940 log.exception(msg)
939 941 h.flash(msg, 'error')
940 942
941 943 # would rather just go back to form ...
942 944 raise HTTPFound(
943 945 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
944 946
945 947 source_repo = _form['source_repo']
946 948 source_ref = _form['source_ref']
947 949 target_repo = _form['target_repo']
948 950 target_ref = _form['target_ref']
949 951 commit_ids = _form['revisions'][::-1]
950 952
951 953 # find the ancestor for this pr
952 954 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
953 955 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
954 956
955 957 if not (source_db_repo or target_db_repo):
956 958 h.flash(_('source_repo or target repo not found'), category='error')
957 959 raise HTTPFound(
958 960 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
959 961
960 962 # re-check permissions here:
961 963 # we must have read permissions on the source_repo
962 964
963 965 source_perm = HasRepoPermissionAny(
964 966 'repository.read', 'repository.write', 'repository.admin')(
965 967 source_db_repo.repo_name)
966 968 if not source_perm:
967 969 msg = _('Not Enough permissions to source repo `{}`.'.format(
968 970 source_db_repo.repo_name))
969 971 h.flash(msg, category='error')
970 972 # copy the args back to redirect
971 973 org_query = self.request.GET.mixed()
972 974 raise HTTPFound(
973 975 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
974 976 _query=org_query))
975 977
976 978 # we must have read permissions on the target repo as well, and later on
977 979 # we also want to check branch permissions here
978 980 target_perm = HasRepoPermissionAny(
979 981 'repository.read', 'repository.write', 'repository.admin')(
980 982 target_db_repo.repo_name)
981 983 if not target_perm:
982 984 msg = _('Not Enough permissions to target repo `{}`.'.format(
983 985 target_db_repo.repo_name))
984 986 h.flash(msg, category='error')
985 987 # copy the args back to redirect
986 988 org_query = self.request.GET.mixed()
987 989 raise HTTPFound(
988 990 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
989 991 _query=org_query))
990 992
991 993 source_scm = source_db_repo.scm_instance()
992 994 target_scm = target_db_repo.scm_instance()
993 995
994 996 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
995 997 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
996 998
997 999 ancestor = source_scm.get_common_ancestor(
998 1000 source_commit.raw_id, target_commit.raw_id, target_scm)
999 1001
1000 1002 # recalculate target ref based on ancestor
1001 1003 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1002 1004 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1003 1005
1004 1006 get_default_reviewers_data, validate_default_reviewers = \
1005 1007 PullRequestModel().get_reviewer_functions()
1006 1008
1007 1009 # recalculate reviewers logic, to make sure we can validate this
1008 1010 reviewer_rules = get_default_reviewers_data(
1009 1011 self._rhodecode_db_user, source_db_repo,
1010 1012 source_commit, target_db_repo, target_commit)
1011 1013
1012 1014 given_reviewers = _form['review_members']
1013 1015 reviewers = validate_default_reviewers(
1014 1016 given_reviewers, reviewer_rules)
1015 1017
1016 1018 pullrequest_title = _form['pullrequest_title']
1017 1019 title_source_ref = source_ref.split(':', 2)[1]
1018 1020 if not pullrequest_title:
1019 1021 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1020 1022 source=source_repo,
1021 1023 source_ref=title_source_ref,
1022 1024 target=target_repo
1023 1025 )
1024 1026
1025 1027 description = _form['pullrequest_desc']
1026 1028 description_renderer = _form['description_renderer']
1027 1029
1028 1030 try:
1029 1031 pull_request = PullRequestModel().create(
1030 1032 created_by=self._rhodecode_user.user_id,
1031 1033 source_repo=source_repo,
1032 1034 source_ref=source_ref,
1033 1035 target_repo=target_repo,
1034 1036 target_ref=target_ref,
1035 1037 revisions=commit_ids,
1036 1038 reviewers=reviewers,
1037 1039 title=pullrequest_title,
1038 1040 description=description,
1039 1041 description_renderer=description_renderer,
1040 1042 reviewer_data=reviewer_rules,
1041 1043 auth_user=self._rhodecode_user
1042 1044 )
1043 1045 Session().commit()
1044 1046
1045 1047 h.flash(_('Successfully opened new pull request'),
1046 1048 category='success')
1047 1049 except Exception:
1048 1050 msg = _('Error occurred during creation of this pull request.')
1049 1051 log.exception(msg)
1050 1052 h.flash(msg, category='error')
1051 1053
1052 1054 # copy the args back to redirect
1053 1055 org_query = self.request.GET.mixed()
1054 1056 raise HTTPFound(
1055 1057 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1056 1058 _query=org_query))
1057 1059
1058 1060 raise HTTPFound(
1059 1061 h.route_path('pullrequest_show', repo_name=target_repo,
1060 1062 pull_request_id=pull_request.pull_request_id))
1061 1063
1062 1064 @LoginRequired()
1063 1065 @NotAnonymous()
1064 1066 @HasRepoPermissionAnyDecorator(
1065 1067 'repository.read', 'repository.write', 'repository.admin')
1066 1068 @CSRFRequired()
1067 1069 @view_config(
1068 1070 route_name='pullrequest_update', request_method='POST',
1069 1071 renderer='json_ext')
1070 1072 def pull_request_update(self):
1071 1073 pull_request = PullRequest.get_or_404(
1072 1074 self.request.matchdict['pull_request_id'])
1073 1075 _ = self.request.translate
1074 1076
1075 1077 self.load_default_context()
1076 1078 redirect_url = None
1077 1079
1078 1080 if pull_request.is_closed():
1079 1081 log.debug('update: forbidden because pull request is closed')
1080 1082 msg = _(u'Cannot update closed pull requests.')
1081 1083 h.flash(msg, category='error')
1082 1084 return {'response': True,
1083 1085 'redirect_url': redirect_url}
1084 1086
1085 1087 is_state_changing = pull_request.is_state_changing()
1086 1088
1087 1089 # only owner or admin can update it
1088 1090 allowed_to_update = PullRequestModel().check_user_update(
1089 1091 pull_request, self._rhodecode_user)
1090 1092 if allowed_to_update:
1091 1093 controls = peppercorn.parse(self.request.POST.items())
1092 1094 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1093 1095
1094 1096 if 'review_members' in controls:
1095 1097 self._update_reviewers(
1096 1098 pull_request, controls['review_members'],
1097 1099 pull_request.reviewer_data)
1098 1100 elif str2bool(self.request.POST.get('update_commits', 'false')):
1099 1101 if is_state_changing:
1100 1102 log.debug('commits update: forbidden because pull request is in state %s',
1101 1103 pull_request.pull_request_state)
1102 1104 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1103 1105 u'Current state is: `{}`').format(
1104 1106 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1105 1107 h.flash(msg, category='error')
1106 1108 return {'response': True,
1107 1109 'redirect_url': redirect_url}
1108 1110
1109 1111 self._update_commits(pull_request)
1110 1112 if force_refresh:
1111 1113 redirect_url = h.route_path(
1112 1114 'pullrequest_show', repo_name=self.db_repo_name,
1113 1115 pull_request_id=pull_request.pull_request_id,
1114 1116 _query={"force_refresh": 1})
1115 1117 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1116 1118 self._edit_pull_request(pull_request)
1117 1119 else:
1118 1120 raise HTTPBadRequest()
1119 1121
1120 1122 return {'response': True,
1121 1123 'redirect_url': redirect_url}
1122 1124 raise HTTPForbidden()
1123 1125
1124 1126 def _edit_pull_request(self, pull_request):
1125 1127 _ = self.request.translate
1126 1128
1127 1129 try:
1128 1130 PullRequestModel().edit(
1129 1131 pull_request,
1130 1132 self.request.POST.get('title'),
1131 1133 self.request.POST.get('description'),
1132 1134 self.request.POST.get('description_renderer'),
1133 1135 self._rhodecode_user)
1134 1136 except ValueError:
1135 1137 msg = _(u'Cannot update closed pull requests.')
1136 1138 h.flash(msg, category='error')
1137 1139 return
1138 1140 else:
1139 1141 Session().commit()
1140 1142
1141 1143 msg = _(u'Pull request title & description updated.')
1142 1144 h.flash(msg, category='success')
1143 1145 return
1144 1146
1145 1147 def _update_commits(self, pull_request):
1146 1148 _ = self.request.translate
1147 1149
1148 1150 with pull_request.set_state(PullRequest.STATE_UPDATING):
1149 1151 resp = PullRequestModel().update_commits(
1150 1152 pull_request, self._rhodecode_db_user)
1151 1153
1152 1154 if resp.executed:
1153 1155
1154 1156 if resp.target_changed and resp.source_changed:
1155 1157 changed = 'target and source repositories'
1156 1158 elif resp.target_changed and not resp.source_changed:
1157 1159 changed = 'target repository'
1158 1160 elif not resp.target_changed and resp.source_changed:
1159 1161 changed = 'source repository'
1160 1162 else:
1161 1163 changed = 'nothing'
1162 1164
1163 1165 msg = _(u'Pull request updated to "{source_commit_id}" with '
1164 1166 u'{count_added} added, {count_removed} removed commits. '
1165 1167 u'Source of changes: {change_source}')
1166 1168 msg = msg.format(
1167 1169 source_commit_id=pull_request.source_ref_parts.commit_id,
1168 1170 count_added=len(resp.changes.added),
1169 1171 count_removed=len(resp.changes.removed),
1170 1172 change_source=changed)
1171 1173 h.flash(msg, category='success')
1172 1174
1173 1175 channel = '/repo${}$/pr/{}'.format(
1174 1176 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1175 1177 message = msg + (
1176 1178 ' - <a onclick="window.location.reload()">'
1177 1179 '<strong>{}</strong></a>'.format(_('Reload page')))
1178 1180 channelstream.post_message(
1179 1181 channel, message, self._rhodecode_user.username,
1180 1182 registry=self.request.registry)
1181 1183 else:
1182 1184 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1183 1185 warning_reasons = [
1184 1186 UpdateFailureReason.NO_CHANGE,
1185 1187 UpdateFailureReason.WRONG_REF_TYPE,
1186 1188 ]
1187 1189 category = 'warning' if resp.reason in warning_reasons else 'error'
1188 1190 h.flash(msg, category=category)
1189 1191
1190 1192 @LoginRequired()
1191 1193 @NotAnonymous()
1192 1194 @HasRepoPermissionAnyDecorator(
1193 1195 'repository.read', 'repository.write', 'repository.admin')
1194 1196 @CSRFRequired()
1195 1197 @view_config(
1196 1198 route_name='pullrequest_merge', request_method='POST',
1197 1199 renderer='json_ext')
1198 1200 def pull_request_merge(self):
1199 1201 """
1200 1202 Merge will perform a server-side merge of the specified
1201 1203 pull request, if the pull request is approved and mergeable.
1202 1204 After successful merging, the pull request is automatically
1203 1205 closed, with a relevant comment.
1204 1206 """
1205 1207 pull_request = PullRequest.get_or_404(
1206 1208 self.request.matchdict['pull_request_id'])
1207 1209 _ = self.request.translate
1208 1210
1209 1211 if pull_request.is_state_changing():
1210 1212 log.debug('show: forbidden because pull request is in state %s',
1211 1213 pull_request.pull_request_state)
1212 1214 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1213 1215 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1214 1216 pull_request.pull_request_state)
1215 1217 h.flash(msg, category='error')
1216 1218 raise HTTPFound(
1217 1219 h.route_path('pullrequest_show',
1218 1220 repo_name=pull_request.target_repo.repo_name,
1219 1221 pull_request_id=pull_request.pull_request_id))
1220 1222
1221 1223 self.load_default_context()
1222 1224
1223 1225 with pull_request.set_state(PullRequest.STATE_UPDATING):
1224 1226 check = MergeCheck.validate(
1225 1227 pull_request, auth_user=self._rhodecode_user,
1226 1228 translator=self.request.translate)
1227 1229 merge_possible = not check.failed
1228 1230
1229 1231 for err_type, error_msg in check.errors:
1230 1232 h.flash(error_msg, category=err_type)
1231 1233
1232 1234 if merge_possible:
1233 1235 log.debug("Pre-conditions checked, trying to merge.")
1234 1236 extras = vcs_operation_context(
1235 1237 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1236 1238 username=self._rhodecode_db_user.username, action='push',
1237 1239 scm=pull_request.target_repo.repo_type)
1238 1240 with pull_request.set_state(PullRequest.STATE_UPDATING):
1239 1241 self._merge_pull_request(
1240 1242 pull_request, self._rhodecode_db_user, extras)
1241 1243 else:
1242 1244 log.debug("Pre-conditions failed, NOT merging.")
1243 1245
1244 1246 raise HTTPFound(
1245 1247 h.route_path('pullrequest_show',
1246 1248 repo_name=pull_request.target_repo.repo_name,
1247 1249 pull_request_id=pull_request.pull_request_id))
1248 1250
1249 1251 def _merge_pull_request(self, pull_request, user, extras):
1250 1252 _ = self.request.translate
1251 1253 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1252 1254
1253 1255 if merge_resp.executed:
1254 1256 log.debug("The merge was successful, closing the pull request.")
1255 1257 PullRequestModel().close_pull_request(
1256 1258 pull_request.pull_request_id, user)
1257 1259 Session().commit()
1258 1260 msg = _('Pull request was successfully merged and closed.')
1259 1261 h.flash(msg, category='success')
1260 1262 else:
1261 1263 log.debug(
1262 1264 "The merge was not successful. Merge response: %s", merge_resp)
1263 1265 msg = merge_resp.merge_status_message
1264 1266 h.flash(msg, category='error')
1265 1267
1266 1268 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1267 1269 _ = self.request.translate
1268 1270
1269 1271 get_default_reviewers_data, validate_default_reviewers = \
1270 1272 PullRequestModel().get_reviewer_functions()
1271 1273
1272 1274 try:
1273 1275 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1274 1276 except ValueError as e:
1275 1277 log.error('Reviewers Validation: {}'.format(e))
1276 1278 h.flash(e, category='error')
1277 1279 return
1278 1280
1279 1281 old_calculated_status = pull_request.calculated_review_status()
1280 1282 PullRequestModel().update_reviewers(
1281 1283 pull_request, reviewers, self._rhodecode_user)
1282 1284 h.flash(_('Pull request reviewers updated.'), category='success')
1283 1285 Session().commit()
1284 1286
1285 1287 # trigger a status change event if the change in reviewers changes the status
1286 1288 calculated_status = pull_request.calculated_review_status()
1287 1289 if old_calculated_status != calculated_status:
1288 1290 PullRequestModel().trigger_pull_request_hook(
1289 1291 pull_request, self._rhodecode_user, 'review_status_change',
1290 1292 data={'status': calculated_status})
1291 1293
1292 1294 @LoginRequired()
1293 1295 @NotAnonymous()
1294 1296 @HasRepoPermissionAnyDecorator(
1295 1297 'repository.read', 'repository.write', 'repository.admin')
1296 1298 @CSRFRequired()
1297 1299 @view_config(
1298 1300 route_name='pullrequest_delete', request_method='POST',
1299 1301 renderer='json_ext')
1300 1302 def pull_request_delete(self):
1301 1303 _ = self.request.translate
1302 1304
1303 1305 pull_request = PullRequest.get_or_404(
1304 1306 self.request.matchdict['pull_request_id'])
1305 1307 self.load_default_context()
1306 1308
1307 1309 pr_closed = pull_request.is_closed()
1308 1310 allowed_to_delete = PullRequestModel().check_user_delete(
1309 1311 pull_request, self._rhodecode_user) and not pr_closed
1310 1312
1311 1313 # only the owner can delete it!
1312 1314 if allowed_to_delete:
1313 1315 PullRequestModel().delete(pull_request, self._rhodecode_user)
1314 1316 Session().commit()
1315 1317 h.flash(_('Successfully deleted pull request'),
1316 1318 category='success')
1317 1319 raise HTTPFound(h.route_path('pullrequest_show_all',
1318 1320 repo_name=self.db_repo_name))
1319 1321
1320 1322 log.warning('user %s tried to delete pull request without access',
1321 1323 self._rhodecode_user)
1322 1324 raise HTTPNotFound()
1323 1325
1324 1326 @LoginRequired()
1325 1327 @NotAnonymous()
1326 1328 @HasRepoPermissionAnyDecorator(
1327 1329 'repository.read', 'repository.write', 'repository.admin')
1328 1330 @CSRFRequired()
1329 1331 @view_config(
1330 1332 route_name='pullrequest_comment_create', request_method='POST',
1331 1333 renderer='json_ext')
1332 1334 def pull_request_comment_create(self):
1333 1335 _ = self.request.translate
1334 1336
1335 1337 pull_request = PullRequest.get_or_404(
1336 1338 self.request.matchdict['pull_request_id'])
1337 1339 pull_request_id = pull_request.pull_request_id
1338 1340
1339 1341 if pull_request.is_closed():
1340 1342 log.debug('comment: forbidden because pull request is closed')
1341 1343 raise HTTPForbidden()
1342 1344
1343 1345 allowed_to_comment = PullRequestModel().check_user_comment(
1344 1346 pull_request, self._rhodecode_user)
1345 1347 if not allowed_to_comment:
1346 1348 log.debug(
1347 1349 'comment: forbidden because pull request is from forbidden repo')
1348 1350 raise HTTPForbidden()
1349 1351
1350 1352 c = self.load_default_context()
1351 1353
1352 1354 status = self.request.POST.get('changeset_status', None)
1353 1355 text = self.request.POST.get('text')
1354 1356 comment_type = self.request.POST.get('comment_type')
1355 1357 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1356 1358 close_pull_request = self.request.POST.get('close_pull_request')
1357 1359
1358 1360 # the logic here works as follows: if we submit a close-PR
1359 1361 # comment, use the `close_pull_request_with_comment` function,
1360 1362 # else handle the regular comment logic
1361 1363
1362 1364 if close_pull_request:
1363 1365 # only owner or admin or person with write permissions
1364 1366 allowed_to_close = PullRequestModel().check_user_update(
1365 1367 pull_request, self._rhodecode_user)
1366 1368 if not allowed_to_close:
1367 1369 log.debug('comment: forbidden because not allowed to close '
1368 1370 'pull request %s', pull_request_id)
1369 1371 raise HTTPForbidden()
1370 1372
1371 1373 # This also triggers `review_status_change`
1372 1374 comment, status = PullRequestModel().close_pull_request_with_comment(
1373 1375 pull_request, self._rhodecode_user, self.db_repo, message=text,
1374 1376 auth_user=self._rhodecode_user)
1375 1377 Session().flush()
1376 1378
1377 1379 PullRequestModel().trigger_pull_request_hook(
1378 1380 pull_request, self._rhodecode_user, 'comment',
1379 1381 data={'comment': comment})
1380 1382
1381 1383 else:
1382 1384 # regular comment case: it could be inline, or one with a status change.
1383 1385 # for that one we also check permissions
1384 1386
1385 1387 allowed_to_change_status = PullRequestModel().check_user_change_status(
1386 1388 pull_request, self._rhodecode_user)
1387 1389
1388 1390 if status and allowed_to_change_status:
1389 1391 message = (_('Status change %(transition_icon)s %(status)s')
1390 1392 % {'transition_icon': '>',
1391 1393 'status': ChangesetStatus.get_status_lbl(status)})
1392 1394 text = text or message
1393 1395
1394 1396 comment = CommentsModel().create(
1395 1397 text=text,
1396 1398 repo=self.db_repo.repo_id,
1397 1399 user=self._rhodecode_user.user_id,
1398 1400 pull_request=pull_request,
1399 1401 f_path=self.request.POST.get('f_path'),
1400 1402 line_no=self.request.POST.get('line'),
1401 1403 status_change=(ChangesetStatus.get_status_lbl(status)
1402 1404 if status and allowed_to_change_status else None),
1403 1405 status_change_type=(status
1404 1406 if status and allowed_to_change_status else None),
1405 1407 comment_type=comment_type,
1406 1408 resolves_comment_id=resolves_comment_id,
1407 1409 auth_user=self._rhodecode_user
1408 1410 )
1409 1411
1410 1412 if allowed_to_change_status:
1411 1413 # calculate old status before we change it
1412 1414 old_calculated_status = pull_request.calculated_review_status()
1413 1415
1414 1416 # get status if set !
1415 1417 if status:
1416 1418 ChangesetStatusModel().set_status(
1417 1419 self.db_repo.repo_id,
1418 1420 status,
1419 1421 self._rhodecode_user.user_id,
1420 1422 comment,
1421 1423 pull_request=pull_request
1422 1424 )
1423 1425
1424 1426 Session().flush()
1425 1427 # this is somehow required to get access to some relationship
1426 1428 # loaded on comment
1427 1429 Session().refresh(comment)
1428 1430
1429 1431 PullRequestModel().trigger_pull_request_hook(
1430 1432 pull_request, self._rhodecode_user, 'comment',
1431 1433 data={'comment': comment})
1432 1434
1433 1435 # we now calculate the status of pull request, and based on that
1434 1436 # calculation we set the commits status
1435 1437 calculated_status = pull_request.calculated_review_status()
1436 1438 if old_calculated_status != calculated_status:
1437 1439 PullRequestModel().trigger_pull_request_hook(
1438 1440 pull_request, self._rhodecode_user, 'review_status_change',
1439 1441 data={'status': calculated_status})
1440 1442
1441 1443 Session().commit()
1442 1444
1443 1445 data = {
1444 1446 'target_id': h.safeid(h.safe_unicode(
1445 1447 self.request.POST.get('f_path'))),
1446 1448 }
1447 1449 if comment:
1448 1450 c.co = comment
1449 1451 rendered_comment = render(
1450 1452 'rhodecode:templates/changeset/changeset_comment_block.mako',
1451 1453 self._get_template_context(c), self.request)
1452 1454
1453 1455 data.update(comment.get_dict())
1454 1456 data.update({'rendered_text': rendered_comment})
1455 1457
1456 1458 return data
1457 1459
1458 1460 @LoginRequired()
1459 1461 @NotAnonymous()
1460 1462 @HasRepoPermissionAnyDecorator(
1461 1463 'repository.read', 'repository.write', 'repository.admin')
1462 1464 @CSRFRequired()
1463 1465 @view_config(
1464 1466 route_name='pullrequest_comment_delete', request_method='POST',
1465 1467 renderer='json_ext')
1466 1468 def pull_request_comment_delete(self):
1467 1469 pull_request = PullRequest.get_or_404(
1468 1470 self.request.matchdict['pull_request_id'])
1469 1471
1470 1472 comment = ChangesetComment.get_or_404(
1471 1473 self.request.matchdict['comment_id'])
1472 1474 comment_id = comment.comment_id
1473 1475
1474 1476 if pull_request.is_closed():
1475 1477 log.debug('comment: forbidden because pull request is closed')
1476 1478 raise HTTPForbidden()
1477 1479
1478 1480 if not comment:
1479 1481 log.debug('Comment with id:%s not found, skipping', comment_id)
1480 1482 # comment already deleted in another call probably
1481 1483 return True
1482 1484
1483 1485 if comment.pull_request.is_closed():
1484 1486 # don't allow deleting comments on closed pull request
1485 1487 raise HTTPForbidden()
1486 1488
1487 1489 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1488 1490 super_admin = h.HasPermissionAny('hg.admin')()
1489 1491 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1490 1492 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1491 1493 comment_repo_admin = is_repo_admin and is_repo_comment
1492 1494
1493 1495 if super_admin or comment_owner or comment_repo_admin:
1494 1496 old_calculated_status = comment.pull_request.calculated_review_status()
1495 1497 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1496 1498 Session().commit()
1497 1499 calculated_status = comment.pull_request.calculated_review_status()
1498 1500 if old_calculated_status != calculated_status:
1499 1501 PullRequestModel().trigger_pull_request_hook(
1500 1502 comment.pull_request, self._rhodecode_user, 'review_status_change',
1501 1503 data={'status': calculated_status})
1502 1504 return True
1503 1505 else:
1504 1506 log.warning('No permissions for user %s to delete comment_id: %s',
1505 1507 self._rhodecode_db_user, comment_id)
1506 1508 raise HTTPNotFound()
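# Illustrative sketch, not part of the diff above: the delete permission
# check in pull_request_comment_delete() reduces to a simple predicate.
# The helper name and arguments below are hypothetical and only restate the
# rule: super admins, the comment author, or repo admins acting on a comment
# made in their own repository may delete it.
def _can_delete_comment(is_super_admin, is_comment_owner,
                        is_repo_admin, is_repo_comment):
    comment_repo_admin = is_repo_admin and is_repo_comment
    return is_super_admin or is_comment_owner or comment_repo_admin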
@@ -1,1904 +1,1912 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class MergeFailureReason(object):
64 64 """
65 65 Enumeration with all the reasons why the server side merge could fail.
66 66
67 67 DO NOT change the number of the reasons, as they may be stored in the
68 68 database.
69 69
70 70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 71 reasons.
72 72 """
73 73
74 74 # Everything went well.
75 75 NONE = 0
76 76
77 77 # An unexpected exception was raised. Check the logs for more details.
78 78 UNKNOWN = 1
79 79
80 80 # The merge was not successful, there are conflicts.
81 81 MERGE_FAILED = 2
82 82
83 83 # The merge succeeded but we could not push it to the target repository.
84 84 PUSH_FAILED = 3
85 85
86 86 # The specified target is not a head in the target repository.
87 87 TARGET_IS_NOT_HEAD = 4
88 88
89 89 # The source repository contains more branches than the target. Pushing
90 90 # the merge will create additional branches in the target.
91 91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92 92
93 93 # The target reference has multiple heads, which makes it impossible to
94 94 # correctly identify the target location. This can only happen for
95 95 # mercurial branches.
96 96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97 97
98 98 # The target repository is locked
99 99 TARGET_IS_LOCKED = 7
100 100
101 101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 102 # An involved commit could not be found.
103 103 _DEPRECATED_MISSING_COMMIT = 8
104 104
105 105 # The target repo reference is missing.
106 106 MISSING_TARGET_REF = 9
107 107
108 108 # The source repo reference is missing.
109 109 MISSING_SOURCE_REF = 10
110 110
111 111 # The merge was not successful, there are conflicts related to sub
112 112 # repositories.
113 113 SUBREPO_MERGE_FAILED = 11
114 114
115 115
116 116 class UpdateFailureReason(object):
117 117 """
118 118 Enumeration with all the reasons why the pull request update could fail.
119 119
120 120 DO NOT change the number of the reasons, as they may be stored in the
121 121 database.
122 122
123 123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 124 reasons.
125 125 """
126 126
127 127 # Everything went well.
128 128 NONE = 0
129 129
130 130 # An unexpected exception was raised. Check the logs for more details.
131 131 UNKNOWN = 1
132 132
133 133 # The pull request is up to date.
134 134 NO_CHANGE = 2
135 135
136 136 # The pull request has a reference type that is not supported for update.
137 137 WRONG_REF_TYPE = 3
138 138
139 139 # Update failed because the target reference is missing.
140 140 MISSING_TARGET_REF = 4
141 141
142 142 # Update failed because the source reference is missing.
143 143 MISSING_SOURCE_REF = 5
144 144
145 145
146 146 class MergeResponse(object):
147 147
148 148 # uses .format(**metadata) for variables
149 149 MERGE_STATUS_MESSAGES = {
150 150 MergeFailureReason.NONE: lazy_ugettext(
151 151 u'This pull request can be automatically merged.'),
152 152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 153 u'This pull request cannot be merged because of an unhandled exception. '
154 154 u'{exception}'),
155 155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 158 u'This pull request could not be merged because push to '
159 159 u'target:`{target}@{merge_commit}` failed.'),
160 160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 161 u'This pull request cannot be merged because the target '
162 162 u'`{target_ref.name}` is not a head.'),
163 163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 164 u'This pull request cannot be merged because the source contains '
165 165 u'more branches than the target.'),
166 166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 168 u'has multiple heads: `{heads}`.'),
169 169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 170 u'This pull request cannot be merged because the target repository is '
171 171 u'locked by {locked_by}.'),
172 172
173 173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the target '
175 175 u'reference `{target_ref.name}` is missing.'),
176 176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 177 u'This pull request cannot be merged because the source '
178 178 u'reference `{source_ref.name}` is missing.'),
179 179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 180 u'This pull request cannot be merged because of conflicts related '
181 181 u'to sub repositories.'),
182 182
183 183 # Deprecations
184 184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 185 u'This pull request cannot be merged because the target or the '
186 186 u'source reference is missing.'),
187 187
188 188 }
189 189
190 190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 191 self.possible = possible
192 192 self.executed = executed
193 193 self.merge_ref = merge_ref
194 194 self.failure_reason = failure_reason
195 195 self.metadata = metadata or {}
196 196
197 197 def __repr__(self):
198 198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199 199
200 200 def __eq__(self, other):
201 201 same_instance = isinstance(other, self.__class__)
202 202 return same_instance \
203 203 and self.possible == other.possible \
204 204 and self.executed == other.executed \
205 205 and self.failure_reason == other.failure_reason
206 206
207 207 @property
208 208 def label(self):
209 209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 210 not k.startswith('_'))
211 211 return label_dict.get(self.failure_reason)
212 212
213 213 @property
214 214 def merge_status_message(self):
215 215 """
216 216 Return a human friendly error message for the given merge status code.
217 217 """
218 218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 219
220 220 try:
221 221 return msg.format(**self.metadata)
222 222 except Exception:
223 223 log.exception('Failed to format %s message', self)
224 224 return msg
225 225
226 226 def asdict(self):
227 227 data = {}
228 228 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
229 229 'merge_status_message']:
230 230 data[k] = getattr(self, k)
231 231 return data
232 232
233 233
234 class TargetRefMissing(ValueError):
235 pass
236
237
238 class SourceRefMissing(ValueError):
239 pass
240
241
234 242 class BaseRepository(object):
235 243 """
236 244 Base Repository for final backends
237 245
238 246 .. attribute:: DEFAULT_BRANCH_NAME
239 247
240 248 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
241 249
242 250 .. attribute:: commit_ids
243 251
244 252 list of all available commit ids, in ascending order
245 253
246 254 .. attribute:: path
247 255
248 256 absolute path to the repository
249 257
250 258 .. attribute:: bookmarks
251 259
252 260 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
253 261 there are no bookmarks or the backend implementation does not support
254 262 bookmarks.
255 263
256 264 .. attribute:: tags
257 265
258 266 Mapping from name to :term:`Commit ID` of the tag.
259 267
260 268 """
261 269
262 270 DEFAULT_BRANCH_NAME = None
263 271 DEFAULT_CONTACT = u"Unknown"
264 272 DEFAULT_DESCRIPTION = u"unknown"
265 273 EMPTY_COMMIT_ID = '0' * 40
266 274
267 275 path = None
268 276
269 277 _is_empty = None
270 278 _commit_ids = {}
271 279
272 280 def __init__(self, repo_path, config=None, create=False, **kwargs):
273 281 """
274 282 Initializes the repository. Raises RepositoryError if the repository
275 283 cannot be found at the given ``repo_path``, or if a directory at
276 284 ``repo_path`` exists and ``create`` is set to True.
277 285
278 286 :param repo_path: local path of the repository
279 287 :param config: repository configuration
280 288 :param create=False: if set to True, would try to create repository.
281 289 :param src_url=None: if set, should be proper url from which repository
282 290 would be cloned; requires ``create`` parameter to be set to True -
283 291 raises RepositoryError if src_url is set and create evaluates to
284 292 False
285 293 """
286 294 raise NotImplementedError
287 295
288 296 def __repr__(self):
289 297 return '<%s at %s>' % (self.__class__.__name__, self.path)
290 298
291 299 def __len__(self):
292 300 return self.count()
293 301
294 302 def __eq__(self, other):
295 303 same_instance = isinstance(other, self.__class__)
296 304 return same_instance and other.path == self.path
297 305
298 306 def __ne__(self, other):
299 307 return not self.__eq__(other)
300 308
301 309 def get_create_shadow_cache_pr_path(self, db_repo):
302 310 path = db_repo.cached_diffs_dir
303 311 if not os.path.exists(path):
304 312 os.makedirs(path, 0o755)
305 313 return path
306 314
307 315 @classmethod
308 316 def get_default_config(cls, default=None):
309 317 config = Config()
310 318 if default and isinstance(default, list):
311 319 for section, key, val in default:
312 320 config.set(section, key, val)
313 321 return config
314 322
315 323 @LazyProperty
316 324 def _remote(self):
317 325 raise NotImplementedError
318 326
319 327 def _heads(self, branch=None):
320 328 return []
321 329
322 330 @LazyProperty
323 331 def EMPTY_COMMIT(self):
324 332 return EmptyCommit(self.EMPTY_COMMIT_ID)
325 333
326 334 @LazyProperty
327 335 def alias(self):
328 336 for k, v in settings.BACKENDS.items():
329 337 if v.split('.')[-1] == str(self.__class__.__name__):
330 338 return k
331 339
332 340 @LazyProperty
333 341 def name(self):
334 342 return safe_unicode(os.path.basename(self.path))
335 343
336 344 @LazyProperty
337 345 def description(self):
338 346 raise NotImplementedError
339 347
340 348 def refs(self):
341 349 """
342 350 returns a `dict` with branches, bookmarks, tags, and closed_branches
343 351 for this repository
344 352 """
345 353 return dict(
346 354 branches=self.branches,
347 355 branches_closed=self.branches_closed,
348 356 tags=self.tags,
349 357 bookmarks=self.bookmarks
350 358 )
351 359
352 360 @LazyProperty
353 361 def branches(self):
354 362 """
355 363 A `dict` which maps branch names to commit ids.
356 364 """
357 365 raise NotImplementedError
358 366
359 367 @LazyProperty
360 368 def branches_closed(self):
361 369 """
362 370 A `dict` which maps closed branch names to commit ids.
363 371 """
364 372 raise NotImplementedError
365 373
366 374 @LazyProperty
367 375 def bookmarks(self):
368 376 """
369 377 A `dict` which maps bookmark names to commit ids.
370 378 """
371 379 raise NotImplementedError
372 380
373 381 @LazyProperty
374 382 def tags(self):
375 383 """
376 384 A `dict` which maps tag names to commit ids.
377 385 """
378 386 raise NotImplementedError
379 387
380 388 @LazyProperty
381 389 def size(self):
382 390 """
383 391 Returns combined size in bytes for all repository files
384 392 """
385 393 tip = self.get_commit()
386 394 return tip.size
387 395
388 396 def size_at_commit(self, commit_id):
389 397 commit = self.get_commit(commit_id)
390 398 return commit.size
391 399
392 400 def _check_for_empty(self):
393 401 no_commits = len(self._commit_ids) == 0
394 402 if no_commits:
395 403 # check on remote to be sure
396 404 return self._remote.is_empty()
397 405 else:
398 406 return False
399 407
400 408 def is_empty(self):
401 409 if rhodecode.is_test:
402 410 return self._check_for_empty()
403 411
404 412 if self._is_empty is None:
405 413 # cache empty for production, but not tests
406 414 self._is_empty = self._check_for_empty()
407 415
408 416 return self._is_empty
409 417
410 418 @staticmethod
411 419 def check_url(url, config):
412 420 """
413 421 Function will check given url and try to verify if it's a valid
414 422 link.
415 423 """
416 424 raise NotImplementedError
417 425
418 426 @staticmethod
419 427 def is_valid_repository(path):
420 428 """
421 429 Check if given `path` contains a valid repository of this backend
422 430 """
423 431 raise NotImplementedError
424 432
425 433 # ==========================================================================
426 434 # COMMITS
427 435 # ==========================================================================
428 436
429 437 @CachedProperty
430 438 def commit_ids(self):
431 439 raise NotImplementedError
432 440
433 441 def append_commit_id(self, commit_id):
434 442 if commit_id not in self.commit_ids:
435 443 self._rebuild_cache(self.commit_ids + [commit_id])
436 444
437 445 # clear cache
438 446 self._invalidate_prop_cache('commit_ids')
439 447 self._is_empty = False
440 448
441 449 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
442 450 translate_tag=None, maybe_unreachable=False):
443 451 """
444 452 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
445 453 are both None, most recent commit is returned.
446 454
447 455 :param pre_load: Optional. List of commit attributes to load.
448 456
449 457 :raises ``EmptyRepositoryError``: if there are no commits
450 458 """
451 459 raise NotImplementedError
452 460
453 461 def __iter__(self):
454 462 for commit_id in self.commit_ids:
455 463 yield self.get_commit(commit_id=commit_id)
456 464
457 465 def get_commits(
458 466 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 467 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 468 """
461 469 Returns an iterator of `BaseCommit` objects from start to end,
462 470 end not inclusive. This should behave just like a list slice, i.e.
463 471 the end commit is not included.
464 472
465 473 :param start_id: None or str, must be a valid commit id
466 474 :param end_id: None or str, must be a valid commit id
467 475 :param start_date:
468 476 :param end_date:
469 477 :param branch_name:
470 478 :param show_hidden:
471 479 :param pre_load:
472 480 :param translate_tags:
473 481 """
474 482 raise NotImplementedError
475 483
476 484 def __getitem__(self, key):
477 485 """
478 486 Allows index based access to the commit objects of this repository.
479 487 """
480 488 pre_load = ["author", "branch", "date", "message", "parents"]
481 489 if isinstance(key, slice):
482 490 return self._get_range(key, pre_load)
483 491 return self.get_commit(commit_idx=key, pre_load=pre_load)
484 492
485 493 def _get_range(self, slice_obj, pre_load):
486 494 for commit_id in self.commit_ids.__getitem__(slice_obj):
487 495 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
488 496
489 497 def count(self):
490 498 return len(self.commit_ids)
491 499
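# Illustrative sketch, hypothetical helper: the __getitem__/__iter__/count()
# protocol above lets a concrete backend repository be treated much like a
# list of commits, including negative slices.
def _example_recent_short_ids(repo, n=5):
    if repo.count() == 0:
        return []
    # slicing goes through _get_range() and pre-loads common attributes
    return [commit.short_id for commit in repo[-n:]]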
492 500 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
493 501 """
494 502 Creates and returns a tag for the given ``commit_id``.
495 503
496 504 :param name: name for new tag
497 505 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
498 506 :param commit_id: commit id for which new tag would be created
499 507 :param message: message of the tag's commit
500 508 :param date: date of tag's commit
501 509
502 510 :raises TagAlreadyExistError: if tag with same name already exists
503 511 """
504 512 raise NotImplementedError
505 513
506 514 def remove_tag(self, name, user, message=None, date=None):
507 515 """
508 516 Removes tag with the given ``name``.
509 517
510 518 :param name: name of the tag to be removed
511 519 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
512 520 :param message: message of the tag's removal commit
513 521 :param date: date of tag's removal commit
514 522
515 523 :raises TagDoesNotExistError: if tag with given name does not exist
516 524 """
517 525 raise NotImplementedError
518 526
519 527 def get_diff(
520 528 self, commit1, commit2, path=None, ignore_whitespace=False,
521 529 context=3, path1=None):
522 530 """
523 531 Returns (git like) *diff*, as plain text. Shows changes introduced by
524 532 `commit2` since `commit1`.
525 533
526 534 :param commit1: Entry point from which diff is shown. Can be
527 535 ``self.EMPTY_COMMIT`` - in this case, patch showing all
528 536 the changes since empty state of the repository until `commit2`
529 537 :param commit2: Until which commit changes should be shown.
530 538 :param path: Can be set to a path of a file to create a diff of that
531 539 file. If `path1` is also set, this value is only associated to
532 540 `commit2`.
533 541 :param ignore_whitespace: If set to ``True``, would not show whitespace
534 542 changes. Defaults to ``False``.
535 543 :param context: How many lines before/after changed lines should be
536 544 shown. Defaults to ``3``.
537 545 :param path1: Can be set to a path to associate with `commit1`. This
538 546 parameter works only for backends which support diff generation for
539 547 different paths. Other backends will raise a `ValueError` if `path1`
540 548 is set and has a different value than `path`.
541 549 :param file_path: filter this diff by given path pattern
542 550 """
543 551 raise NotImplementedError
544 552
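# Illustrative sketch, hypothetical helper: a typical get_diff() call
# compares a commit against its first parent, which is also what
# BaseCommit.diff() (further below) does.
def _example_diff_against_parent(repo, commit_id):
    commit = repo.get_commit(commit_id=commit_id)
    parent = commit.first_parent  # EmptyCommit for a root commit
    return repo.get_diff(parent, commit, ignore_whitespace=False, context=3)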
545 553 def strip(self, commit_id, branch=None):
546 554 """
547 555 Strip given commit_id from the repository
548 556 """
549 557 raise NotImplementedError
550 558
551 559 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
552 560 """
553 561 Return the latest common ancestor commit, if one exists, between this
554 562 repo's `commit_id1` and `commit_id2` from `repo2`.
555 563
556 564 :param commit_id1: Commit id from this repository to use as a
557 565 target for the comparison.
558 566 :param commit_id2: Source commit id to use for comparison.
559 567 :param repo2: Source repository to use for comparison.
560 568 """
561 569 raise NotImplementedError
562 570
563 571 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
564 572 """
565 573 Compare this repository's revision `commit_id1` with `commit_id2`.
566 574
567 575 Returns a tuple(commits, ancestor) that would be merged from
568 576 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
569 577 will be returned as ancestor.
570 578
571 579 :param commit_id1: Commit id from this repository to use as a
572 580 target for the comparison.
573 581 :param commit_id2: Source commit id to use for comparison.
574 582 :param repo2: Source repository to use for comparison.
575 583 :param merge: If set to ``True`` will do a merge compare which also
576 584 returns the common ancestor.
577 585 :param pre_load: Optional. List of commit attributes to load.
578 586 """
579 587 raise NotImplementedError
580 588
581 589 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
582 590 user_name='', user_email='', message='', dry_run=False,
583 591 use_rebase=False, close_branch=False):
584 592 """
585 593 Merge the revisions specified in `source_ref` from `source_repo`
586 594 onto the `target_ref` of this repository.
587 595
588 596 `source_ref` and `target_ref` are named tuples with the following
589 597 fields `type`, `name` and `commit_id`.
590 598
591 599 Returns a MergeResponse object with the following fields:
592 600 'possible', 'executed', 'merge_ref', 'failure_reason' and
593 601 optional 'metadata'.
594 602
595 603 :param repo_id: `repo_id` target repo id.
596 604 :param workspace_id: `workspace_id` unique identifier.
597 605 :param target_ref: `target_ref` points to the commit on top of which
598 606 the `source_ref` should be merged.
599 607 :param source_repo: The repository that contains the commits to be
600 608 merged.
601 609 :param source_ref: `source_ref` points to the topmost commit from
602 610 the `source_repo` which should be merged.
603 611 :param user_name: Merge commit `user_name`.
604 612 :param user_email: Merge commit `user_email`.
605 613 :param message: Merge commit `message`.
606 614 :param dry_run: If `True` the merge will not take place.
607 615 :param use_rebase: If `True` commits from the source will be rebased
608 616 on top of the target instead of being merged.
609 617 :param close_branch: If `True` the branch will be closed before merging it
610 618 """
611 619 if dry_run:
612 620 message = message or settings.MERGE_DRY_RUN_MESSAGE
613 621 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
614 622 user_name = user_name or settings.MERGE_DRY_RUN_USER
615 623 else:
616 624 if not user_name:
617 625 raise ValueError('user_name cannot be empty')
618 626 if not user_email:
619 627 raise ValueError('user_email cannot be empty')
620 628 if not message:
621 629 raise ValueError('message cannot be empty')
622 630
623 631 try:
624 632 return self._merge_repo(
625 633 repo_id, workspace_id, target_ref, source_repo,
626 634 source_ref, message, user_name, user_email, dry_run=dry_run,
627 635 use_rebase=use_rebase, close_branch=close_branch)
628 636 except RepositoryError as exc:
629 637 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
630 638 return MergeResponse(
631 639 False, False, None, MergeFailureReason.UNKNOWN,
632 640 metadata={'exception': str(exc)})
633 641
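# Illustrative sketch, hypothetical refs and ids: a dry-run merge check.
# With dry_run=True the MERGE_DRY_RUN_* defaults apply, and any failure is
# reported via the returned MergeResponse rather than raised.
def _example_merge_check(target_repo, source_repo, repo_id, pull_request_id):
    target_ref = Reference(
        'branch', 'default', target_repo.branches.get('default'))
    source_ref = Reference(
        'branch', 'feature-x', source_repo.branches.get('feature-x'))
    workspace_id = 'pr-%s' % pull_request_id
    response = target_repo.merge(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        dry_run=True)
    return response.possible, response.merge_status_message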
634 642 def _merge_repo(self, repo_id, workspace_id, target_ref,
635 643 source_repo, source_ref, merge_message,
636 644 merger_name, merger_email, dry_run=False,
637 645 use_rebase=False, close_branch=False):
638 646 """Internal implementation of merge."""
639 647 raise NotImplementedError
640 648
641 649 def _maybe_prepare_merge_workspace(
642 650 self, repo_id, workspace_id, target_ref, source_ref):
643 651 """
644 652 Create the merge workspace.
645 653
646 654 :param workspace_id: `workspace_id` unique identifier.
647 655 """
648 656 raise NotImplementedError
649 657
650 658 @classmethod
651 659 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
652 660 """
653 661 Legacy version that was used before. We still need it for
654 662 backward compat
655 663 """
656 664 return os.path.join(
657 665 os.path.dirname(repo_path),
658 666 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
659 667
660 668 @classmethod
661 669 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
662 670 # The name of the shadow repository must start with '.', so it is
663 671 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
664 672 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
665 673 if os.path.exists(legacy_repository_path):
666 674 return legacy_repository_path
667 675 else:
668 676 return os.path.join(
669 677 os.path.dirname(repo_path),
670 678 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
671 679
672 680 def cleanup_merge_workspace(self, repo_id, workspace_id):
673 681 """
674 682 Remove merge workspace.
675 683
676 684 This function MUST not fail in case there is no workspace associated with
677 685 the given `workspace_id`.
678 686
679 687 :param workspace_id: `workspace_id` unique identifier.
680 688 """
681 689 shadow_repository_path = self._get_shadow_repository_path(
682 690 self.path, repo_id, workspace_id)
683 691 shadow_repository_path_del = '{}.{}.delete'.format(
684 692 shadow_repository_path, time.time())
685 693
686 694 # move the shadow repo, so it never conflicts with the one used.
687 695 # we use this method because shutil.rmtree had some edge case problems
688 696 # removing symlinked repositories
689 697 if not os.path.isdir(shadow_repository_path):
690 698 return
691 699
692 700 shutil.move(shadow_repository_path, shadow_repository_path_del)
693 701 try:
694 702 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
695 703 except Exception:
696 704 log.exception('Failed to gracefully remove shadow repo under %s',
697 705 shadow_repository_path_del)
698 706 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
699 707
700 708 # ========== #
701 709 # COMMIT API #
702 710 # ========== #
703 711
704 712 @LazyProperty
705 713 def in_memory_commit(self):
706 714 """
707 715 Returns :class:`InMemoryCommit` object for this repository.
708 716 """
709 717 raise NotImplementedError
710 718
711 719 # ======================== #
712 720 # UTILITIES FOR SUBCLASSES #
713 721 # ======================== #
714 722
715 723 def _validate_diff_commits(self, commit1, commit2):
716 724 """
717 725 Validates that the given commits are related to this repository.
718 726
719 727 Intended as a utility for sub classes to have a consistent validation
720 728 of input parameters in methods like :meth:`get_diff`.
721 729 """
722 730 self._validate_commit(commit1)
723 731 self._validate_commit(commit2)
724 732 if (isinstance(commit1, EmptyCommit) and
725 733 isinstance(commit2, EmptyCommit)):
726 734 raise ValueError("Cannot compare two empty commits")
727 735
728 736 def _validate_commit(self, commit):
729 737 if not isinstance(commit, BaseCommit):
730 738 raise TypeError(
731 739 "%s is not of type BaseCommit" % repr(commit))
732 740 if commit.repository != self and not isinstance(commit, EmptyCommit):
733 741 raise ValueError(
734 742 "Commit %s must be a valid commit from this repository %s, "
735 743 "related to this repository instead %s." %
736 744 (commit, self, commit.repository))
737 745
738 746 def _validate_commit_id(self, commit_id):
739 747 if not isinstance(commit_id, compat.string_types):
740 748 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
741 749
742 750 def _validate_commit_idx(self, commit_idx):
743 751 if not isinstance(commit_idx, (int, long)):
744 752 raise TypeError("commit_idx must be a numeric value")
745 753
746 754 def _validate_branch_name(self, branch_name):
747 755 if branch_name and branch_name not in self.branches_all:
748 756 msg = ("Branch %s not found in %s" % (branch_name, self))
749 757 raise BranchDoesNotExistError(msg)
750 758
751 759 #
752 760 # Supporting deprecated API parts
753 761 # TODO: johbo: consider to move this into a mixin
754 762 #
755 763
756 764 @property
757 765 def EMPTY_CHANGESET(self):
758 766 warnings.warn(
759 767 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
760 768 return self.EMPTY_COMMIT_ID
761 769
762 770 @property
763 771 def revisions(self):
764 772 warnings.warn("Use commits attribute instead", DeprecationWarning)
765 773 return self.commit_ids
766 774
767 775 @revisions.setter
768 776 def revisions(self, value):
769 777 warnings.warn("Use commits attribute instead", DeprecationWarning)
770 778 self.commit_ids = value
771 779
772 780 def get_changeset(self, revision=None, pre_load=None):
773 781 warnings.warn("Use get_commit instead", DeprecationWarning)
774 782 commit_id = None
775 783 commit_idx = None
776 784 if isinstance(revision, compat.string_types):
777 785 commit_id = revision
778 786 else:
779 787 commit_idx = revision
780 788 return self.get_commit(
781 789 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
782 790
783 791 def get_changesets(
784 792 self, start=None, end=None, start_date=None, end_date=None,
785 793 branch_name=None, pre_load=None):
786 794 warnings.warn("Use get_commits instead", DeprecationWarning)
787 795 start_id = self._revision_to_commit(start)
788 796 end_id = self._revision_to_commit(end)
789 797 return self.get_commits(
790 798 start_id=start_id, end_id=end_id, start_date=start_date,
791 799 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
792 800
793 801 def _revision_to_commit(self, revision):
794 802 """
795 803 Translates a revision to a commit_id
796 804
797 805 Helps to support the old changeset based API which allows to use
798 806 commit ids and commit indices interchangeable.
799 807 """
800 808 if revision is None:
801 809 return revision
802 810
803 811 if isinstance(revision, compat.string_types):
804 812 commit_id = revision
805 813 else:
806 814 commit_id = self.commit_ids[revision]
807 815 return commit_id
808 816
809 817 @property
810 818 def in_memory_changeset(self):
811 819 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
812 820 return self.in_memory_commit
813 821
814 822 def get_path_permissions(self, username):
815 823 """
816 824 Returns a path permission checker or None if not supported
817 825
818 826 :param username: session user name
819 827 :return: an instance of BasePathPermissionChecker or None
820 828 """
821 829 return None
822 830
823 831 def install_hooks(self, force=False):
824 832 return self._remote.install_hooks(force)
825 833
826 834 def get_hooks_info(self):
827 835 return self._remote.get_hooks_info()
828 836
829 837
830 838 class BaseCommit(object):
831 839 """
832 840 Each backend should implement its commit representation.
833 841
834 842 **Attributes**
835 843
836 844 ``repository``
837 845 repository object within which commit exists
838 846
839 847 ``id``
840 848 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
841 849 just ``tip``.
842 850
843 851 ``raw_id``
844 852 raw commit representation (i.e. full 40 length sha for git
845 853 backend)
846 854
847 855 ``short_id``
848 856 shortened (if applicable) version of ``raw_id``; a simple
849 857 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
850 858 as ``raw_id`` for subversion
851 859
852 860 ``idx``
853 861 commit index
854 862
855 863 ``files``
856 864 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
857 865
858 866 ``dirs``
859 867 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
860 868
861 869 ``nodes``
862 870 combined list of ``Node`` objects
863 871
864 872 ``author``
865 873 author of the commit, as unicode
866 874
867 875 ``message``
868 876 message of the commit, as unicode
869 877
870 878 ``parents``
871 879 list of parent commits
872 880
873 881 """
874 882
875 883 branch = None
876 884 """
877 885 Depending on the backend this should be set to the branch name of the
878 886 commit. Backends not supporting branches on commits should leave this
879 887 value as ``None``.
880 888 """
881 889
882 890 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
883 891 """
884 892 This template is used to generate a default prefix for repository archives
885 893 if no prefix has been specified.
886 894 """
887 895
888 896 def __str__(self):
889 897 return '<%s at %s:%s>' % (
890 898 self.__class__.__name__, self.idx, self.short_id)
891 899
892 900 def __repr__(self):
893 901 return self.__str__()
894 902
895 903 def __unicode__(self):
896 904 return u'%s:%s' % (self.idx, self.short_id)
897 905
898 906 def __eq__(self, other):
899 907 same_instance = isinstance(other, self.__class__)
900 908 return same_instance and self.raw_id == other.raw_id
901 909
902 910 def __json__(self):
903 911 parents = []
904 912 try:
905 913 for parent in self.parents:
906 914 parents.append({'raw_id': parent.raw_id})
907 915 except NotImplementedError:
908 916 # empty commit doesn't have parents implemented
909 917 pass
910 918
911 919 return {
912 920 'short_id': self.short_id,
913 921 'raw_id': self.raw_id,
914 922 'revision': self.idx,
915 923 'message': self.message,
916 924 'date': self.date,
917 925 'author': self.author,
918 926 'parents': parents,
919 927 'branch': self.branch
920 928 }
921 929
922 930 def __getstate__(self):
923 931 d = self.__dict__.copy()
924 932 d.pop('_remote', None)
925 933 d.pop('repository', None)
926 934 return d
927 935
928 936 def serialize(self):
929 937 return self.__json__()
930 938
931 939 def _get_refs(self):
932 940 return {
933 941 'branches': [self.branch] if self.branch else [],
934 942 'bookmarks': getattr(self, 'bookmarks', []),
935 943 'tags': self.tags
936 944 }
937 945
938 946 @LazyProperty
939 947 def last(self):
940 948 """
941 949 ``True`` if this is last commit in repository, ``False``
942 950 otherwise; trying to access this attribute while there are no
943 951 commits would raise `EmptyRepositoryError`
944 952 """
945 953 if self.repository is None:
946 954 raise CommitError("Cannot check if it's most recent commit")
947 955 return self.raw_id == self.repository.commit_ids[-1]
948 956
949 957 @LazyProperty
950 958 def parents(self):
951 959 """
952 960 Returns list of parent commits.
953 961 """
954 962 raise NotImplementedError
955 963
956 964 @LazyProperty
957 965 def first_parent(self):
958 966 """
959 967 Returns the first parent commit, or an ``EmptyCommit`` if there is no parent.
960 968 """
961 969 return self.parents[0] if self.parents else EmptyCommit()
962 970
963 971 @property
964 972 def merge(self):
965 973 """
966 974 Returns ``True`` if the commit is a merge (i.e. has more than one parent).
967 975 """
968 976 return len(self.parents) > 1
969 977
970 978 @LazyProperty
971 979 def children(self):
972 980 """
973 981 Returns list of child commits.
974 982 """
975 983 raise NotImplementedError
976 984
977 985 @LazyProperty
978 986 def id(self):
979 987 """
980 988 Returns string identifying this commit.
981 989 """
982 990 raise NotImplementedError
983 991
984 992 @LazyProperty
985 993 def raw_id(self):
986 994 """
987 995 Returns raw string identifying this commit.
988 996 """
989 997 raise NotImplementedError
990 998
991 999 @LazyProperty
992 1000 def short_id(self):
993 1001 """
994 1002 Returns shortened version of ``raw_id`` attribute, as string,
995 1003 identifying this commit, useful for presentation to users.
996 1004 """
997 1005 raise NotImplementedError
998 1006
999 1007 @LazyProperty
1000 1008 def idx(self):
1001 1009 """
1002 1010 Returns integer identifying this commit.
1003 1011 """
1004 1012 raise NotImplementedError
1005 1013
1006 1014 @LazyProperty
1007 1015 def committer(self):
1008 1016 """
1009 1017 Returns committer for this commit
1010 1018 """
1011 1019 raise NotImplementedError
1012 1020
1013 1021 @LazyProperty
1014 1022 def committer_name(self):
1015 1023 """
1016 1024 Returns committer name for this commit
1017 1025 """
1018 1026
1019 1027 return author_name(self.committer)
1020 1028
1021 1029 @LazyProperty
1022 1030 def committer_email(self):
1023 1031 """
1024 1032 Returns committer email address for this commit
1025 1033 """
1026 1034
1027 1035 return author_email(self.committer)
1028 1036
1029 1037 @LazyProperty
1030 1038 def author(self):
1031 1039 """
1032 1040 Returns author for this commit
1033 1041 """
1034 1042
1035 1043 raise NotImplementedError
1036 1044
1037 1045 @LazyProperty
1038 1046 def author_name(self):
1039 1047 """
1040 1048 Returns author name for this commit
1041 1049 """
1042 1050
1043 1051 return author_name(self.author)
1044 1052
1045 1053 @LazyProperty
1046 1054 def author_email(self):
1047 1055 """
1048 1056 Returns author email address for this commit
1049 1057 """
1050 1058
1051 1059 return author_email(self.author)
1052 1060
1053 1061 def get_file_mode(self, path):
1054 1062 """
1055 1063 Returns stat mode of the file at `path`.
1056 1064 """
1057 1065 raise NotImplementedError
1058 1066
1059 1067 def is_link(self, path):
1060 1068 """
1061 1069 Returns ``True`` if given `path` is a symlink
1062 1070 """
1063 1071 raise NotImplementedError
1064 1072
1065 1073 def is_node_binary(self, path):
1066 1074 """
1067 1075 Returns ``True`` if the given path is a binary file
1068 1076 """
1069 1077 raise NotImplementedError
1070 1078
1071 1079 def get_file_content(self, path):
1072 1080 """
1073 1081 Returns content of the file at the given `path`.
1074 1082 """
1075 1083 raise NotImplementedError
1076 1084
1077 1085 def get_file_content_streamed(self, path):
1078 1086 """
1079 1087 returns a streaming response from vcsserver with file content
1080 1088 """
1081 1089 raise NotImplementedError
1082 1090
1083 1091 def get_file_size(self, path):
1084 1092 """
1085 1093 Returns size of the file at the given `path`.
1086 1094 """
1087 1095 raise NotImplementedError
1088 1096
1089 1097 def get_path_commit(self, path, pre_load=None):
1090 1098 """
1091 1099 Returns last commit of the file at the given `path`.
1092 1100
1093 1101 :param pre_load: Optional. List of commit attributes to load.
1094 1102 """
1095 1103 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1096 1104 if not commits:
1097 1105 raise RepositoryError(
1098 1106 'Failed to fetch history for path {}. '
1099 1107 'Please check if such path exists in your repository'.format(
1100 1108 path))
1101 1109 return commits[0]
1102 1110
1103 1111 def get_path_history(self, path, limit=None, pre_load=None):
1104 1112 """
1105 1113 Returns history of file as reversed list of :class:`BaseCommit`
1106 1114 objects for which file at given `path` has been modified.
1107 1115
1108 1116 :param limit: Optional. Allows to limit the size of the returned
1109 1117 history. This is intended as a hint to the underlying backend, so
1110 1118 that it can apply optimizations depending on the limit.
1111 1119 :param pre_load: Optional. List of commit attributes to load.
1112 1120 """
1113 1121 raise NotImplementedError
1114 1122
1115 1123 def get_file_annotate(self, path, pre_load=None):
1116 1124 """
1117 1125 Returns a generator of four element tuples with
1118 1126 lineno, sha, commit lazy loader and line
1119 1127
1120 1128 :param pre_load: Optional. List of commit attributes to load.
1121 1129 """
1122 1130 raise NotImplementedError
1123 1131
1124 1132 def get_nodes(self, path):
1125 1133 """
1126 1134 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1127 1135 state of commit at the given ``path``.
1128 1136
1129 1137 :raises ``CommitError``: if node at the given ``path`` is not
1130 1138 instance of ``DirNode``
1131 1139 """
1132 1140 raise NotImplementedError
1133 1141
1134 1142 def get_node(self, path):
1135 1143 """
1136 1144 Returns ``Node`` object from the given ``path``.
1137 1145
1138 1146 :raises ``NodeDoesNotExistError``: if there is no node at the given
1139 1147 ``path``
1140 1148 """
1141 1149 raise NotImplementedError
1142 1150
1143 1151 def get_largefile_node(self, path):
1144 1152 """
1145 1153 Returns the path to largefile from Mercurial/Git-lfs storage.
1146 1154 or None if it's not a largefile node
1147 1155 """
1148 1156 return None
1149 1157
1150 1158 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1151 1159 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1152 1160 """
1153 1161 Creates an archive containing the contents of the repository.
1154 1162
1155 1163 :param archive_dest_path: path of the file in which to create the archive.
1156 1164 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1157 1165 :param prefix: name of root directory in archive.
1158 1166 Default is repository name and commit's short_id joined with dash:
1159 1167 ``"{repo_name}-{short_id}"``.
1160 1168 :param write_metadata: write a metadata file into archive.
1161 1169 :param mtime: custom modification time for archive creation, defaults
1162 1170 to time.time() if not given.
1163 1171 :param archive_at_path: pack files at this path (default '/')
1164 1172
1165 1173 :raise VCSError: If prefix has a problem.
1166 1174 """
1167 1175 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1168 1176 if kind not in allowed_kinds:
1169 1177 raise ImproperArchiveTypeError(
1170 1178 'Archive kind (%s) not supported use one of %s' %
1171 1179 (kind, allowed_kinds))
1172 1180
1173 1181 prefix = self._validate_archive_prefix(prefix)
1174 1182
1175 1183 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1176 1184
1177 1185 file_info = []
1178 1186 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1179 1187 for _r, _d, files in cur_rev.walk(archive_at_path):
1180 1188 for f in files:
1181 1189 f_path = os.path.join(prefix, f.path)
1182 1190 file_info.append(
1183 1191 (f_path, f.mode, f.is_link(), f.raw_bytes))
1184 1192
1185 1193 if write_metadata:
1186 1194 metadata = [
1187 1195 ('repo_name', self.repository.name),
1188 1196 ('commit_id', self.raw_id),
1189 1197 ('mtime', mtime),
1190 1198 ('branch', self.branch),
1191 1199 ('tags', ','.join(self.tags)),
1192 1200 ]
1193 1201 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1194 1202 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1195 1203
1196 1204 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1197 1205
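# Illustrative sketch, hypothetical path: archiving the commit's tree as a
# tarball with the default "{repo_name}-{short_id}" prefix plus the
# optional ``.archival.txt`` metadata file.
def _example_archive(commit, dest_path='/tmp/example-archive.tgz'):
    commit.archive_repo(dest_path, kind='tgz', write_metadata=True)
    return dest_path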
1198 1206 def _validate_archive_prefix(self, prefix):
1199 1207 if prefix is None:
1200 1208 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1201 1209 repo_name=safe_str(self.repository.name),
1202 1210 short_id=self.short_id)
1203 1211 elif not isinstance(prefix, str):
1204 1212 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1205 1213 elif prefix.startswith('/'):
1206 1214 raise VCSError("Prefix cannot start with leading slash")
1207 1215 elif prefix.strip() == '':
1208 1216 raise VCSError("Prefix cannot be empty")
1209 1217 return prefix
1210 1218
1211 1219 @LazyProperty
1212 1220 def root(self):
1213 1221 """
1214 1222 Returns ``RootNode`` object for this commit.
1215 1223 """
1216 1224 return self.get_node('')
1217 1225
1218 1226 def next(self, branch=None):
1219 1227 """
1219 1227 Returns the next commit from the current one; if branch is given it
1220 1228 will return the next commit belonging to this branch
1222 1230
1223 1231 :param branch: show commits within the given named branch
1224 1232 """
1225 1233 indexes = xrange(self.idx + 1, self.repository.count())
1226 1234 return self._find_next(indexes, branch)
1227 1235
1228 1236 def prev(self, branch=None):
1229 1237 """
1229 1237 Returns the previous commit from the current one; if branch is given
1230 1238 it will return the previous commit belonging to this branch
1232 1240
1233 1241 :param branch: show commit within the given named branch
1234 1242 """
1235 1243 indexes = xrange(self.idx - 1, -1, -1)
1236 1244 return self._find_next(indexes, branch)
1237 1245
1238 1246 def _find_next(self, indexes, branch=None):
1239 1247 if branch and self.branch != branch:
1240 1248 raise VCSError('Branch option used on commit not belonging '
1241 1249 'to that branch')
1242 1250
1243 1251 for next_idx in indexes:
1244 1252 commit = self.repository.get_commit(commit_idx=next_idx)
1245 1253 if branch and branch != commit.branch:
1246 1254 continue
1247 1255 return commit
1248 1256 raise CommitDoesNotExistError
1249 1257
1250 1258 def diff(self, ignore_whitespace=True, context=3):
1251 1259 """
1252 1260 Returns a `Diff` object representing the change made by this commit.
1253 1261 """
1254 1262 parent = self.first_parent
1255 1263 diff = self.repository.get_diff(
1256 1264 parent, self,
1257 1265 ignore_whitespace=ignore_whitespace,
1258 1266 context=context)
1259 1267 return diff
1260 1268
1261 1269 @LazyProperty
1262 1270 def added(self):
1263 1271 """
1264 1272 Returns list of added ``FileNode`` objects.
1265 1273 """
1266 1274 raise NotImplementedError
1267 1275
1268 1276 @LazyProperty
1269 1277 def changed(self):
1270 1278 """
1271 1279 Returns list of modified ``FileNode`` objects.
1272 1280 """
1273 1281 raise NotImplementedError
1274 1282
1275 1283 @LazyProperty
1276 1284 def removed(self):
1277 1285 """
1278 1286 Returns list of removed ``FileNode`` objects.
1279 1287 """
1280 1288 raise NotImplementedError
1281 1289
1282 1290 @LazyProperty
1283 1291 def size(self):
1284 1292 """
1285 1293 Returns total number of bytes from contents of all filenodes.
1286 1294 """
1287 1295 return sum((node.size for node in self.get_filenodes_generator()))
1288 1296
1289 1297 def walk(self, topurl=''):
1290 1298 """
1291 1299 Similar to the os.walk method. Instead of the filesystem it walks through
1292 1300 the commit starting at the given ``topurl``. Returns a generator of tuples
1293 1301 (topnode, dirnodes, filenodes).
1294 1302 """
1295 1303 topnode = self.get_node(topurl)
1296 1304 if not topnode.is_dir():
1297 1305 return
1298 1306 yield (topnode, topnode.dirs, topnode.files)
1299 1307 for dirnode in topnode.dirs:
1300 1308 for tup in self.walk(dirnode.path):
1301 1309 yield tup
1302 1310
1303 1311 def get_filenodes_generator(self):
1304 1312 """
1305 1313 Returns generator that yields *all* file nodes.
1306 1314 """
1307 1315 for topnode, dirs, files in self.walk():
1308 1316 for node in files:
1309 1317 yield node
1310 1318
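# Illustrative sketch, hypothetical helper: walk() mirrors os.walk over the
# commit tree, so collecting every file path under a directory looks like:
def _example_list_file_paths(commit, topurl=''):
    paths = []
    for topnode, dirnodes, filenodes in commit.walk(topurl):
        for filenode in filenodes:
            paths.append(filenode.path)
    return paths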
1311 1319 #
1312 1320 # Utilities for sub classes to support consistent behavior
1313 1321 #
1314 1322
1315 1323 def no_node_at_path(self, path):
1316 1324 return NodeDoesNotExistError(
1317 1325 u"There is no file nor directory at the given path: "
1318 1326 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1319 1327
1320 1328 def _fix_path(self, path):
1321 1329 """
1322 1330 Paths are stored without trailing slash so we need to get rid off it if
1323 1331 needed.
1324 1332 """
1325 1333 return path.rstrip('/')
1326 1334
1327 1335 #
1328 1336 # Deprecated API based on changesets
1329 1337 #
1330 1338
1331 1339 @property
1332 1340 def revision(self):
1333 1341 warnings.warn("Use idx instead", DeprecationWarning)
1334 1342 return self.idx
1335 1343
1336 1344 @revision.setter
1337 1345 def revision(self, value):
1338 1346 warnings.warn("Use idx instead", DeprecationWarning)
1339 1347 self.idx = value
1340 1348
1341 1349 def get_file_changeset(self, path):
1342 1350 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1343 1351 return self.get_path_commit(path)
1344 1352
1345 1353
1346 1354 class BaseChangesetClass(type):
1347 1355
1348 1356 def __instancecheck__(self, instance):
1349 1357 return isinstance(instance, BaseCommit)
1350 1358
1351 1359
1352 1360 class BaseChangeset(BaseCommit):
1353 1361
1354 1362 __metaclass__ = BaseChangesetClass
1355 1363
1356 1364 def __new__(cls, *args, **kwargs):
1357 1365 warnings.warn(
1358 1366 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1359 1367 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1360 1368
1361 1369
1362 1370 class BaseInMemoryCommit(object):
1363 1371 """
1364 1372 Represents differences between repository's state (most recent head) and
1365 1373 changes made *in place*.
1366 1374
1367 1375 **Attributes**
1368 1376
1369 1377 ``repository``
1370 1378 repository object for this in-memory-commit
1371 1379
1372 1380 ``added``
1373 1381 list of ``FileNode`` objects marked as *added*
1374 1382
1375 1383 ``changed``
1376 1384 list of ``FileNode`` objects marked as *changed*
1377 1385
1378 1386 ``removed``
1379 1387 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1380 1388 *removed*
1381 1389
1382 1390 ``parents``
1383 1391 list of :class:`BaseCommit` instances representing parents of
1384 1392 in-memory commit. Should always be 2-element sequence.
1385 1393
1386 1394 """
1387 1395
1388 1396 def __init__(self, repository):
1389 1397 self.repository = repository
1390 1398 self.added = []
1391 1399 self.changed = []
1392 1400 self.removed = []
1393 1401 self.parents = []
1394 1402
1395 1403 def add(self, *filenodes):
1396 1404 """
1397 1405 Marks given ``FileNode`` objects as *to be committed*.
1398 1406
1399 1407 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1400 1408 latest commit
1401 1409 :raises ``NodeAlreadyAddedError``: if node with same path is already
1402 1410 marked as *added*
1403 1411 """
1404 1412 # Check if not already marked as *added* first
1405 1413 for node in filenodes:
1406 1414 if node.path in (n.path for n in self.added):
1407 1415 raise NodeAlreadyAddedError(
1408 1416 "Such FileNode %s is already marked for addition"
1409 1417 % node.path)
1410 1418 for node in filenodes:
1411 1419 self.added.append(node)
1412 1420
1413 1421 def change(self, *filenodes):
1414 1422 """
1415 1423 Marks given ``FileNode`` objects to be *changed* in next commit.
1416 1424
1417 1425 :raises ``EmptyRepositoryError``: if there are no commits yet
1418 1426 :raises ``NodeAlreadyExistsError``: if node with same path is already
1419 1427 marked to be *changed*
1420 1428 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1421 1429 marked to be *removed*
1422 1430 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1423 1431 commit
1424 1432 :raises ``NodeNotChangedError``: if node hasn't really been changed
1425 1433 """
1426 1434 for node in filenodes:
1427 1435 if node.path in (n.path for n in self.removed):
1428 1436 raise NodeAlreadyRemovedError(
1429 1437 "Node at %s is already marked as removed" % node.path)
1430 1438 try:
1431 1439 self.repository.get_commit()
1432 1440 except EmptyRepositoryError:
1433 1441 raise EmptyRepositoryError(
1434 1442 "Nothing to change - try to *add* new nodes rather than "
1435 1443 "changing them")
1436 1444 for node in filenodes:
1437 1445 if node.path in (n.path for n in self.changed):
1438 1446 raise NodeAlreadyChangedError(
1439 1447 "Node at '%s' is already marked as changed" % node.path)
1440 1448 self.changed.append(node)
1441 1449
1442 1450 def remove(self, *filenodes):
1443 1451 """
1444 1452 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1445 1453 *removed* in next commit.
1446 1454
1447 1455 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1448 1456 be *removed*
1449 1457 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1450 1458 be *changed*
1451 1459 """
1452 1460 for node in filenodes:
1453 1461 if node.path in (n.path for n in self.removed):
1454 1462 raise NodeAlreadyRemovedError(
1455 1463 "Node is already marked to for removal at %s" % node.path)
1456 1464 if node.path in (n.path for n in self.changed):
1457 1465 raise NodeAlreadyChangedError(
1458 1466 "Node is already marked to be changed at %s" % node.path)
1459 1467 # We only mark node as *removed* - real removal is done by
1460 1468 # commit method
1461 1469 self.removed.append(node)
1462 1470
1463 1471 def reset(self):
1464 1472 """
1465 1473 Resets this instance to initial state (cleans ``added``, ``changed``
1466 1474 and ``removed`` lists).
1467 1475 """
1468 1476 self.added = []
1469 1477 self.changed = []
1470 1478 self.removed = []
1471 1479 self.parents = []
1472 1480
1473 1481 def get_ipaths(self):
1474 1482 """
1475 1483 Returns generator of paths from nodes marked as added, changed or
1476 1484 removed.
1477 1485 """
1478 1486 for node in itertools.chain(self.added, self.changed, self.removed):
1479 1487 yield node.path
1480 1488
1481 1489 def get_paths(self):
1482 1490 """
1483 1491 Returns list of paths from nodes marked as added, changed or removed.
1484 1492 """
1485 1493 return list(self.get_ipaths())
1486 1494
1487 1495 def check_integrity(self, parents=None):
1488 1496 """
1489 1497 Checks in-memory commit's integrity. Also, sets parents if not
1490 1498 already set.
1491 1499
1492 1500 :raises CommitError: if any error occurs (e.g.
1493 1501 ``NodeDoesNotExistError``).
1494 1502 """
1495 1503 if not self.parents:
1496 1504 parents = parents or []
1497 1505 if len(parents) == 0:
1498 1506 try:
1499 1507 parents = [self.repository.get_commit(), None]
1500 1508 except EmptyRepositoryError:
1501 1509 parents = [None, None]
1502 1510 elif len(parents) == 1:
1503 1511 parents += [None]
1504 1512 self.parents = parents
1505 1513
1506 1514 # Local parents, only if not None
1507 1515 parents = [p for p in self.parents if p]
1508 1516
1509 1517 # Check nodes marked as added
1510 1518 for p in parents:
1511 1519 for node in self.added:
1512 1520 try:
1513 1521 p.get_node(node.path)
1514 1522 except NodeDoesNotExistError:
1515 1523 pass
1516 1524 else:
1517 1525 raise NodeAlreadyExistsError(
1518 1526 "Node `%s` already exists at %s" % (node.path, p))
1519 1527
1520 1528 # Check nodes marked as changed
1521 1529 missing = set(self.changed)
1522 1530 not_changed = set(self.changed)
1523 1531 if self.changed and not parents:
1524 1532 raise NodeDoesNotExistError(str(self.changed[0].path))
1525 1533 for p in parents:
1526 1534 for node in self.changed:
1527 1535 try:
1528 1536 old = p.get_node(node.path)
1529 1537 missing.remove(node)
1530 1538 # if content actually changed, remove node from not_changed
1531 1539 if old.content != node.content:
1532 1540 not_changed.remove(node)
1533 1541 except NodeDoesNotExistError:
1534 1542 pass
1535 1543 if self.changed and missing:
1536 1544 raise NodeDoesNotExistError(
1537 1545 "Node `%s` marked as modified but missing in parents: %s"
1538 1546 % (node.path, parents))
1539 1547
1540 1548 if self.changed and not_changed:
1541 1549 raise NodeNotChangedError(
1542 1550 "Node `%s` wasn't actually changed (parents: %s)"
1543 1551 % (not_changed.pop().path, parents))
1544 1552
1545 1553 # Check nodes marked as removed
1546 1554 if self.removed and not parents:
1547 1555 raise NodeDoesNotExistError(
1548 1556 "Cannot remove node at %s as there "
1549 1557 "were no parents specified" % self.removed[0].path)
1550 1558 really_removed = set()
1551 1559 for p in parents:
1552 1560 for node in self.removed:
1553 1561 try:
1554 1562 p.get_node(node.path)
1555 1563 really_removed.add(node)
1556 1564 except CommitError:
1557 1565 pass
1558 1566 not_removed = set(self.removed) - really_removed
1559 1567 if not_removed:
1560 1568 # TODO: johbo: This code branch does not seem to be covered
1561 1569 raise NodeDoesNotExistError(
1562 1570 "Cannot remove node at %s from "
1563 1571 "following parents: %s" % (not_removed, parents))
1564 1572
1565 1573 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1566 1574 """
1567 1575 Performs in-memory commit (doesn't check workdir in any way) and
1568 1576 returns newly created :class:`BaseCommit`. Updates repository's
1569 1577 attribute `commits`.
1570 1578
1571 1579 .. note::
1572 1580
1573 1581 When overriding this method, each backend should call
1574 1582 ``self.check_integrity(parents)`` first.
1575 1583
1576 1584 :param message: message of the commit
1577 1585 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1578 1586 :param parents: single parent or sequence of parents from which commit
1579 1587 would be derived
1580 1588 :param date: ``datetime.datetime`` instance. Defaults to
1581 1589 ``datetime.datetime.now()``.
1582 1590 :param branch: branch name, as string. If none given, the backend's default
1583 1591 branch is used.
1584 1592
1585 1593 :raises ``CommitError``: if any error occurs while committing
1586 1594 """
1587 1595 raise NotImplementedError
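A minimal sketch of how the in-memory commit API above is typically driven, assuming an already initialised backend ``repo`` instance and the ``FileNode`` class from ``rhodecode.lib.vcs.nodes`` (these names are assumptions, not taken from this diff):

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit  # backend-specific BaseInMemoryCommit subclass
    imc.add(FileNode('docs/readme.rst', content='hello'))
    new_commit = imc.commit(
        message=u'Add readme',
        author=u'Joe Doe <joe.doe@example.com>')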
1588 1596
1589 1597
1590 1598 class BaseInMemoryChangesetClass(type):
1591 1599
1592 1600 def __instancecheck__(self, instance):
1593 1601 return isinstance(instance, BaseInMemoryCommit)
1594 1602
1595 1603
1596 1604 class BaseInMemoryChangeset(BaseInMemoryCommit):
1597 1605
1598 1606 __metaclass__ = BaseInMemoryChangesetClass
1599 1607
1600 1608 def __new__(cls, *args, **kwargs):
1601 1609 warnings.warn(
1602 1610 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1603 1611 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1604 1612
1605 1613
1606 1614 class EmptyCommit(BaseCommit):
1607 1615 """
1608 1616 A dummy empty commit. It's possible to pass a hash when creating
1609 1617 an EmptyCommit
1610 1618 """
1611 1619
1612 1620 def __init__(
1613 1621 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1614 1622 message='', author='', date=None):
1615 1623 self._empty_commit_id = commit_id
1616 1624 # TODO: johbo: Solve idx parameter, default value does not make
1617 1625 # too much sense
1618 1626 self.idx = idx
1619 1627 self.message = message
1620 1628 self.author = author
1621 1629 self.date = date or datetime.datetime.fromtimestamp(0)
1622 1630 self.repository = repo
1623 1631 self.alias = alias
1624 1632
1625 1633 @LazyProperty
1626 1634 def raw_id(self):
1627 1635 """
1628 1636 Returns raw string identifying this commit, useful for web
1629 1637 representation.
1630 1638 """
1631 1639
1632 1640 return self._empty_commit_id
1633 1641
1634 1642 @LazyProperty
1635 1643 def branch(self):
1636 1644 if self.alias:
1637 1645 from rhodecode.lib.vcs.backends import get_backend
1638 1646 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1639 1647
1640 1648 @LazyProperty
1641 1649 def short_id(self):
1642 1650 return self.raw_id[:12]
1643 1651
1644 1652 @LazyProperty
1645 1653 def id(self):
1646 1654 return self.raw_id
1647 1655
1648 1656 def get_path_commit(self, path):
1649 1657 return self
1650 1658
1651 1659 def get_file_content(self, path):
1652 1660 return u''
1653 1661
1654 1662 def get_file_content_streamed(self, path):
1655 1663 yield self.get_file_content()
1656 1664
1657 1665 def get_file_size(self, path):
1658 1666 return 0
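``EmptyCommit`` is typically used as a stand-in when a repository has no commits yet; a small illustration (the ``repo`` value is assumed to be an existing backend instance):

    empty = EmptyCommit(repo=repo, alias='git')
    empty.raw_id                          # EMPTY_COMMIT_ID
    empty.branch                          # the backend's default branch name
    empty.get_file_content('any/path')    # u''
    empty.get_file_size('any/path')       # 0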
1659 1667
1660 1668
1661 1669 class EmptyChangesetClass(type):
1662 1670
1663 1671 def __instancecheck__(self, instance):
1664 1672 return isinstance(instance, EmptyCommit)
1665 1673
1666 1674
1667 1675 class EmptyChangeset(EmptyCommit):
1668 1676
1669 1677 __metaclass__ = EmptyChangesetClass
1670 1678
1671 1679 def __new__(cls, *args, **kwargs):
1672 1680 warnings.warn(
1673 1681 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1674 1682 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1675 1683
1676 1684 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1677 1685 alias=None, revision=-1, message='', author='', date=None):
1678 1686 if requested_revision is not None:
1679 1687 warnings.warn(
1680 1688 "Parameter requested_revision not supported anymore",
1681 1689 DeprecationWarning)
1682 1690 super(EmptyChangeset, self).__init__(
1683 1691 commit_id=cs, repo=repo, alias=alias, idx=revision,
1684 1692 message=message, author=author, date=date)
1685 1693
1686 1694 @property
1687 1695 def revision(self):
1688 1696 warnings.warn("Use idx instead", DeprecationWarning)
1689 1697 return self.idx
1690 1698
1691 1699 @revision.setter
1692 1700 def revision(self, value):
1693 1701 warnings.warn("Use idx instead", DeprecationWarning)
1694 1702 self.idx = value
1695 1703
1696 1704
1697 1705 class EmptyRepository(BaseRepository):
1698 1706 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1699 1707 pass
1700 1708
1701 1709 def get_diff(self, *args, **kwargs):
1702 1710 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1703 1711 return GitDiff('')
1704 1712
1705 1713
1706 1714 class CollectionGenerator(object):
1707 1715
1708 1716 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1709 1717 self.repo = repo
1710 1718 self.commit_ids = commit_ids
1711 1719 # TODO: (oliver) this isn't currently hooked up
1712 1720 self.collection_size = None
1713 1721 self.pre_load = pre_load
1714 1722 self.translate_tag = translate_tag
1715 1723
1716 1724 def __len__(self):
1717 1725 if self.collection_size is not None:
1718 1726 return self.collection_size
1719 1727 return self.commit_ids.__len__()
1720 1728
1721 1729 def __iter__(self):
1722 1730 for commit_id in self.commit_ids:
1723 1731 # TODO: johbo: Mercurial passes in commit indices or commit ids
1724 1732 yield self._commit_factory(commit_id)
1725 1733
1726 1734 def _commit_factory(self, commit_id):
1727 1735 """
1728 1736 Allows backends to override the way commits are generated.
1729 1737 """
1730 1738 return self.repo.get_commit(
1731 1739 commit_id=commit_id, pre_load=self.pre_load,
1732 1740 translate_tag=self.translate_tag)
1733 1741
1734 1742 def __getslice__(self, i, j):
1735 1743 """
1736 1744 Returns a new generator over a slice of the repository's commits
1737 1745 """
1738 1746 commit_ids = self.commit_ids[i:j]
1739 1747 return self.__class__(
1740 1748 self.repo, commit_ids, pre_load=self.pre_load,
1741 1749 translate_tag=self.translate_tag)
1742 1750
1743 1751 def __repr__(self):
1744 1752 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1745 1753
1746 1754
1747 1755 class Config(object):
1748 1756 """
1749 1757 Represents the configuration for a repository.
1750 1758
1751 1759 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1752 1760 standard library. It implements only the needed subset.
1753 1761 """
1754 1762
1755 1763 def __init__(self):
1756 1764 self._values = {}
1757 1765
1758 1766 def copy(self):
1759 1767 clone = Config()
1760 1768 for section, values in self._values.items():
1761 1769 clone._values[section] = values.copy()
1762 1770 return clone
1763 1771
1764 1772 def __repr__(self):
1765 1773 return '<Config(%s sections) at %s>' % (
1766 1774 len(self._values), hex(id(self)))
1767 1775
1768 1776 def items(self, section):
1769 1777 return self._values.get(section, {}).iteritems()
1770 1778
1771 1779 def get(self, section, option):
1772 1780 return self._values.get(section, {}).get(option)
1773 1781
1774 1782 def set(self, section, option, value):
1775 1783 section_values = self._values.setdefault(section, {})
1776 1784 section_values[option] = value
1777 1785
1778 1786 def clear_section(self, section):
1779 1787 self._values[section] = {}
1780 1788
1781 1789 def serialize(self):
1782 1790 """
1783 1791 Creates a list of 3-tuples ``(section, option, value)`` representing
1784 1792 this config object.
1785 1793 """
1786 1794 items = []
1787 1795 for section in self._values:
1788 1796 for option, value in self._values[section].items():
1789 1797 items.append(
1790 1798 (safe_str(section), safe_str(option), safe_str(value)))
1791 1799 return items
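An illustrative sketch of the ``Config`` API defined above (the section, option and value strings are made up):

    config = Config()
    config.set('hooks', 'changegroup.update', 'hg update >&2')
    config.get('hooks', 'changegroup.update')  # 'hg update >&2'
    list(config.items('hooks'))                # [('changegroup.update', 'hg update >&2')]
    config.serialize()                         # [('hooks', 'changegroup.update', 'hg update >&2')]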
1792 1800
1793 1801
1794 1802 class Diff(object):
1795 1803 """
1796 1804 Represents a diff result from a repository backend.
1797 1805
1798 1806 Subclasses have to provide a backend specific value for
1799 1807 :attr:`_header_re` and :attr:`_meta_re`.
1800 1808 """
1801 1809 _meta_re = None
1802 1810 _header_re = None
1803 1811
1804 1812 def __init__(self, raw_diff):
1805 1813 self.raw = raw_diff
1806 1814
1807 1815 def chunks(self):
1808 1816 """
1809 1817 Split the diff into chunks, one per ``diff --git a/file b/file`` section.
1810 1818 To keep the diffs consistent we must prepend them with \n, and we must
1811 1819 be able to detect the last chunk, as it also has a special rule.
1812 1820 """
1813 1821
1814 1822 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1815 1823 header = diff_parts[0]
1816 1824
1817 1825 if self._meta_re:
1818 1826 match = self._meta_re.match(header)
1819 1827
1820 1828 chunks = diff_parts[1:]
1821 1829 total_chunks = len(chunks)
1822 1830
1823 1831 return (
1824 1832 DiffChunk(chunk, self, cur_chunk == total_chunks)
1825 1833 for cur_chunk, chunk in enumerate(chunks, start=1))
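A hedged usage sketch of the chunking above, via the Git subclass already imported elsewhere in this file (the patch file name is made up, and the exact keys of ``chunk.header`` depend on the backend's ``_header_re``):

    from rhodecode.lib.vcs.backends.git.diff import GitDiff

    raw = open('change.patch', 'rb').read()
    for chunk in GitDiff(raw).chunks():
        print chunk.header, len(chunk.diff)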
1826 1834
1827 1835
1828 1836 class DiffChunk(object):
1829 1837
1830 1838 def __init__(self, chunk, diff, last_chunk):
1831 1839 self._diff = diff
1832 1840
1833 1841 # since we split by \ndiff --git, that part is lost from the original diff;
1834 1842 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1835 1843 if not last_chunk:
1836 1844 chunk += '\n'
1837 1845
1838 1846 match = self._diff._header_re.match(chunk)
1839 1847 self.header = match.groupdict()
1840 1848 self.diff = chunk[match.end():]
1841 1849 self.raw = chunk
1842 1850
1843 1851
1844 1852 class BasePathPermissionChecker(object):
1845 1853
1846 1854 @staticmethod
1847 1855 def create_from_patterns(includes, excludes):
1848 1856 if includes and '*' in includes and not excludes:
1849 1857 return AllPathPermissionChecker()
1850 1858 elif excludes and '*' in excludes:
1851 1859 return NonePathPermissionChecker()
1852 1860 else:
1853 1861 return PatternPathPermissionChecker(includes, excludes)
1854 1862
1855 1863 @property
1856 1864 def has_full_access(self):
1857 1865 raise NotImplementedError()
1858 1866
1859 1867 def has_access(self, path):
1860 1868 raise NotImplementedError()
1861 1869
1862 1870
1863 1871 class AllPathPermissionChecker(BasePathPermissionChecker):
1864 1872
1865 1873 @property
1866 1874 def has_full_access(self):
1867 1875 return True
1868 1876
1869 1877 def has_access(self, path):
1870 1878 return True
1871 1879
1872 1880
1873 1881 class NonePathPermissionChecker(BasePathPermissionChecker):
1874 1882
1875 1883 @property
1876 1884 def has_full_access(self):
1877 1885 return False
1878 1886
1879 1887 def has_access(self, path):
1880 1888 return False
1881 1889
1882 1890
1883 1891 class PatternPathPermissionChecker(BasePathPermissionChecker):
1884 1892
1885 1893 def __init__(self, includes, excludes):
1886 1894 self.includes = includes
1887 1895 self.excludes = excludes
1888 1896 self.includes_re = [] if not includes else [
1889 1897 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1890 1898 self.excludes_re = [] if not excludes else [
1891 1899 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1892 1900
1893 1901 @property
1894 1902 def has_full_access(self):
1895 1903 return '*' in self.includes and not self.excludes
1896 1904
1897 1905 def has_access(self, path):
1898 1906 for regex in self.excludes_re:
1899 1907 if regex.match(path):
1900 1908 return False
1901 1909 for regex in self.includes_re:
1902 1910 if regex.match(path):
1903 1911 return True
1904 1912 return False
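A short sketch of how the path-permission checkers above compose from fnmatch-style patterns (the pattern values are illustrative):

    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'setup.py'], excludes=['docs/private/*'])
    checker.has_access('docs/index.rst')        # True
    checker.has_access('docs/private/key.txt')  # False
    checker.has_full_access                     # False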
@@ -1,1890 +1,1922 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 from rhodecode import events
39 38 from rhodecode.translation import lazy_ugettext
40 39 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 40 from rhodecode.lib import audit_logger
42 41 from rhodecode.lib.compat import OrderedDict
43 42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 43 from rhodecode.lib.markup_renderer import (
45 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe, AttributeDict, safe_int
47 46 from rhodecode.lib.vcs.backends.base import (
48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
48 TargetRefMissing, SourceRefMissing)
49 49 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 50 from rhodecode.lib.vcs.exceptions import (
51 51 CommitDoesNotExistError, EmptyRepositoryError)
52 52 from rhodecode.model import BaseModel
53 53 from rhodecode.model.changeset_status import ChangesetStatusModel
54 54 from rhodecode.model.comment import CommentsModel
55 55 from rhodecode.model.db import (
56 56 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
57 57 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
58 58 from rhodecode.model.meta import Session
59 59 from rhodecode.model.notification import NotificationModel, \
60 60 EmailNotificationModel
61 61 from rhodecode.model.scm import ScmModel
62 62 from rhodecode.model.settings import VcsSettingsModel
63 63
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 # Data structure to hold the response data when updating commits during a pull
69 69 # request update.
70 70 class UpdateResponse(object):
71 71
72 72 def __init__(self, executed, reason, new, old, common_ancestor_id,
73 73 commit_changes, source_changed, target_changed):
74 74
75 75 self.executed = executed
76 76 self.reason = reason
77 77 self.new = new
78 78 self.old = old
79 79 self.common_ancestor_id = common_ancestor_id
80 80 self.changes = commit_changes
81 81 self.source_changed = source_changed
82 82 self.target_changed = target_changed
83 83
84 84
85 85 class PullRequestModel(BaseModel):
86 86
87 87 cls = PullRequest
88 88
89 89 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
90 90
91 91 UPDATE_STATUS_MESSAGES = {
92 92 UpdateFailureReason.NONE: lazy_ugettext(
93 93 'Pull request update successful.'),
94 94 UpdateFailureReason.UNKNOWN: lazy_ugettext(
95 95 'Pull request update failed because of an unknown error.'),
96 96 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
97 97 'No update needed because the source and target have not changed.'),
98 98 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
99 99 'Pull request cannot be updated because the reference type is '
100 100 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
101 101 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 102 'This pull request cannot be updated because the target '
103 103 'reference is missing.'),
104 104 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 105 'This pull request cannot be updated because the source '
106 106 'reference is missing.'),
107 107 }
108 108 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
109 109 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
110 110
111 111 def __get_pull_request(self, pull_request):
112 112 return self._get_instance((
113 113 PullRequest, PullRequestVersion), pull_request)
114 114
115 115 def _check_perms(self, perms, pull_request, user, api=False):
116 116 if not api:
117 117 return h.HasRepoPermissionAny(*perms)(
118 118 user=user, repo_name=pull_request.target_repo.repo_name)
119 119 else:
120 120 return h.HasRepoPermissionAnyApi(*perms)(
121 121 user=user, repo_name=pull_request.target_repo.repo_name)
122 122
123 123 def check_user_read(self, pull_request, user, api=False):
124 124 _perms = ('repository.admin', 'repository.write', 'repository.read',)
125 125 return self._check_perms(_perms, pull_request, user, api)
126 126
127 127 def check_user_merge(self, pull_request, user, api=False):
128 128 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
129 129 return self._check_perms(_perms, pull_request, user, api)
130 130
131 131 def check_user_update(self, pull_request, user, api=False):
132 132 owner = user.user_id == pull_request.user_id
133 133 return self.check_user_merge(pull_request, user, api) or owner
134 134
135 135 def check_user_delete(self, pull_request, user):
136 136 owner = user.user_id == pull_request.user_id
137 137 _perms = ('repository.admin',)
138 138 return self._check_perms(_perms, pull_request, user) or owner
139 139
140 140 def check_user_change_status(self, pull_request, user, api=False):
141 141 reviewer = user.user_id in [x.user_id for x in
142 142 pull_request.reviewers]
143 143 return self.check_user_update(pull_request, user, api) or reviewer
144 144
145 145 def check_user_comment(self, pull_request, user):
146 146 owner = user.user_id == pull_request.user_id
147 147 return self.check_user_read(pull_request, user) or owner
148 148
149 149 def get(self, pull_request):
150 150 return self.__get_pull_request(pull_request)
151 151
152 152 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
153 153 statuses=None, opened_by=None, order_by=None,
154 154 order_dir='desc', only_created=False):
155 155 repo = None
156 156 if repo_name:
157 157 repo = self._get_repo(repo_name)
158 158
159 159 q = PullRequest.query()
160 160
161 161 if search_q:
162 162 like_expression = u'%{}%'.format(safe_unicode(search_q))
163 163 q = q.filter(or_(
164 164 cast(PullRequest.pull_request_id, String).ilike(like_expression),
165 165 PullRequest.title.ilike(like_expression),
166 166 PullRequest.description.ilike(like_expression),
167 167 ))
168 168
169 169 # source or target
170 170 if repo and source:
171 171 q = q.filter(PullRequest.source_repo == repo)
172 172 elif repo:
173 173 q = q.filter(PullRequest.target_repo == repo)
174 174
175 175 # closed,opened
176 176 if statuses:
177 177 q = q.filter(PullRequest.status.in_(statuses))
178 178
179 179 # opened by filter
180 180 if opened_by:
181 181 q = q.filter(PullRequest.user_id.in_(opened_by))
182 182
183 183 # only get those that are in "created" state
184 184 if only_created:
185 185 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
186 186
187 187 if order_by:
188 188 order_map = {
189 189 'name_raw': PullRequest.pull_request_id,
190 190 'id': PullRequest.pull_request_id,
191 191 'title': PullRequest.title,
192 192 'updated_on_raw': PullRequest.updated_on,
193 193 'target_repo': PullRequest.target_repo_id
194 194 }
195 195 if order_dir == 'asc':
196 196 q = q.order_by(order_map[order_by].asc())
197 197 else:
198 198 q = q.order_by(order_map[order_by].desc())
199 199
200 200 return q
201 201
202 202 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
203 203 opened_by=None):
204 204 """
205 205 Count the number of pull requests for a specific repository.
206 206
207 207 :param repo_name: target or source repo
208 208 :param search_q: filter by text
209 209 :param source: boolean flag to specify if repo_name refers to source
210 210 :param statuses: list of pull request statuses
211 211 :param opened_by: author user of the pull request
212 212 :returns: int number of pull requests
213 213 """
214 214 q = self._prepare_get_all_query(
215 215 repo_name, search_q=search_q, source=source, statuses=statuses,
216 216 opened_by=opened_by)
217 217
218 218 return q.count()
219 219
220 220 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
221 221 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
222 222 """
223 223 Get all pull requests for a specific repository.
224 224
225 225 :param repo_name: target or source repo
226 226 :param search_q: filter by text
227 227 :param source: boolean flag to specify if repo_name refers to source
228 228 :param statuses: list of pull request statuses
229 229 :param opened_by: author user of the pull request
230 230 :param offset: pagination offset
231 231 :param length: length of returned list
232 232 :param order_by: order of the returned list
233 233 :param order_dir: 'asc' or 'desc' ordering direction
234 234 :returns: list of pull requests
235 235 """
236 236 q = self._prepare_get_all_query(
237 237 repo_name, search_q=search_q, source=source, statuses=statuses,
238 238 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
239 239
240 240 if length:
241 241 pull_requests = q.limit(length).offset(offset).all()
242 242 else:
243 243 pull_requests = q.all()
244 244
245 245 return pull_requests
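A hypothetical call showing how the query helpers above fit together (the repository name, status constant and paging values are assumptions, not taken from this diff):

    model = PullRequestModel()
    prs = model.get_all(
        'some-repo', statuses=[PullRequest.STATUS_NEW],
        order_by='updated_on_raw', order_dir='desc',
        offset=0, length=20)
    total = model.count_all('some-repo', statuses=[PullRequest.STATUS_NEW])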
246 246
247 247 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
248 248 opened_by=None):
249 249 """
250 250 Count the number of pull requests for a specific repository that are
251 251 awaiting review.
252 252
253 253 :param repo_name: target or source repo
254 254 :param search_q: filter by text
255 255 :param source: boolean flag to specify if repo_name refers to source
256 256 :param statuses: list of pull request statuses
257 257 :param opened_by: author user of the pull request
258 258 :returns: int number of pull requests
259 259 """
260 260 pull_requests = self.get_awaiting_review(
261 261 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
262 262
263 263 return len(pull_requests)
264 264
265 265 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
266 266 opened_by=None, offset=0, length=None,
267 267 order_by=None, order_dir='desc'):
268 268 """
269 269 Get all pull requests for a specific repository that are awaiting
270 270 review.
271 271
272 272 :param repo_name: target or source repo
273 273 :param search_q: filter by text
274 274 :param source: boolean flag to specify if repo_name refers to source
275 275 :param statuses: list of pull request statuses
276 276 :param opened_by: author user of the pull request
277 277 :param offset: pagination offset
278 278 :param length: length of returned list
279 279 :param order_by: order of the returned list
280 280 :param order_dir: 'asc' or 'desc' ordering direction
281 281 :returns: list of pull requests
282 282 """
283 283 pull_requests = self.get_all(
284 284 repo_name, search_q=search_q, source=source, statuses=statuses,
285 285 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
286 286
287 287 _filtered_pull_requests = []
288 288 for pr in pull_requests:
289 289 status = pr.calculated_review_status()
290 290 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
291 291 ChangesetStatus.STATUS_UNDER_REVIEW]:
292 292 _filtered_pull_requests.append(pr)
293 293 if length:
294 294 return _filtered_pull_requests[offset:offset+length]
295 295 else:
296 296 return _filtered_pull_requests
297 297
298 298 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
299 299 opened_by=None, user_id=None):
300 300 """
301 301 Count the number of pull requests for a specific repository that are
302 302 awaiting review from a specific user.
303 303
304 304 :param repo_name: target or source repo
305 305 :param search_q: filter by text
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, search_q=search_q, source=source, statuses=statuses,
314 314 opened_by=opened_by, user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param search_q: filter by text
327 327 :param source: boolean flag to specify if repo_name refers to source
328 328 :param statuses: list of pull request statuses
329 329 :param opened_by: author user of the pull request
330 330 :param user_id: reviewer user of the pull request
331 331 :param offset: pagination offset
332 332 :param length: length of returned list
333 333 :param order_by: order of the returned list
334 334 :param order_dir: 'asc' or 'desc' ordering direction
335 335 :returns: list of pull requests
336 336 """
337 337 pull_requests = self.get_all(
338 338 repo_name, search_q=search_q, source=source, statuses=statuses,
339 339 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
340 340
341 341 _my = PullRequestModel().get_not_reviewed(user_id)
342 342 my_participation = []
343 343 for pr in pull_requests:
344 344 if pr in _my:
345 345 my_participation.append(pr)
346 346 _filtered_pull_requests = my_participation
347 347 if length:
348 348 return _filtered_pull_requests[offset:offset+length]
349 349 else:
350 350 return _filtered_pull_requests
351 351
352 352 def get_not_reviewed(self, user_id):
353 353 return [
354 354 x.pull_request for x in PullRequestReviewers.query().filter(
355 355 PullRequestReviewers.user_id == user_id).all()
356 356 ]
357 357
358 358 def _prepare_participating_query(self, user_id=None, statuses=None,
359 359 order_by=None, order_dir='desc'):
360 360 q = PullRequest.query()
361 361 if user_id:
362 362 reviewers_subquery = Session().query(
363 363 PullRequestReviewers.pull_request_id).filter(
364 364 PullRequestReviewers.user_id == user_id).subquery()
365 365 user_filter = or_(
366 366 PullRequest.user_id == user_id,
367 367 PullRequest.pull_request_id.in_(reviewers_subquery)
368 368 )
369 369 q = PullRequest.query().filter(user_filter)
370 370
371 371 # closed,opened
372 372 if statuses:
373 373 q = q.filter(PullRequest.status.in_(statuses))
374 374
375 375 if order_by:
376 376 order_map = {
377 377 'name_raw': PullRequest.pull_request_id,
378 378 'title': PullRequest.title,
379 379 'updated_on_raw': PullRequest.updated_on,
380 380 'target_repo': PullRequest.target_repo_id
381 381 }
382 382 if order_dir == 'asc':
383 383 q = q.order_by(order_map[order_by].asc())
384 384 else:
385 385 q = q.order_by(order_map[order_by].desc())
386 386
387 387 return q
388 388
389 389 def count_im_participating_in(self, user_id=None, statuses=None):
390 390 q = self._prepare_participating_query(user_id, statuses=statuses)
391 391 return q.count()
392 392
393 393 def get_im_participating_in(
394 394 self, user_id=None, statuses=None, offset=0,
395 395 length=None, order_by=None, order_dir='desc'):
396 396 """
397 397 Get all pull requests that I'm participating in or have opened
398 398 """
399 399
400 400 q = self._prepare_participating_query(
401 401 user_id, statuses=statuses, order_by=order_by,
402 402 order_dir=order_dir)
403 403
404 404 if length:
405 405 pull_requests = q.limit(length).offset(offset).all()
406 406 else:
407 407 pull_requests = q.all()
408 408
409 409 return pull_requests
410 410
411 411 def get_versions(self, pull_request):
412 412 """
413 413 returns versions of the pull request sorted by ID ascending
414 414 """
415 415 return PullRequestVersion.query()\
416 416 .filter(PullRequestVersion.pull_request == pull_request)\
417 417 .order_by(PullRequestVersion.pull_request_version_id.asc())\
418 418 .all()
419 419
420 420 def get_pr_version(self, pull_request_id, version=None):
421 421 at_version = None
422 422
423 423 if version and version == 'latest':
424 424 pull_request_ver = PullRequest.get(pull_request_id)
425 425 pull_request_obj = pull_request_ver
426 426 _org_pull_request_obj = pull_request_obj
427 427 at_version = 'latest'
428 428 elif version:
429 429 pull_request_ver = PullRequestVersion.get_or_404(version)
430 430 pull_request_obj = pull_request_ver
431 431 _org_pull_request_obj = pull_request_ver.pull_request
432 432 at_version = pull_request_ver.pull_request_version_id
433 433 else:
434 434 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
435 435 pull_request_id)
436 436
437 437 pull_request_display_obj = PullRequest.get_pr_display_object(
438 438 pull_request_obj, _org_pull_request_obj)
439 439
440 440 return _org_pull_request_obj, pull_request_obj, \
441 441 pull_request_display_obj, at_version
442 442
443 443 def create(self, created_by, source_repo, source_ref, target_repo,
444 444 target_ref, revisions, reviewers, title, description=None,
445 445 description_renderer=None,
446 446 reviewer_data=None, translator=None, auth_user=None):
447 447 translator = translator or get_current_request().translate
448 448
449 449 created_by_user = self._get_user(created_by)
450 450 auth_user = auth_user or created_by_user.AuthUser()
451 451 source_repo = self._get_repo(source_repo)
452 452 target_repo = self._get_repo(target_repo)
453 453
454 454 pull_request = PullRequest()
455 455 pull_request.source_repo = source_repo
456 456 pull_request.source_ref = source_ref
457 457 pull_request.target_repo = target_repo
458 458 pull_request.target_ref = target_ref
459 459 pull_request.revisions = revisions
460 460 pull_request.title = title
461 461 pull_request.description = description
462 462 pull_request.description_renderer = description_renderer
463 463 pull_request.author = created_by_user
464 464 pull_request.reviewer_data = reviewer_data
465 465 pull_request.pull_request_state = pull_request.STATE_CREATING
466 466 Session().add(pull_request)
467 467 Session().flush()
468 468
469 469 reviewer_ids = set()
470 470 # members / reviewers
471 471 for reviewer_object in reviewers:
472 472 user_id, reasons, mandatory, rules = reviewer_object
473 473 user = self._get_user(user_id)
474 474
475 475 # skip duplicates
476 476 if user.user_id in reviewer_ids:
477 477 continue
478 478
479 479 reviewer_ids.add(user.user_id)
480 480
481 481 reviewer = PullRequestReviewers()
482 482 reviewer.user = user
483 483 reviewer.pull_request = pull_request
484 484 reviewer.reasons = reasons
485 485 reviewer.mandatory = mandatory
486 486
487 487 # NOTE(marcink): pick only first rule for now
488 488 rule_id = list(rules)[0] if rules else None
489 489 rule = RepoReviewRule.get(rule_id) if rule_id else None
490 490 if rule:
491 491 review_group = rule.user_group_vote_rule(user_id)
492 492 # we check if this particular reviewer is a member of a voting group
493 493 if review_group:
494 494 # NOTE(marcink):
495 495 # the user can be a member of more than one group, but we pick the first,
496 496 # the same as the default reviewers algo
497 497 review_group = review_group[0]
498 498
499 499 rule_data = {
500 500 'rule_name':
501 501 rule.review_rule_name,
502 502 'rule_user_group_entry_id':
503 503 review_group.repo_review_rule_users_group_id,
504 504 'rule_user_group_name':
505 505 review_group.users_group.users_group_name,
506 506 'rule_user_group_members':
507 507 [x.user.username for x in review_group.users_group.members],
508 508 'rule_user_group_members_id':
509 509 [x.user.user_id for x in review_group.users_group.members],
510 510 }
511 511 # e.g {'vote_rule': -1, 'mandatory': True}
512 512 rule_data.update(review_group.rule_data())
513 513
514 514 reviewer.rule_data = rule_data
515 515
516 516 Session().add(reviewer)
517 517 Session().flush()
518 518
519 519 # Set approval status to "Under Review" for all commits which are
520 520 # part of this pull request.
521 521 ChangesetStatusModel().set_status(
522 522 repo=target_repo,
523 523 status=ChangesetStatus.STATUS_UNDER_REVIEW,
524 524 user=created_by_user,
525 525 pull_request=pull_request
526 526 )
527 527 # we commit early at this point because the queries above take row locks.
528 528 # Because of that we need to commit and finish the transaction before the
529 529 # validate call below, which for large repos could take long and would
530 530 # otherwise keep those rows locked
531 531 Session().commit()
532 532
533 533 # prepare workspace, and run initial merge simulation. Set state during that
534 534 # operation
535 535 pull_request = PullRequest.get(pull_request.pull_request_id)
536 536
537 537 # set as merging, for merge simulation, and if finished to created so we mark
538 538 # simulation is working fine
539 539 with pull_request.set_state(PullRequest.STATE_MERGING,
540 540 final_state=PullRequest.STATE_CREATED) as state_obj:
541 541 MergeCheck.validate(
542 542 pull_request, auth_user=auth_user, translator=translator)
543 543
544 544 self.notify_reviewers(pull_request, reviewer_ids)
545 545 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
546 546
547 547 creation_data = pull_request.get_api_data(with_merge_state=False)
548 548 self._log_audit_action(
549 549 'repo.pull_request.create', {'data': creation_data},
550 550 auth_user, pull_request)
551 551
552 552 return pull_request
553 553
554 554 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
555 555 pull_request = self.__get_pull_request(pull_request)
556 556 target_scm = pull_request.target_repo.scm_instance()
557 557 if action == 'create':
558 558 trigger_hook = hooks_utils.trigger_create_pull_request_hook
559 559 elif action == 'merge':
560 560 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
561 561 elif action == 'close':
562 562 trigger_hook = hooks_utils.trigger_close_pull_request_hook
563 563 elif action == 'review_status_change':
564 564 trigger_hook = hooks_utils.trigger_review_pull_request_hook
565 565 elif action == 'update':
566 566 trigger_hook = hooks_utils.trigger_update_pull_request_hook
567 567 elif action == 'comment':
568 568 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
569 569 else:
570 570 return
571 571
572 572 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
573 573 pull_request, action, trigger_hook)
574 574 trigger_hook(
575 575 username=user.username,
576 576 repo_name=pull_request.target_repo.repo_name,
577 577 repo_type=target_scm.alias,
578 578 pull_request=pull_request,
579 579 data=data)
580 580
581 581 def _get_commit_ids(self, pull_request):
582 582 """
583 583 Return the commit ids of the merged pull request.
584 584
585 585 This method does not yet deal correctly with the lack of autoupdates
586 586 or with implicit target updates.
587 587 For example: if a commit in the source repo is already in the target it
588 588 will be reported anyway.
589 589 """
590 590 merge_rev = pull_request.merge_rev
591 591 if merge_rev is None:
592 592 raise ValueError('This pull request was not merged yet')
593 593
594 594 commit_ids = list(pull_request.revisions)
595 595 if merge_rev not in commit_ids:
596 596 commit_ids.append(merge_rev)
597 597
598 598 return commit_ids
599 599
600 600 def merge_repo(self, pull_request, user, extras):
601 601 log.debug("Merging pull request %s", pull_request.pull_request_id)
602 602 extras['user_agent'] = 'internal-merge'
603 603 merge_state = self._merge_pull_request(pull_request, user, extras)
604 604 if merge_state.executed:
605 605 log.debug("Merge was successful, updating the pull request comments.")
606 606 self._comment_and_close_pr(pull_request, user, merge_state)
607 607
608 608 self._log_audit_action(
609 609 'repo.pull_request.merge',
610 610 {'merge_state': merge_state.__dict__},
611 611 user, pull_request)
612 612
613 613 else:
614 614 log.warn("Merge failed, not updating the pull request.")
615 615 return merge_state
616 616
617 617 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
618 618 target_vcs = pull_request.target_repo.scm_instance()
619 619 source_vcs = pull_request.source_repo.scm_instance()
620 620
621 621 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
622 622 pr_id=pull_request.pull_request_id,
623 623 pr_title=pull_request.title,
624 624 source_repo=source_vcs.name,
625 625 source_ref_name=pull_request.source_ref_parts.name,
626 626 target_repo=target_vcs.name,
627 627 target_ref_name=pull_request.target_ref_parts.name,
628 628 )
629 629
630 630 workspace_id = self._workspace_id(pull_request)
631 631 repo_id = pull_request.target_repo.repo_id
632 632 use_rebase = self._use_rebase_for_merging(pull_request)
633 633 close_branch = self._close_branch_before_merging(pull_request)
634 634 user_name = self._user_name_for_merging(pull_request, user)
635 635
636 636 target_ref = self._refresh_reference(
637 637 pull_request.target_ref_parts, target_vcs)
638 638
639 639 callback_daemon, extras = prepare_callback_daemon(
640 640 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
641 641 host=vcs_settings.HOOKS_HOST,
642 642 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
643 643
644 644 with callback_daemon:
645 645 # TODO: johbo: Implement a clean way to run a config_override
646 646 # for a single call.
647 647 target_vcs.config.set(
648 648 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
649 649
650 650 merge_state = target_vcs.merge(
651 651 repo_id, workspace_id, target_ref, source_vcs,
652 652 pull_request.source_ref_parts,
653 653 user_name=user_name, user_email=user.email,
654 654 message=message, use_rebase=use_rebase,
655 655 close_branch=close_branch)
656 656 return merge_state
657 657
658 658 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
659 659 pull_request.merge_rev = merge_state.merge_ref.commit_id
660 660 pull_request.updated_on = datetime.datetime.now()
661 661 close_msg = close_msg or 'Pull request merged and closed'
662 662
663 663 CommentsModel().create(
664 664 text=safe_unicode(close_msg),
665 665 repo=pull_request.target_repo.repo_id,
666 666 user=user.user_id,
667 667 pull_request=pull_request.pull_request_id,
668 668 f_path=None,
669 669 line_no=None,
670 670 closing_pr=True
671 671 )
672 672
673 673 Session().add(pull_request)
674 674 Session().flush()
675 675 # TODO: paris: replace invalidation with less radical solution
676 676 ScmModel().mark_for_invalidation(
677 677 pull_request.target_repo.repo_name)
678 678 self.trigger_pull_request_hook(pull_request, user, 'merge')
679 679
680 680 def has_valid_update_type(self, pull_request):
681 681 source_ref_type = pull_request.source_ref_parts.type
682 682 return source_ref_type in self.REF_TYPES
683 683
684 def get_flow_commits(self, pull_request):
685
686 # source repo
687 source_ref_name = pull_request.source_ref_parts.name
688 source_ref_type = pull_request.source_ref_parts.type
689 source_ref_id = pull_request.source_ref_parts.commit_id
690 source_repo = pull_request.source_repo.scm_instance()
691
692 try:
693 if source_ref_type in self.REF_TYPES:
694 source_commit = source_repo.get_commit(source_ref_name)
695 else:
696 source_commit = source_repo.get_commit(source_ref_id)
697 except CommitDoesNotExistError:
698 raise SourceRefMissing()
699
700 # target repo
701 target_ref_name = pull_request.target_ref_parts.name
702 target_ref_type = pull_request.target_ref_parts.type
703 target_ref_id = pull_request.target_ref_parts.commit_id
704 target_repo = pull_request.target_repo.scm_instance()
705
706 try:
707 if target_ref_type in self.REF_TYPES:
708 target_commit = target_repo.get_commit(target_ref_name)
709 else:
710 target_commit = target_repo.get_commit(target_ref_id)
711 except CommitDoesNotExistError:
712 raise TargetRefMissing()
713
714 return source_commit, target_commit
715
684 716 def update_commits(self, pull_request, updating_user):
685 717 """
686 718 Get the updated list of commits for the pull request
687 719 and return the new pull request version and the list
688 720 of commits processed by this update action
689 721
690 722 updating_user is the user_object who triggered the update
691 723 """
692 724 pull_request = self.__get_pull_request(pull_request)
693 725 source_ref_type = pull_request.source_ref_parts.type
694 726 source_ref_name = pull_request.source_ref_parts.name
695 727 source_ref_id = pull_request.source_ref_parts.commit_id
696 728
697 729 target_ref_type = pull_request.target_ref_parts.type
698 730 target_ref_name = pull_request.target_ref_parts.name
699 731 target_ref_id = pull_request.target_ref_parts.commit_id
700 732
701 733 if not self.has_valid_update_type(pull_request):
702 734 log.debug("Skipping update of pull request %s due to ref type: %s",
703 735 pull_request, source_ref_type)
704 736 return UpdateResponse(
705 737 executed=False,
706 738 reason=UpdateFailureReason.WRONG_REF_TYPE,
707 739 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
708 740 source_changed=False, target_changed=False)
709 741
710 # source repo
711 source_repo = pull_request.source_repo.scm_instance()
712
713 742 try:
714 source_commit = source_repo.get_commit(commit_id=source_ref_name)
715 except CommitDoesNotExistError:
743 source_commit, target_commit = self.get_flow_commits(pull_request)
744 except SourceRefMissing:
716 745 return UpdateResponse(
717 746 executed=False,
718 747 reason=UpdateFailureReason.MISSING_SOURCE_REF,
719 748 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
720 749 source_changed=False, target_changed=False)
721
722 source_changed = source_ref_id != source_commit.raw_id
723
724 # target repo
725 target_repo = pull_request.target_repo.scm_instance()
726
727 try:
728 target_commit = target_repo.get_commit(commit_id=target_ref_name)
729 except CommitDoesNotExistError:
750 except TargetRefMissing:
730 751 return UpdateResponse(
731 752 executed=False,
732 753 reason=UpdateFailureReason.MISSING_TARGET_REF,
733 754 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
734 755 source_changed=False, target_changed=False)
756
757 source_changed = source_ref_id != source_commit.raw_id
735 758 target_changed = target_ref_id != target_commit.raw_id
736 759
737 760 if not (source_changed or target_changed):
738 761 log.debug("Nothing changed in pull request %s", pull_request)
739 762 return UpdateResponse(
740 763 executed=False,
741 764 reason=UpdateFailureReason.NO_CHANGE,
742 765 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
743 766 source_changed=target_changed, target_changed=source_changed)
744 767
745 768 change_in_found = 'target repo' if target_changed else 'source repo'
746 769 log.debug('Updating pull request because of change in %s detected',
747 770 change_in_found)
748 771
749 772 # Finally there is a need for an update, in case of source change
750 773 # we create a new version, else just an update
751 774 if source_changed:
752 775 pull_request_version = self._create_version_from_snapshot(pull_request)
753 776 self._link_comments_to_version(pull_request_version)
754 777 else:
755 778 try:
756 779 ver = pull_request.versions[-1]
757 780 except IndexError:
758 781 ver = None
759 782
760 783 pull_request.pull_request_version_id = \
761 784 ver.pull_request_version_id if ver else None
762 785 pull_request_version = pull_request
763 786
764 try:
765 if target_ref_type in self.REF_TYPES:
766 target_commit = target_repo.get_commit(target_ref_name)
767 else:
768 target_commit = target_repo.get_commit(target_ref_id)
769 except CommitDoesNotExistError:
770 return UpdateResponse(
771 executed=False,
772 reason=UpdateFailureReason.MISSING_TARGET_REF,
773 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
774 source_changed=source_changed, target_changed=target_changed)
787 source_repo = pull_request.source_repo.scm_instance()
788 target_repo = pull_request.target_repo.scm_instance()
775 789
776 790 # re-compute commit ids
777 791 old_commit_ids = pull_request.revisions
778 792 pre_load = ["author", "date", "message", "branch"]
779 793 commit_ranges = target_repo.compare(
780 794 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
781 795 pre_load=pre_load)
782 796
783 797 ancestor_commit_id = source_repo.get_common_ancestor(
784 798 source_commit.raw_id, target_commit.raw_id, target_repo)
785 799
786 800 pull_request.source_ref = '%s:%s:%s' % (
787 801 source_ref_type, source_ref_name, source_commit.raw_id)
788 802 pull_request.target_ref = '%s:%s:%s' % (
789 803 target_ref_type, target_ref_name, ancestor_commit_id)
790 804
791 805 pull_request.revisions = [
792 806 commit.raw_id for commit in reversed(commit_ranges)]
793 807 pull_request.updated_on = datetime.datetime.now()
794 808 Session().add(pull_request)
795 809 new_commit_ids = pull_request.revisions
796 810
797 811 old_diff_data, new_diff_data = self._generate_update_diffs(
798 812 pull_request, pull_request_version)
799 813
800 814 # calculate commit and file changes
801 815 commit_changes = self._calculate_commit_id_changes(
802 816 old_commit_ids, new_commit_ids)
803 817 file_changes = self._calculate_file_changes(
804 818 old_diff_data, new_diff_data)
805 819
806 820 # set comments as outdated if DIFFS changed
807 821 CommentsModel().outdate_comments(
808 822 pull_request, old_diff_data=old_diff_data,
809 823 new_diff_data=new_diff_data)
810 824
811 825 valid_commit_changes = (commit_changes.added or commit_changes.removed)
812 826 file_node_changes = (
813 827 file_changes.added or file_changes.modified or file_changes.removed)
814 828 pr_has_changes = valid_commit_changes or file_node_changes
815 829
816 830 # Add an automatic comment to the pull request, in case
817 831 # anything has changed
818 832 if pr_has_changes:
819 833 update_comment = CommentsModel().create(
820 834 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
821 835 repo=pull_request.target_repo,
822 836 user=pull_request.author,
823 837 pull_request=pull_request,
824 838 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
825 839
826 840 # Update status to "Under Review" for added commits
827 841 for commit_id in commit_changes.added:
828 842 ChangesetStatusModel().set_status(
829 843 repo=pull_request.source_repo,
830 844 status=ChangesetStatus.STATUS_UNDER_REVIEW,
831 845 comment=update_comment,
832 846 user=pull_request.author,
833 847 pull_request=pull_request,
834 848 revision=commit_id)
835 849
836 850 # send update email to users
837 851 try:
838 852 self.notify_users(pull_request=pull_request, updating_user=updating_user,
839 853 ancestor_commit_id=ancestor_commit_id,
840 854 commit_changes=commit_changes,
841 855 file_changes=file_changes)
842 856 except Exception:
843 857 log.exception('Failed to send email notification to users')
844 858
845 859 log.debug(
846 860 'Updated pull request %s, added_ids: %s, common_ids: %s, '
847 861 'removed_ids: %s', pull_request.pull_request_id,
848 862 commit_changes.added, commit_changes.common, commit_changes.removed)
849 863 log.debug(
850 864 'Updated pull request with the following file changes: %s',
851 865 file_changes)
852 866
853 867 log.info(
854 868 "Updated pull request %s from commit %s to commit %s, "
855 869 "stored new version %s of this pull request.",
856 870 pull_request.pull_request_id, source_ref_id,
857 871 pull_request.source_ref_parts.commit_id,
858 872 pull_request_version.pull_request_version_id)
859 873 Session().commit()
860 874 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
861 875
862 876 return UpdateResponse(
863 877 executed=True, reason=UpdateFailureReason.NONE,
864 878 old=pull_request, new=pull_request_version,
865 879 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
866 880 source_changed=source_changed, target_changed=target_changed)
867 881
868 882 def _create_version_from_snapshot(self, pull_request):
869 883 version = PullRequestVersion()
870 884 version.title = pull_request.title
871 885 version.description = pull_request.description
872 886 version.status = pull_request.status
873 887 version.pull_request_state = pull_request.pull_request_state
874 888 version.created_on = datetime.datetime.now()
875 889 version.updated_on = pull_request.updated_on
876 890 version.user_id = pull_request.user_id
877 891 version.source_repo = pull_request.source_repo
878 892 version.source_ref = pull_request.source_ref
879 893 version.target_repo = pull_request.target_repo
880 894 version.target_ref = pull_request.target_ref
881 895
882 896 version._last_merge_source_rev = pull_request._last_merge_source_rev
883 897 version._last_merge_target_rev = pull_request._last_merge_target_rev
884 898 version.last_merge_status = pull_request.last_merge_status
885 899 version.last_merge_metadata = pull_request.last_merge_metadata
886 900 version.shadow_merge_ref = pull_request.shadow_merge_ref
887 901 version.merge_rev = pull_request.merge_rev
888 902 version.reviewer_data = pull_request.reviewer_data
889 903
890 904 version.revisions = pull_request.revisions
891 905 version.pull_request = pull_request
892 906 Session().add(version)
893 907 Session().flush()
894 908
895 909 return version
896 910
897 911 def _generate_update_diffs(self, pull_request, pull_request_version):
898 912
899 913 diff_context = (
900 914 self.DIFF_CONTEXT +
901 915 CommentsModel.needed_extra_diff_context())
902 916 hide_whitespace_changes = False
903 917 source_repo = pull_request_version.source_repo
904 918 source_ref_id = pull_request_version.source_ref_parts.commit_id
905 919 target_ref_id = pull_request_version.target_ref_parts.commit_id
906 920 old_diff = self._get_diff_from_pr_or_version(
907 921 source_repo, source_ref_id, target_ref_id,
908 922 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
909 923
910 924 source_repo = pull_request.source_repo
911 925 source_ref_id = pull_request.source_ref_parts.commit_id
912 926 target_ref_id = pull_request.target_ref_parts.commit_id
913 927
914 928 new_diff = self._get_diff_from_pr_or_version(
915 929 source_repo, source_ref_id, target_ref_id,
916 930 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
917 931
918 932 old_diff_data = diffs.DiffProcessor(old_diff)
919 933 old_diff_data.prepare()
920 934 new_diff_data = diffs.DiffProcessor(new_diff)
921 935 new_diff_data.prepare()
922 936
923 937 return old_diff_data, new_diff_data
924 938
925 939 def _link_comments_to_version(self, pull_request_version):
926 940 """
927 941 Link all unlinked comments of this pull request to the given version.
928 942
929 943 :param pull_request_version: The `PullRequestVersion` to which
930 944 the comments shall be linked.
931 945
932 946 """
933 947 pull_request = pull_request_version.pull_request
934 948 comments = ChangesetComment.query()\
935 949 .filter(
936 950 # TODO: johbo: Should we query for the repo at all here?
937 951 # Pending decision on how comments of PRs are to be related
938 952 # to either the source repo, the target repo or no repo at all.
939 953 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
940 954 ChangesetComment.pull_request == pull_request,
941 955 ChangesetComment.pull_request_version == None)\
942 956 .order_by(ChangesetComment.comment_id.asc())
943 957
944 958 # TODO: johbo: Find out why this breaks if it is done in a bulk
945 959 # operation.
946 960 for comment in comments:
947 961 comment.pull_request_version_id = (
948 962 pull_request_version.pull_request_version_id)
949 963 Session().add(comment)
950 964
951 965 def _calculate_commit_id_changes(self, old_ids, new_ids):
952 966 added = [x for x in new_ids if x not in old_ids]
953 967 common = [x for x in new_ids if x in old_ids]
954 968 removed = [x for x in old_ids if x not in new_ids]
955 969 total = new_ids
956 970 return ChangeTuple(added, common, removed, total)
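A worked example of the change computation above, with made-up commit ids:

    old_ids = ['a1', 'b2', 'c3']
    new_ids = ['b2', 'c3', 'd4']
    # _calculate_commit_id_changes(old_ids, new_ids) returns a ChangeTuple with:
    #   added   -> ['d4']
    #   common  -> ['b2', 'c3']
    #   removed -> ['a1']
    #   total   -> ['b2', 'c3', 'd4']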
957 971
958 972 def _calculate_file_changes(self, old_diff_data, new_diff_data):
959 973
960 974 old_files = OrderedDict()
961 975 for diff_data in old_diff_data.parsed_diff:
962 976 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
963 977
964 978 added_files = []
965 979 modified_files = []
966 980 removed_files = []
967 981 for diff_data in new_diff_data.parsed_diff:
968 982 new_filename = diff_data['filename']
969 983 new_hash = md5_safe(diff_data['raw_diff'])
970 984
971 985 old_hash = old_files.get(new_filename)
972 986 if not old_hash:
973 987 # file is not present in the old diff, we have to figure out the
974 988 # operation (ADD/REMOVE) from the parsed diff
975 989 operations_dict = diff_data['stats']['ops']
976 990 if diffs.DEL_FILENODE in operations_dict:
977 991 removed_files.append(new_filename)
978 992 else:
979 993 added_files.append(new_filename)
980 994 else:
981 995 if new_hash != old_hash:
982 996 modified_files.append(new_filename)
983 997 # now remove a file from old, since we have seen it already
984 998 del old_files[new_filename]
985 999
986 1000 # removed files are those present in the old diff but not in the NEW one;
987 1001 # since we delete old entries that also appear in the new diff, any
988 1002 # left-overs are the removed files
989 1003 removed_files.extend(old_files.keys())
990 1004
991 1005 return FileChangeTuple(added_files, modified_files, removed_files)
992 1006
993 1007 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
994 1008 """
995 1009 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
996 1010 so it always looks the same regardless of which default
997 1011 renderer the system is using.
998 1012
999 1013 :param ancestor_commit_id: ancestor raw_id
1000 1014 :param changes: changes named tuple
1001 1015 :param file_changes: file changes named tuple
1002 1016
1003 1017 """
1004 1018 new_status = ChangesetStatus.get_status_lbl(
1005 1019 ChangesetStatus.STATUS_UNDER_REVIEW)
1006 1020
1007 1021 changed_files = (
1008 1022 file_changes.added + file_changes.modified + file_changes.removed)
1009 1023
1010 1024 params = {
1011 1025 'under_review_label': new_status,
1012 1026 'added_commits': changes.added,
1013 1027 'removed_commits': changes.removed,
1014 1028 'changed_files': changed_files,
1015 1029 'added_files': file_changes.added,
1016 1030 'modified_files': file_changes.modified,
1017 1031 'removed_files': file_changes.removed,
1018 1032 'ancestor_commit_id': ancestor_commit_id
1019 1033 }
1020 1034 renderer = RstTemplateRenderer()
1021 1035 return renderer.render('pull_request_update.mako', **params)
1022 1036
1023 1037 def edit(self, pull_request, title, description, description_renderer, user):
1024 1038 pull_request = self.__get_pull_request(pull_request)
1025 1039 old_data = pull_request.get_api_data(with_merge_state=False)
1026 1040 if pull_request.is_closed():
1027 1041 raise ValueError('This pull request is closed')
1028 1042 if title:
1029 1043 pull_request.title = title
1030 1044 pull_request.description = description
1031 1045 pull_request.updated_on = datetime.datetime.now()
1032 1046 pull_request.description_renderer = description_renderer
1033 1047 Session().add(pull_request)
1034 1048 self._log_audit_action(
1035 1049 'repo.pull_request.edit', {'old_data': old_data},
1036 1050 user, pull_request)
1037 1051
1038 1052 def update_reviewers(self, pull_request, reviewer_data, user):
1039 1053 """
1040 1054 Update the reviewers in the pull request
1041 1055
1042 1056 :param pull_request: the pr to update
1043 1057 :param reviewer_data: list of tuples
1044 1058 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1045 1059 """
1046 1060 pull_request = self.__get_pull_request(pull_request)
1047 1061 if pull_request.is_closed():
1048 1062 raise ValueError('This pull request is closed')
1049 1063
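# normalize the incoming reviewer data into a user_id -> {reasons, mandatory}
# mapping, so it can be diffed against the reviewers stored in the database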
1050 1064 reviewers = {}
1051 1065 for user_id, reasons, mandatory, rules in reviewer_data:
1052 1066 if isinstance(user_id, (int, compat.string_types)):
1053 1067 user_id = self._get_user(user_id).user_id
1054 1068 reviewers[user_id] = {
1055 1069 'reasons': reasons, 'mandatory': mandatory}
1056 1070
1057 1071 reviewers_ids = set(reviewers.keys())
1058 1072 current_reviewers = PullRequestReviewers.query()\
1059 1073 .filter(PullRequestReviewers.pull_request ==
1060 1074 pull_request).all()
1061 1075 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1062 1076
1063 1077 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1064 1078 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1065 1079
1066 1080 log.debug("Adding %s reviewers", ids_to_add)
1067 1081 log.debug("Removing %s reviewers", ids_to_remove)
1068 1082 changed = False
1069 1083 added_audit_reviewers = []
1070 1084 removed_audit_reviewers = []
1071 1085
1072 1086 for uid in ids_to_add:
1073 1087 changed = True
1074 1088 _usr = self._get_user(uid)
1075 1089 reviewer = PullRequestReviewers()
1076 1090 reviewer.user = _usr
1077 1091 reviewer.pull_request = pull_request
1078 1092 reviewer.reasons = reviewers[uid]['reasons']
1079 1093 # NOTE(marcink): mandatory shouldn't be changed now
1080 1094 # reviewer.mandatory = reviewers[uid]['reasons']
1081 1095 Session().add(reviewer)
1082 1096 added_audit_reviewers.append(reviewer.get_dict())
1083 1097
1084 1098 for uid in ids_to_remove:
1085 1099 changed = True
1086 1100 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1087 1101 # that prevents and fixes cases that we added the same reviewer twice.
1088 1102 # this CAN happen due to the lack of DB checks
1089 1103 reviewers = PullRequestReviewers.query()\
1090 1104 .filter(PullRequestReviewers.user_id == uid,
1091 1105 PullRequestReviewers.pull_request == pull_request)\
1092 1106 .all()
1093 1107
1094 1108 for obj in reviewers:
1095 1109 removed_audit_reviewers.append(obj.get_dict())
1096 1110 Session().delete(obj)
1097 1111
1098 1112 if changed:
1099 1113 Session().expire_all()
1100 1114 pull_request.updated_on = datetime.datetime.now()
1101 1115 Session().add(pull_request)
1102 1116
1103 1117 # finally store audit logs
1104 1118 for user_data in added_audit_reviewers:
1105 1119 self._log_audit_action(
1106 1120 'repo.pull_request.reviewer.add', {'data': user_data},
1107 1121 user, pull_request)
1108 1122 for user_data in removed_audit_reviewers:
1109 1123 self._log_audit_action(
1110 1124 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1111 1125 user, pull_request)
1112 1126
1113 1127 self.notify_reviewers(pull_request, ids_to_add)
1114 1128 return ids_to_add, ids_to_remove
1115 1129
1116 1130 def get_url(self, pull_request, request=None, permalink=False):
1117 1131 if not request:
1118 1132 request = get_current_request()
1119 1133
1120 1134 if permalink:
1121 1135 return request.route_url(
1122 1136 'pull_requests_global',
1123 1137 pull_request_id=pull_request.pull_request_id,)
1124 1138 else:
1125 1139 return request.route_url('pullrequest_show',
1126 1140 repo_name=safe_str(pull_request.target_repo.repo_name),
1127 1141 pull_request_id=pull_request.pull_request_id,)
1128 1142
1129 1143 def get_shadow_clone_url(self, pull_request, request=None):
1130 1144 """
1131 1145 Returns a qualified url pointing to the shadow repository. If this pull
1132 1146 request is closed there is no shadow repository and ``None`` will be
1133 1147 returned.
1134 1148 """
1135 1149 if pull_request.is_closed():
1136 1150 return None
1137 1151 else:
1138 1152 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1139 1153 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1140 1154
1141 1155 def notify_reviewers(self, pull_request, reviewers_ids):
1142 1156 # notification to reviewers
1143 1157 if not reviewers_ids:
1144 1158 return
1145 1159
1146 1160 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1147 1161
1148 1162 pull_request_obj = pull_request
1149 1163 # get the current participants of this pull request
1150 1164 recipients = reviewers_ids
1151 1165 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1152 1166
1153 1167 pr_source_repo = pull_request_obj.source_repo
1154 1168 pr_target_repo = pull_request_obj.target_repo
1155 1169
1156 1170 pr_url = h.route_url('pullrequest_show',
1157 1171 repo_name=pr_target_repo.repo_name,
1158 1172 pull_request_id=pull_request_obj.pull_request_id,)
1159 1173
1160 1174 # set some variables for email notification
1161 1175 pr_target_repo_url = h.route_url(
1162 1176 'repo_summary', repo_name=pr_target_repo.repo_name)
1163 1177
1164 1178 pr_source_repo_url = h.route_url(
1165 1179 'repo_summary', repo_name=pr_source_repo.repo_name)
1166 1180
1167 1181 # pull request specifics
1168 1182 pull_request_commits = [
1169 1183 (x.raw_id, x.message)
1170 1184 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1171 1185
1172 1186 kwargs = {
1173 1187 'user': pull_request.author,
1174 1188 'pull_request': pull_request_obj,
1175 1189 'pull_request_commits': pull_request_commits,
1176 1190
1177 1191 'pull_request_target_repo': pr_target_repo,
1178 1192 'pull_request_target_repo_url': pr_target_repo_url,
1179 1193
1180 1194 'pull_request_source_repo': pr_source_repo,
1181 1195 'pull_request_source_repo_url': pr_source_repo_url,
1182 1196
1183 1197 'pull_request_url': pr_url,
1184 1198 }
1185 1199
1186 1200 # pre-generate the subject for notification itself
1187 1201 (subject,
1188 1202 _h, _e, # we don't care about those
1189 1203 body_plaintext) = EmailNotificationModel().render_email(
1190 1204 notification_type, **kwargs)
1191 1205
1192 1206 # create notification objects, and emails
1193 1207 NotificationModel().create(
1194 1208 created_by=pull_request.author,
1195 1209 notification_subject=subject,
1196 1210 notification_body=body_plaintext,
1197 1211 notification_type=notification_type,
1198 1212 recipients=recipients,
1199 1213 email_kwargs=kwargs,
1200 1214 )
1201 1215
1202 1216 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1203 1217 commit_changes, file_changes):
1204 1218
1205 1219 updating_user_id = updating_user.user_id
1206 1220 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1207 1221 # NOTE(marcink): send notification to all other users except to
1208 1222 # person who updated the PR
1209 1223 recipients = reviewers.difference(set([updating_user_id]))
1210 1224
1211 1225 log.debug('Notify following recipients about pull-request update %s', recipients)
1212 1226
1213 1227 pull_request_obj = pull_request
1214 1228
1215 1229 # send email about the update
1216 1230 changed_files = (
1217 1231 file_changes.added + file_changes.modified + file_changes.removed)
1218 1232
1219 1233 pr_source_repo = pull_request_obj.source_repo
1220 1234 pr_target_repo = pull_request_obj.target_repo
1221 1235
1222 1236 pr_url = h.route_url('pullrequest_show',
1223 1237 repo_name=pr_target_repo.repo_name,
1224 1238 pull_request_id=pull_request_obj.pull_request_id,)
1225 1239
1226 1240 # set some variables for email notification
1227 1241 pr_target_repo_url = h.route_url(
1228 1242 'repo_summary', repo_name=pr_target_repo.repo_name)
1229 1243
1230 1244 pr_source_repo_url = h.route_url(
1231 1245 'repo_summary', repo_name=pr_source_repo.repo_name)
1232 1246
1233 1247 email_kwargs = {
1234 1248 'date': datetime.datetime.now(),
1235 1249 'updating_user': updating_user,
1236 1250
1237 1251 'pull_request': pull_request_obj,
1238 1252
1239 1253 'pull_request_target_repo': pr_target_repo,
1240 1254 'pull_request_target_repo_url': pr_target_repo_url,
1241 1255
1242 1256 'pull_request_source_repo': pr_source_repo,
1243 1257 'pull_request_source_repo_url': pr_source_repo_url,
1244 1258
1245 1259 'pull_request_url': pr_url,
1246 1260
1247 1261 'ancestor_commit_id': ancestor_commit_id,
1248 1262 'added_commits': commit_changes.added,
1249 1263 'removed_commits': commit_changes.removed,
1250 1264 'changed_files': changed_files,
1251 1265 'added_files': file_changes.added,
1252 1266 'modified_files': file_changes.modified,
1253 1267 'removed_files': file_changes.removed,
1254 1268 }
1255 1269
1256 1270 (subject,
1257 1271 _h, _e, # we don't care about those
1258 1272 body_plaintext) = EmailNotificationModel().render_email(
1259 1273 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1260 1274
1261 1275 # create notification objects, and emails
1262 1276 NotificationModel().create(
1263 1277 created_by=updating_user,
1264 1278 notification_subject=subject,
1265 1279 notification_body=body_plaintext,
1266 1280 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1267 1281 recipients=recipients,
1268 1282 email_kwargs=email_kwargs,
1269 1283 )
1270 1284
1271 1285 def delete(self, pull_request, user):
1272 1286 pull_request = self.__get_pull_request(pull_request)
1273 1287 old_data = pull_request.get_api_data(with_merge_state=False)
1274 1288 self._cleanup_merge_workspace(pull_request)
1275 1289 self._log_audit_action(
1276 1290 'repo.pull_request.delete', {'old_data': old_data},
1277 1291 user, pull_request)
1278 1292 Session().delete(pull_request)
1279 1293
1280 1294 def close_pull_request(self, pull_request, user):
1281 1295 pull_request = self.__get_pull_request(pull_request)
1282 1296 self._cleanup_merge_workspace(pull_request)
1283 1297 pull_request.status = PullRequest.STATUS_CLOSED
1284 1298 pull_request.updated_on = datetime.datetime.now()
1285 1299 Session().add(pull_request)
1286 1300 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1287 1301
1288 1302 pr_data = pull_request.get_api_data(with_merge_state=False)
1289 1303 self._log_audit_action(
1290 1304 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1291 1305
1292 1306 def close_pull_request_with_comment(
1293 1307 self, pull_request, user, repo, message=None, auth_user=None):
1294 1308
1295 1309 pull_request_review_status = pull_request.calculated_review_status()
1296 1310
1297 1311 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1298 1312 # approved only if we have voting consent
1299 1313 status = ChangesetStatus.STATUS_APPROVED
1300 1314 else:
1301 1315 status = ChangesetStatus.STATUS_REJECTED
1302 1316 status_lbl = ChangesetStatus.get_status_lbl(status)
1303 1317
1304 1318 default_message = (
1305 1319 'Closing with status change {transition_icon} {status}.'
1306 1320 ).format(transition_icon='>', status=status_lbl)
1307 1321 text = message or default_message
1308 1322
1309 1323 # create a comment, and link it to new status
1310 1324 comment = CommentsModel().create(
1311 1325 text=text,
1312 1326 repo=repo.repo_id,
1313 1327 user=user.user_id,
1314 1328 pull_request=pull_request.pull_request_id,
1315 1329 status_change=status_lbl,
1316 1330 status_change_type=status,
1317 1331 closing_pr=True,
1318 1332 auth_user=auth_user,
1319 1333 )
1320 1334
1321 1335 # calculate old status before we change it
1322 1336 old_calculated_status = pull_request.calculated_review_status()
1323 1337 ChangesetStatusModel().set_status(
1324 1338 repo.repo_id,
1325 1339 status,
1326 1340 user.user_id,
1327 1341 comment=comment,
1328 1342 pull_request=pull_request.pull_request_id
1329 1343 )
1330 1344
1331 1345 Session().flush()
1332 1346
1333 1347 self.trigger_pull_request_hook(pull_request, user, 'comment',
1334 1348 data={'comment': comment})
1335 1349
1336 1350 # we now calculate the status of the pull request again, and based on that
1337 1351 # calculation trigger a status change. This might happen in cases where
1338 1352 # a non-reviewer admin closes a PR, which means their vote doesn't
1339 1353 # change the status, while if they are a reviewer it might change it.
1340 1354 calculated_status = pull_request.calculated_review_status()
1341 1355 if old_calculated_status != calculated_status:
1342 1356 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1343 1357 data={'status': calculated_status})
1344 1358
1345 1359 # finally close the PR
1346 1360 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1347 1361
1348 1362 return comment, status
1349 1363
1350 1364 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1351 1365 _ = translator or get_current_request().translate
1352 1366
1353 1367 if not self._is_merge_enabled(pull_request):
1354 1368 return None, False, _('Server-side pull request merging is disabled.')
1355 1369
1356 1370 if pull_request.is_closed():
1357 1371 return None, False, _('This pull request is closed.')
1358 1372
1359 1373 merge_possible, msg = self._check_repo_requirements(
1360 1374 target=pull_request.target_repo, source=pull_request.source_repo,
1361 1375 translator=_)
1362 1376 if not merge_possible:
1363 1377 return None, merge_possible, msg
1364 1378
1365 1379 try:
1366 1380 merge_response = self._try_merge(
1367 1381 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1368 1382 log.debug("Merge response: %s", merge_response)
1369 1383 return merge_response, merge_response.possible, merge_response.merge_status_message
1370 1384 except NotImplementedError:
1371 1385 return None, False, _('Pull request merging is not supported.')
1372 1386
1373 1387 def _check_repo_requirements(self, target, source, translator):
1374 1388 """
1375 1389 Check if `target` and `source` have compatible requirements.
1376 1390
1377 1391 Currently this is just checking for largefiles.
1378 1392 """
1379 1393 _ = translator
1380 1394 target_has_largefiles = self._has_largefiles(target)
1381 1395 source_has_largefiles = self._has_largefiles(source)
1382 1396 merge_possible = True
1383 1397 message = u''
1384 1398
1385 1399 if target_has_largefiles != source_has_largefiles:
1386 1400 merge_possible = False
1387 1401 if source_has_largefiles:
1388 1402 message = _(
1389 1403 'Target repository large files support is disabled.')
1390 1404 else:
1391 1405 message = _(
1392 1406 'Source repository large files support is disabled.')
1393 1407
1394 1408 return merge_possible, message
1395 1409
1396 1410 def _has_largefiles(self, repo):
1397 1411 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1398 1412 'extensions', 'largefiles')
1399 1413 return largefiles_ui and largefiles_ui[0].active
1400 1414
1401 1415 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1402 1416 """
1403 1417 Try to merge the pull request and return the merge status.
1404 1418 """
1405 1419 log.debug(
1406 1420 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1407 1421 pull_request.pull_request_id, force_shadow_repo_refresh)
1408 1422 target_vcs = pull_request.target_repo.scm_instance()
1409 1423 # Refresh the target reference.
1410 1424 try:
1411 1425 target_ref = self._refresh_reference(
1412 1426 pull_request.target_ref_parts, target_vcs)
1413 1427 except CommitDoesNotExistError:
1414 1428 merge_state = MergeResponse(
1415 1429 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1416 1430 metadata={'target_ref': pull_request.target_ref_parts})
1417 1431 return merge_state
1418 1432
1419 1433 target_locked = pull_request.target_repo.locked
1420 1434 if target_locked and target_locked[0]:
1421 1435 locked_by = 'user:{}'.format(target_locked[0])
1422 1436 log.debug("The target repository is locked by %s.", locked_by)
1423 1437 merge_state = MergeResponse(
1424 1438 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1425 1439 metadata={'locked_by': locked_by})
1426 1440 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1427 1441 pull_request, target_ref):
1428 1442 log.debug("Refreshing the merge status of the repository.")
1429 1443 merge_state = self._refresh_merge_state(
1430 1444 pull_request, target_vcs, target_ref)
1431 1445 else:
1432 1446 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1433 1447 metadata = {
1434 1448 'unresolved_files': '',
1435 1449 'target_ref': pull_request.target_ref_parts,
1436 1450 'source_ref': pull_request.source_ref_parts,
1437 1451 }
1438 1452 if pull_request.last_merge_metadata:
1439 1453 metadata.update(pull_request.last_merge_metadata)
1440 1454
1441 1455 if not possible and target_ref.type == 'branch':
1442 1456 # NOTE(marcink): case for mercurial multiple heads on branch
1443 1457 heads = target_vcs._heads(target_ref.name)
1444 1458 if len(heads) != 1:
1445 1459 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1446 1460 metadata.update({
1447 1461 'heads': heads
1448 1462 })
1449 1463
1450 1464 merge_state = MergeResponse(
1451 1465 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1452 1466
1453 1467 return merge_state
1454 1468
1455 1469 def _refresh_reference(self, reference, vcs_repository):
1456 1470 if reference.type in self.UPDATABLE_REF_TYPES:
1457 1471 name_or_id = reference.name
1458 1472 else:
1459 1473 name_or_id = reference.commit_id
1460 1474
1461 1475 refreshed_commit = vcs_repository.get_commit(name_or_id)
1462 1476 refreshed_reference = Reference(
1463 1477 reference.type, reference.name, refreshed_commit.raw_id)
1464 1478 return refreshed_reference
1465 1479
1466 1480 def _needs_merge_state_refresh(self, pull_request, target_reference):
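# a refresh is needed whenever the source revisions or the target reference
# moved since the last recorded merge simulation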
1467 1481 return not(
1468 1482 pull_request.revisions and
1469 1483 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1470 1484 target_reference.commit_id == pull_request._last_merge_target_rev)
1471 1485
1472 1486 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1473 1487 workspace_id = self._workspace_id(pull_request)
1474 1488 source_vcs = pull_request.source_repo.scm_instance()
1475 1489 repo_id = pull_request.target_repo.repo_id
1476 1490 use_rebase = self._use_rebase_for_merging(pull_request)
1477 1491 close_branch = self._close_branch_before_merging(pull_request)
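# dry-run merge in the shadow workspace; this computes the merge state
# without writing anything to the actual target repository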
1478 1492 merge_state = target_vcs.merge(
1479 1493 repo_id, workspace_id,
1480 1494 target_reference, source_vcs, pull_request.source_ref_parts,
1481 1495 dry_run=True, use_rebase=use_rebase,
1482 1496 close_branch=close_branch)
1483 1497
1484 1498 # Do not store the response if there was an unknown error.
1485 1499 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1486 1500 pull_request._last_merge_source_rev = \
1487 1501 pull_request.source_ref_parts.commit_id
1488 1502 pull_request._last_merge_target_rev = target_reference.commit_id
1489 1503 pull_request.last_merge_status = merge_state.failure_reason
1490 1504 pull_request.last_merge_metadata = merge_state.metadata
1491 1505
1492 1506 pull_request.shadow_merge_ref = merge_state.merge_ref
1493 1507 Session().add(pull_request)
1494 1508 Session().commit()
1495 1509
1496 1510 return merge_state
1497 1511
1498 1512 def _workspace_id(self, pull_request):
1499 1513 workspace_id = 'pr-%s' % pull_request.pull_request_id
1500 1514 return workspace_id
1501 1515
1502 1516 def generate_repo_data(self, repo, commit_id=None, branch=None,
1503 1517 bookmark=None, translator=None):
1504 1518 from rhodecode.model.repo import RepoModel
1505 1519
1506 1520 all_refs, selected_ref = \
1507 1521 self._get_repo_pullrequest_sources(
1508 1522 repo.scm_instance(), commit_id=commit_id,
1509 1523 branch=branch, bookmark=bookmark, translator=translator)
1510 1524
1511 1525 refs_select2 = []
1512 1526 for element in all_refs:
1513 1527 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1514 1528 refs_select2.append({'text': element[1], 'children': children})
1515 1529
1516 1530 return {
1517 1531 'user': {
1518 1532 'user_id': repo.user.user_id,
1519 1533 'username': repo.user.username,
1520 1534 'firstname': repo.user.first_name,
1521 1535 'lastname': repo.user.last_name,
1522 1536 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1523 1537 },
1524 1538 'name': repo.repo_name,
1525 1539 'link': RepoModel().get_url(repo),
1526 1540 'description': h.chop_at_smart(repo.description_safe, '\n'),
1527 1541 'refs': {
1528 1542 'all_refs': all_refs,
1529 1543 'selected_ref': selected_ref,
1530 1544 'select2_refs': refs_select2
1531 1545 }
1532 1546 }
1533 1547
1534 1548 def generate_pullrequest_title(self, source, source_ref, target):
1535 1549 return u'{source}#{at_ref} to {target}'.format(
1536 1550 source=source,
1537 1551 at_ref=source_ref,
1538 1552 target=target,
1539 1553 )
1540 1554
1541 1555 def _cleanup_merge_workspace(self, pull_request):
1542 1556 # Merging related cleanup
1543 1557 repo_id = pull_request.target_repo.repo_id
1544 1558 target_scm = pull_request.target_repo.scm_instance()
1545 1559 workspace_id = self._workspace_id(pull_request)
1546 1560
1547 1561 try:
1548 1562 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1549 1563 except NotImplementedError:
1550 1564 pass
1551 1565
1552 1566 def _get_repo_pullrequest_sources(
1553 1567 self, repo, commit_id=None, branch=None, bookmark=None,
1554 1568 translator=None):
1555 1569 """
1556 1570 Return a structure with the repo's interesting commits, suitable for
1557 1571 the selectors in the pull request controller
1558 1572
1559 1573 :param commit_id: a commit that must be in the list somehow
1560 1574 and selected by default
1561 1575 :param branch: a branch that must be in the list and selected
1562 1576 by default - even if closed
1563 1577 :param bookmark: a bookmark that must be in the list and selected by default
1564 1578 """
1565 1579 _ = translator or get_current_request().translate
1566 1580
1567 1581 commit_id = safe_str(commit_id) if commit_id else None
1568 1582 branch = safe_unicode(branch) if branch else None
1569 1583 bookmark = safe_unicode(bookmark) if bookmark else None
1570 1584
1571 1585 selected = None
1572 1586
1573 1587 # order matters: first source that has commit_id in it will be selected
1574 1588 sources = []
1575 1589 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1576 1590 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1577 1591
1578 1592 if commit_id:
1579 1593 ref_commit = (h.short_id(commit_id), commit_id)
1580 1594 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1581 1595
1582 1596 sources.append(
1583 1597 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1584 1598 )
1585 1599
1586 1600 groups = []
1587 1601
1588 1602 for group_key, ref_list, group_name, match in sources:
1589 1603 group_refs = []
1590 1604 for ref_name, ref_id in ref_list:
1591 1605 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1592 1606 group_refs.append((ref_key, ref_name))
1593 1607
1594 1608 if not selected:
1595 1609 if set([commit_id, match]) & set([ref_id, ref_name]):
1596 1610 selected = ref_key
1597 1611
1598 1612 if group_refs:
1599 1613 groups.append((group_refs, group_name))
1600 1614
1601 1615 if not selected:
1602 1616 ref = commit_id or branch or bookmark
1603 1617 if ref:
1604 1618 raise CommitDoesNotExistError(
1605 1619 u'No commit refs could be found matching: {}'.format(ref))
1606 1620 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1607 1621 selected = u'branch:{}:{}'.format(
1608 1622 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1609 1623 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1610 1624 )
1611 1625 elif repo.commit_ids:
1612 1626 # make the user select in this case
1613 1627 selected = None
1614 1628 else:
1615 1629 raise EmptyRepositoryError()
1616 1630 return groups, selected
1617 1631
1618 1632 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1619 1633 hide_whitespace_changes, diff_context):
1620 1634
1621 1635 return self._get_diff_from_pr_or_version(
1622 1636 source_repo, source_ref_id, target_ref_id,
1623 1637 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1624 1638
1625 1639 def _get_diff_from_pr_or_version(
1626 1640 self, source_repo, source_ref_id, target_ref_id,
1627 1641 hide_whitespace_changes, diff_context):
1628 1642
1629 1643 target_commit = source_repo.get_commit(
1630 1644 commit_id=safe_str(target_ref_id))
1631 1645 source_commit = source_repo.get_commit(
1632 1646 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1633 1647 if isinstance(source_repo, Repository):
1634 1648 vcs_repo = source_repo.scm_instance()
1635 1649 else:
1636 1650 vcs_repo = source_repo
1637 1651
1638 1652 # TODO: johbo: In the context of an update, we cannot reach
1639 1653 # the old commit anymore with our normal mechanisms. It needs
1640 1654 # some sort of special support in the vcs layer to avoid this
1641 1655 # workaround.
1642 1656 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1643 1657 vcs_repo.alias == 'git'):
1644 1658 source_commit.raw_id = safe_str(source_ref_id)
1645 1659
1646 1660 log.debug('calculating diff between '
1647 1661 'source_ref:%s and target_ref:%s for repo `%s`',
1648 1662 source_ref_id, target_ref_id,
1649 1663 safe_unicode(vcs_repo.path))
1650 1664
1651 1665 vcs_diff = vcs_repo.get_diff(
1652 1666 commit1=target_commit, commit2=source_commit,
1653 1667 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1654 1668 return vcs_diff
1655 1669
1656 1670 def _is_merge_enabled(self, pull_request):
1657 1671 return self._get_general_setting(
1658 1672 pull_request, 'rhodecode_pr_merge_enabled')
1659 1673
1660 1674 def _use_rebase_for_merging(self, pull_request):
1661 1675 repo_type = pull_request.target_repo.repo_type
1662 1676 if repo_type == 'hg':
1663 1677 return self._get_general_setting(
1664 1678 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1665 1679 elif repo_type == 'git':
1666 1680 return self._get_general_setting(
1667 1681 pull_request, 'rhodecode_git_use_rebase_for_merging')
1668 1682
1669 1683 return False
1670 1684
1671 1685 def _user_name_for_merging(self, pull_request, user):
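# the user attribute used as the merge author name can be overridden via the
# RC_MERGE_USER_NAME_ATTR environment variable, falling back to short_contact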
1672 1686 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1673 1687 if env_user_name_attr and hasattr(user, env_user_name_attr):
1674 1688 user_name_attr = env_user_name_attr
1675 1689 else:
1676 1690 user_name_attr = 'short_contact'
1677 1691
1678 1692 user_name = getattr(user, user_name_attr)
1679 1693 return user_name
1680 1694
1681 1695 def _close_branch_before_merging(self, pull_request):
1682 1696 repo_type = pull_request.target_repo.repo_type
1683 1697 if repo_type == 'hg':
1684 1698 return self._get_general_setting(
1685 1699 pull_request, 'rhodecode_hg_close_branch_before_merging')
1686 1700 elif repo_type == 'git':
1687 1701 return self._get_general_setting(
1688 1702 pull_request, 'rhodecode_git_close_branch_before_merging')
1689 1703
1690 1704 return False
1691 1705
1692 1706 def _get_general_setting(self, pull_request, settings_key, default=False):
1693 1707 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1694 1708 settings = settings_model.get_general_settings()
1695 1709 return settings.get(settings_key, default)
1696 1710
1697 1711 def _log_audit_action(self, action, action_data, user, pull_request):
1698 1712 audit_logger.store(
1699 1713 action=action,
1700 1714 action_data=action_data,
1701 1715 user=user,
1702 1716 repo=pull_request.target_repo)
1703 1717
1704 1718 def get_reviewer_functions(self):
1705 1719 """
1706 1720 Returns the functions used for validating and fetching default reviewers.
1707 1721 If available we use the EE package, else we fall back to the CE
1708 1722 package functions.
1709 1723 """
1710 1724 try:
1711 1725 from rc_reviewers.utils import get_default_reviewers_data
1712 1726 from rc_reviewers.utils import validate_default_reviewers
1713 1727 except ImportError:
1714 1728 from rhodecode.apps.repository.utils import get_default_reviewers_data
1715 1729 from rhodecode.apps.repository.utils import validate_default_reviewers
1716 1730
1717 1731 return get_default_reviewers_data, validate_default_reviewers
1718 1732
1719 1733
1720 1734 class MergeCheck(object):
1721 1735 """
1722 1736 Performs merge checks and returns a check object which stores information
1723 1737 about merge errors and merge conditions.
1724 1738 """
1725 1739 TODO_CHECK = 'todo'
1726 1740 PERM_CHECK = 'perm'
1727 1741 REVIEW_CHECK = 'review'
1728 1742 MERGE_CHECK = 'merge'
1729 1743 WIP_CHECK = 'wip'
1730 1744
1731 1745 def __init__(self):
1732 1746 self.review_status = None
1733 1747 self.merge_possible = None
1734 1748 self.merge_msg = ''
1735 1749 self.merge_response = None
1736 1750 self.failed = None
1737 1751 self.errors = []
1738 1752 self.error_details = OrderedDict()
1753 self.source_commit = AttributeDict()
1754 self.target_commit = AttributeDict()
1739 1755
1740 1756 def __repr__(self):
1741 1757 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1742 1758 self.merge_possible, self.failed, self.errors)
1743 1759
1744 1760 def push_error(self, error_type, message, error_key, details):
1745 1761 self.failed = True
1746 1762 self.errors.append([error_type, message])
1747 1763 self.error_details[error_key] = dict(
1748 1764 details=details,
1749 1765 error_type=error_type,
1750 1766 message=message
1751 1767 )
1752 1768
1753 1769 @classmethod
1754 1770 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1755 1771 force_shadow_repo_refresh=False):
1756 1772 _ = translator
1757 1773 merge_check = cls()
1758 1774
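# checks run in order; with fail_early=True we return right after the first
# failed check instead of collecting all errors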
1759 1775 # title has WIP:
1760 1776 if pull_request.work_in_progress:
1761 1777 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1762 1778
1763 1779 msg = _('WIP marker in title prevents an accidental merge.')
1764 1780 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1765 1781 if fail_early:
1766 1782 return merge_check
1767 1783
1768 1784 # permissions to merge
1769 user_allowed_to_merge = PullRequestModel().check_user_merge(
1770 pull_request, auth_user)
1785 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
1771 1786 if not user_allowed_to_merge:
1772 1787 log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
1773 1788
1774 1789 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1775 1790 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1776 1791 if fail_early:
1777 1792 return merge_check
1778 1793
1779 1794 # permission to merge into the target branch
1780 1795 target_commit_id = pull_request.target_ref_parts.commit_id
1781 1796 if pull_request.target_ref_parts.type == 'branch':
1782 1797 branch_name = pull_request.target_ref_parts.name
1783 1798 else:
1784 1799 # for mercurial we can always figure out the branch from the commit
1785 1800 # in case of bookmark
1786 1801 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1787 1802 branch_name = target_commit.branch
1788 1803
1789 1804 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1790 1805 pull_request.target_repo.repo_name, branch_name)
1791 1806 if branch_perm and branch_perm == 'branch.none':
1792 1807 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1793 1808 branch_name, rule)
1794 1809 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1795 1810 if fail_early:
1796 1811 return merge_check
1797 1812
1798 1813 # review status, must be always present
1799 1814 review_status = pull_request.calculated_review_status()
1800 1815 merge_check.review_status = review_status
1801 1816
1802 1817 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1803 1818 if not status_approved:
1804 1819 log.debug("MergeCheck: cannot merge, approval is pending.")
1805 1820
1806 1821 msg = _('Pull request reviewer approval is pending.')
1807 1822
1808 1823 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1809 1824
1810 1825 if fail_early:
1811 1826 return merge_check
1812 1827
1813 1828 # left over TODOs
1814 1829 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1815 1830 if todos:
1816 1831 log.debug("MergeCheck: cannot merge, {} "
1817 1832 "unresolved TODOs left.".format(len(todos)))
1818 1833
1819 1834 if len(todos) == 1:
1820 1835 msg = _('Cannot merge, {} TODO still not resolved.').format(
1821 1836 len(todos))
1822 1837 else:
1823 1838 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1824 1839 len(todos))
1825 1840
1826 1841 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1827 1842
1828 1843 if fail_early:
1829 1844 return merge_check
1830 1845
1831 1846 # merge possible, here is the filesystem simulation + shadow repo
1832 1847 merge_response, merge_status, msg = PullRequestModel().merge_status(
1833 1848 pull_request, translator=translator,
1834 1849 force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 1850
1836 1851 merge_check.merge_possible = merge_status
1837 1852 merge_check.merge_msg = msg
1838 1853 merge_check.merge_response = merge_response
1839 1854
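# record whether the source or the target reference moved since this pull
# request was last updated, so the show page can suggest an update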
1855 source_ref_id = pull_request.source_ref_parts.commit_id
1856 target_ref_id = pull_request.target_ref_parts.commit_id
1857
1858 try:
1859 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
1860 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
1861 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
1862 merge_check.source_commit.current_raw_id = source_commit.raw_id
1863 merge_check.source_commit.previous_raw_id = source_ref_id
1864
1865 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
1866 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
1867 merge_check.target_commit.current_raw_id = target_commit.raw_id
1868 merge_check.target_commit.previous_raw_id = target_ref_id
1869 except (SourceRefMissing, TargetRefMissing):
1870 pass
1871
1840 1872 if not merge_status:
1841 1873 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1842 1874 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1843 1875
1844 1876 if fail_early:
1845 1877 return merge_check
1846 1878
1847 1879 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1848 1880 return merge_check
1849 1881
1850 1882 @classmethod
1851 1883 def get_merge_conditions(cls, pull_request, translator):
1852 1884 _ = translator
1853 1885 merge_details = {}
1854 1886
1855 1887 model = PullRequestModel()
1856 1888 use_rebase = model._use_rebase_for_merging(pull_request)
1857 1889
1858 1890 if use_rebase:
1859 1891 merge_details['merge_strategy'] = dict(
1860 1892 details={},
1861 1893 message=_('Merge strategy: rebase')
1862 1894 )
1863 1895 else:
1864 1896 merge_details['merge_strategy'] = dict(
1865 1897 details={},
1866 1898 message=_('Merge strategy: explicit merge commit')
1867 1899 )
1868 1900
1869 1901 close_branch = model._close_branch_before_merging(pull_request)
1870 1902 if close_branch:
1871 1903 repo_type = pull_request.target_repo.repo_type
1872 1904 close_msg = ''
1873 1905 if repo_type == 'hg':
1874 1906 close_msg = _('Source branch will be closed after merge.')
1875 1907 elif repo_type == 'git':
1876 1908 close_msg = _('Source branch will be deleted after merge.')
1877 1909
1878 1910 merge_details['close_branch'] = dict(
1879 1911 details={},
1880 1912 message=close_msg
1881 1913 )
1882 1914
1883 1915 return merge_details
1884 1916
1885 1917
1886 1918 ChangeTuple = collections.namedtuple(
1887 1919 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1888 1920
1889 1921 FileChangeTuple = collections.namedtuple(
1890 1922 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,912 +1,922 b''
1 1 <%inherit file="/base/base.mako"/>
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
4 4
5 5 <%def name="title()">
6 6 ${_('{} Pull Request !{}').format(c.repo_name, c.pull_request.pull_request_id)}
7 7 %if c.rhodecode_name:
8 8 &middot; ${h.branding(c.rhodecode_name)}
9 9 %endif
10 10 </%def>
11 11
12 12 <%def name="breadcrumbs_links()">
13 13
14 14 </%def>
15 15
16 16 <%def name="menu_bar_nav()">
17 17 ${self.menu_items(active='repositories')}
18 18 </%def>
19 19
20 20 <%def name="menu_bar_subnav()">
21 21 ${self.repo_menu(active='showpullrequest')}
22 22 </%def>
23 23
24 24 <%def name="main()">
25 25
26 26 <script type="text/javascript">
27 27 // TODO: marcink switch this to pyroutes
28 28 AJAX_COMMENT_DELETE_URL = "${h.route_path('pullrequest_comment_delete',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id,comment_id='__COMMENT_ID__')}";
29 29 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
30 30 </script>
31 31
32 32 <div class="box">
33 33
34 34 <div class="box pr-summary">
35 35
36 36 <div class="summary-details block-left">
37 37 <div id="pr-title">
38 38 % if c.pull_request.is_closed():
39 39 <span class="pr-title-closed-tag tag">${_('Closed')}</span>
40 40 % endif
41 41 <input class="pr-title-input large disabled" disabled="disabled" name="pullrequest_title" type="text" value="${c.pull_request.title}">
42 42 </div>
43 43 <div id="pr-title-edit" class="input" style="display: none;">
44 44 <input class="pr-title-input large" id="pr-title-input" name="pullrequest_title" type="text" value="${c.pull_request.title}">
45 45 </div>
46 46
47 47 <% summary = lambda n:{False:'summary-short'}.get(n) %>
48 48 <div class="pr-details-title">
49 49 <div class="pull-left">
50 50 <a href="${h.route_path('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request !{}').format(c.pull_request.pull_request_id)}</a>
51 51 ${_('Created on')}
52 52 <span class="tooltip" title="${_('Last updated on')} ${h.format_date(c.pull_request.updated_on)}">${h.format_date(c.pull_request.created_on)},</span>
53 53 <span class="pr-details-title-author-pref">${_('by')}</span>
54 54 </div>
55 55
56 56 <div class="pull-left">
57 57 ${self.gravatar_with_user(c.pull_request.author.email, 16, tooltip=True)}
58 58 </div>
59 59
60 60 %if c.allowed_to_update:
61 61 <div class="pull-right">
62 62 <div id="edit_pull_request" class="action_button pr-save" style="display: none;">${_('Update title & description')}</div>
63 63 <div id="delete_pullrequest" class="action_button pr-save ${('' if c.allowed_to_delete else 'disabled' )}" style="display: none;">
64 64 % if c.allowed_to_delete:
65 65 ${h.secure_form(h.route_path('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id), request=request)}
66 66 <input class="btn btn-link btn-danger no-margin" id="remove_${c.pull_request.pull_request_id}" name="remove_${c.pull_request.pull_request_id}"
67 67 onclick="submitConfirm(event, this, _gettext('Confirm to delete this pull request'), _gettext('Delete'), '${'!{}'.format(c.pull_request.pull_request_id)}')"
68 68 type="submit" value="${_('Delete pull request')}">
69 69 ${h.end_form()}
70 70 % else:
71 71 <span class="tooltip" title="${_('Not allowed to delete this pull request')}">${_('Delete pull request')}</span>
72 72 % endif
73 73 </div>
74 74 <div id="open_edit_pullrequest" class="action_button">${_('Edit')}</div>
75 75 <div id="close_edit_pullrequest" class="action_button" style="display: none;">${_('Cancel')}</div>
76 76 </div>
77 77
78 78 %endif
79 79 </div>
80 80
81 81 <div id="pr-desc" class="input" title="${_('Rendered using {} renderer').format(c.renderer)}">
82 82 ${h.render(c.pull_request.description, renderer=c.renderer, repo_name=c.repo_name)}
83 83 </div>
84 84
85 85 <div id="pr-desc-edit" class="input textarea" style="display: none;">
86 86 <input id="pr-renderer-input" type="hidden" name="description_renderer" value="${c.visual.default_renderer}">
87 87 ${dt.markup_form('pr-description-input', form_text=c.pull_request.description)}
88 88 </div>
89 89
90 90 <div id="summary" class="fields pr-details-content">
91 91
92 92 ## review
93 93 <div class="field">
94 94 <div class="label-pr-detail">
95 95 <label>${_('Review status')}:</label>
96 96 </div>
97 97 <div class="input">
98 98 %if c.pull_request_review_status:
99 99 <div class="tag status-tag-${c.pull_request_review_status}">
100 100 <i class="icon-circle review-status-${c.pull_request_review_status}"></i>
101 101 <span class="changeset-status-lbl">
102 102 %if c.pull_request.is_closed():
103 103 ${_('Closed')},
104 104 %endif
105 105
106 106 ${h.commit_status_lbl(c.pull_request_review_status)}
107 107
108 108 </span>
109 109 </div>
110 110 - ${_ungettext('calculated based on {} reviewer vote', 'calculated based on {} reviewers votes', len(c.pull_request_reviewers)).format(len(c.pull_request_reviewers))}
111 111 %endif
112 112 </div>
113 113 </div>
114 114
115 115 ## source
116 116 <div class="field">
117 117 <div class="label-pr-detail">
118 118 <label>${_('Commit flow')}:</label>
119 119 </div>
120 120 <div class="input">
121 121 <div class="pr-commit-flow">
122 122 ## Source
123 123 %if c.pull_request.source_ref_parts.type == 'branch':
124 124 <a href="${h.route_path('repo_commits', repo_name=c.pull_request.source_repo.repo_name, _query=dict(branch=c.pull_request.source_ref_parts.name))}"><code class="pr-source-info">${c.pull_request.source_ref_parts.type}:${c.pull_request.source_ref_parts.name}</code></a>
125 125 %else:
126 126 <code class="pr-source-info">${'{}:{}'.format(c.pull_request.source_ref_parts.type, c.pull_request.source_ref_parts.name)}</code>
127 127 %endif
128 128 ${_('of')} <a href="${h.route_path('repo_summary', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.repo_name}</a>
129 129 &rarr;
130 130 ## Target
131 131 %if c.pull_request.target_ref_parts.type == 'branch':
132 132 <a href="${h.route_path('repo_commits', repo_name=c.pull_request.target_repo.repo_name, _query=dict(branch=c.pull_request.target_ref_parts.name))}"><code class="pr-target-info">${c.pull_request.target_ref_parts.type}:${c.pull_request.target_ref_parts.name}</code></a>
133 133 %else:
134 134 <code class="pr-target-info">${'{}:{}'.format(c.pull_request.target_ref_parts.type, c.pull_request.target_ref_parts.name)}</code>
135 135 %endif
136 136
137 137 ${_('of')} <a href="${h.route_path('repo_summary', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.repo_name}</a>
138 138
139 139 <a class="source-details-action" href="#expand-source-details" onclick="return versionController.toggleElement(this, '.source-details')" data-toggle-on='<i class="icon-angle-down">more details</i>' data-toggle-off='<i class="icon-angle-up">less details</i>'>
140 140 <i class="icon-angle-down">more details</i>
141 141 </a>
142 142
143 143 </div>
144 144
145 145 <div class="source-details" style="display: none">
146 146
147 147 <ul>
148 148
149 149 ## common ancestor
150 150 <li>
151 151 ${_('Common ancestor')}:
152 152 % if c.ancestor_commit:
153 153 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=c.ancestor_commit.raw_id)}">${h.show_id(c.ancestor_commit)}</a>
154 154 % else:
155 155 ${_('not available')}
156 156 % endif
157 157 </li>
158 158
159 159 ## pull url
160 160 <li>
161 161 %if h.is_hg(c.pull_request.source_repo):
162 162 <% clone_url = 'hg pull -r {} {}'.format(h.short_id(c.source_ref), c.pull_request.source_repo.clone_url()) %>
163 163 %elif h.is_git(c.pull_request.source_repo):
164 164 <% clone_url = 'git pull {} {}'.format(c.pull_request.source_repo.clone_url(), c.pull_request.source_ref_parts.name) %>
165 165 %endif
166 166
167 167 <span>${_('Pull changes from source')}</span>: <input type="text" class="input-monospace pr-pullinfo" value="${clone_url}" readonly="readonly">
168 168 <i class="tooltip icon-clipboard clipboard-action pull-right pr-pullinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the pull url')}"></i>
169 169 </li>
170 170
171 171 ## Shadow repo
172 172 <li>
173 173 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
174 174 %if h.is_hg(c.pull_request.target_repo):
175 175 <% clone_url = 'hg clone --update {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
176 176 %elif h.is_git(c.pull_request.target_repo):
177 177 <% clone_url = 'git clone --branch {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
178 178 %endif
179 179
180 180 <span class="tooltip" title="${_('Clone repository in its merged state using shadow repository')}">${_('Clone from shadow repository')}</span>: <input type="text" class="input-monospace pr-mergeinfo" value="${clone_url}" readonly="readonly">
181 181 <i class="tooltip icon-clipboard clipboard-action pull-right pr-mergeinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the clone url')}"></i>
182 182
183 183 % else:
184 184 <div class="">
185 185 ${_('Shadow repository data not available')}.
186 186 </div>
187 187 % endif
188 188 </li>
189 189
190 190 </ul>
191 191
192 192 </div>
193 193
194 194 </div>
195 195
196 196 </div>
197 197
198 198 ## versions
199 199 <div class="field">
200 200 <div class="label-pr-detail">
201 201 <label>${_('Versions')}:</label>
202 202 </div>
203 203
204 204 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
205 205 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
206 206
207 207 <div class="pr-versions">
208 208 % if c.show_version_changes:
209 209 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
210 210 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
211 211 ${_ungettext('{} version available for this pull request, ', '{} versions available for this pull request, ', len(c.versions)).format(len(c.versions))}
212 212 <a id="show-pr-versions" onclick="return versionController.toggleVersionView(this)" href="#show-pr-versions"
213 213 data-toggle-on="${_('show versions')}."
214 214 data-toggle-off="${_('hide versions')}.">
215 215 ${_('show versions')}.
216 216 </a>
217 217 <table>
218 218 ## SHOW ALL VERSIONS OF PR
219 219 <% ver_pr = None %>
220 220
221 221 % for data in reversed(list(enumerate(c.versions, 1))):
222 222 <% ver_pos = data[0] %>
223 223 <% ver = data[1] %>
224 224 <% ver_pr = ver.pull_request_version_id %>
225 225 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
226 226
227 227 <tr class="version-pr" style="display: ${display_row}">
228 228 <td>
229 229 <code>
230 230 <a href="${request.current_route_path(_query=dict(version=ver_pr or 'latest'))}">v${ver_pos}</a>
231 231 </code>
232 232 </td>
233 233 <td>
234 234 <input ${('checked="checked"' if c.from_version_num == ver_pr else '')} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
235 235 <input ${('checked="checked"' if c.at_version_num == ver_pr else '')} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
236 236 </td>
237 237 <td>
238 238 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
239 239 <i class="tooltip icon-circle review-status-${review_status}" title="${_('Your review status at this version')}"></i>
240 240
241 241 </td>
242 242 <td>
243 243 % if c.at_version_num != ver_pr:
244 244 <i class="tooltip icon-comment" title="${_('Comments from pull request version v{0}').format(ver_pos)}"></i>
245 245 <code>
246 246 General:${len(c.comment_versions[ver_pr]['at'])} / Inline:${len(c.inline_versions[ver_pr]['at'])}
247 247 </code>
248 248 % endif
249 249 </td>
250 250 <td>
251 251 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
252 252 </td>
253 253 <td>
254 254 <code>${h.age_component(ver.updated_on, time_is_local=True, tooltip=False)}</code>
255 255 </td>
256 256 </tr>
257 257 % endfor
258 258
259 259 <tr>
260 260 <td colspan="6">
261 261 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none"
262 262 data-label-text-locked="${_('select versions to show changes')}"
263 263 data-label-text-diff="${_('show changes between versions')}"
264 264 data-label-text-show="${_('show pull request for this version')}"
265 265 >
266 266 ${_('select versions to show changes')}
267 267 </button>
268 268 </td>
269 269 </tr>
270 270 </table>
271 271 % else:
272 272 <div>
273 273 ${_('Pull request versions not available')}.
274 274 </div>
275 275 % endif
276 276 </div>
277 277 </div>
278 278
279 279 </div>
280 280
281 281 </div>
282 282
283 283 ## REVIEW RULES
284 284 <div id="review_rules" style="display: none" class="reviewers-title block-right">
285 285 <div class="pr-details-title">
286 286 ${_('Reviewer rules')}
287 287 %if c.allowed_to_update:
288 288 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
289 289 %endif
290 290 </div>
291 291 <div class="pr-reviewer-rules">
292 292 ## review rules will be appended here, by default reviewers logic
293 293 </div>
294 294 <input id="review_data" type="hidden" name="review_data" value="">
295 295 </div>
296 296
297 297 ## REVIEWERS
298 298 <div class="reviewers-title first-panel block-right">
299 299 <div class="pr-details-title">
300 300 ${_('Pull request reviewers')}
301 301 %if c.allowed_to_update:
302 302 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Edit')}</span>
303 303 %endif
304 304 </div>
305 305 </div>
306 306 <div id="reviewers" class="block-right pr-details-content reviewers">
307 307
308 308 ## members rendering block
309 309 <input type="hidden" name="__start__" value="review_members:sequence">
310 310 <ul id="review_members" class="group_members">
311 311
312 312 % for review_obj, member, reasons, mandatory, status in c.pull_request_reviewers:
313 313 <script>
314 314 var member = ${h.json.dumps(h.reviewer_as_json(member, reasons=reasons, mandatory=mandatory, user_group=review_obj.rule_user_group_data()))|n};
315 315 var status = "${(status[0][1].status if status else 'not_reviewed')}";
316 316 var status_lbl = "${h.commit_status_lbl(status[0][1].status if status else 'not_reviewed')}";
317 317 var allowed_to_update = ${h.json.dumps(c.allowed_to_update)};
318 318
319 319 var entry = renderTemplate('reviewMemberEntry', {
320 320 'member': member,
321 321 'mandatory': member.mandatory,
322 322 'reasons': member.reasons,
323 323 'allowed_to_update': allowed_to_update,
324 324 'review_status': status,
325 325 'review_status_label': status_lbl,
326 326 'user_group': member.user_group,
327 327 'create': false
328 328 });
329 329 $('#review_members').append(entry)
330 330 </script>
331 331
332 332 % endfor
333 333
334 334 </ul>
335 335
336 336 <input type="hidden" name="__end__" value="review_members:sequence">
337 337 ## end members rendering block
338 338
339 339 %if not c.pull_request.is_closed():
340 340 <div id="add_reviewer" class="ac" style="display: none;">
341 341 %if c.allowed_to_update:
342 342 % if not c.forbid_adding_reviewers:
343 343 <div id="add_reviewer_input" class="reviewer_ac">
344 344 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer or reviewer group'))}
345 345 <div id="reviewers_container"></div>
346 346 </div>
347 347 % endif
348 348 <div class="pull-right">
349 349 <button id="update_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</button>
350 350 </div>
351 351 %endif
352 352 </div>
353 353 %endif
354 354 </div>
355 355
356 356 ## TODOs will be listed here
357 357 <div class="reviewers-title block-right">
358 358 <div class="pr-details-title">
359 359 ## Only show unresolved, that is only what matters
360 360 TODO Comments - ${len(c.unresolved_comments)} / ${(len(c.unresolved_comments) + len(c.resolved_comments))}
361 361
362 362 % if not c.at_version:
363 363 % if c.resolved_comments:
364 364 <span class="block-right action_button last-item noselect" onclick="$('.unresolved-todo-text').toggle(); return versionController.toggleElement(this, '.unresolved-todo');" data-toggle-on="Show resolved" data-toggle-off="Hide resolved">Show resolved</span>
365 365 % else:
366 366 <span class="block-right last-item noselect">Show resolved</span>
367 367 % endif
368 368 % endif
369 369 </div>
370 370 </div>
371 371 <div class="block-right pr-details-content reviewers">
372 372
373 373 <table class="todo-table">
374 374 <%
375 375 def sorter(entry):
376 376 user_id = entry.author.user_id
377 377 resolved = '1' if entry.resolved else '0'
378 378 if user_id == c.rhodecode_user.user_id:
379 379 # own comments first
380 380 user_id = 0
381 381 return '{}_{}_{}'.format(resolved, user_id, str(entry.comment_id).zfill(100))
382 382 %>
383 383
384 384 % if c.at_version:
385 385 <tr>
386 386 <td class="unresolved-todo-text">${_('unresolved TODOs unavailable in this view')}.</td>
387 387 </tr>
388 388 % else:
389 389 % for todo_comment in sorted(c.unresolved_comments + c.resolved_comments, key=sorter):
390 390 <% resolved = todo_comment.resolved %>
391 391 % if inline:
392 392 <% outdated_at_ver = todo_comment.outdated_at_version(getattr(c, 'at_version_num', None)) %>
393 393 % else:
394 394 <% outdated_at_ver = todo_comment.older_than_version(getattr(c, 'at_version_num', None)) %>
395 395 % endif
396 396
397 397 <tr ${('class="unresolved-todo" style="display: none"' if resolved else '') |n}>
398 398
399 399 <td class="td-todo-number">
400 400 % if resolved:
401 401 <a class="permalink todo-resolved tooltip" title="${_('Resolved by comment #{}').format(todo_comment.resolved.comment_id)}" href="#comment-${todo_comment.comment_id}" onclick="return Rhodecode.comments.scrollToComment($('#comment-${todo_comment.comment_id}'), 0, ${h.json.dumps(outdated_at_ver)})">
402 402 <i class="icon-flag-filled"></i> ${todo_comment.comment_id}</a>
403 403 % else:
404 404 <a class="permalink" href="#comment-${todo_comment.comment_id}" onclick="return Rhodecode.comments.scrollToComment($('#comment-${todo_comment.comment_id}'), 0, ${h.json.dumps(outdated_at_ver)})">
405 405 <i class="icon-flag-filled"></i> ${todo_comment.comment_id}</a>
406 406 % endif
407 407 </td>
408 408 <td class="td-todo-gravatar">
409 409 ${base.gravatar(todo_comment.author.email, 16, user=todo_comment.author, tooltip=True, extra_class=['no-margin'])}
410 410 </td>
411 411 <td class="todo-comment-text-wrapper">
412 412 <div class="todo-comment-text">
413 413 <code>${h.chop_at_smart(todo_comment.text, '\n', suffix_if_chopped='...')}</code>
414 414 </div>
415 415 </td>
416 416
417 417 </tr>
418 418 % endfor
419 419
420 420 % if len(c.unresolved_comments) == 0:
421 421 <tr>
422 422 <td class="unresolved-todo-text">${_('No unresolved TODOs')}.</td>
423 423 </tr>
424 424 % endif
425 425
426 426 % endif
427 427
428 428 </table>
429 429
430 430 </div>
431 431 </div>
432 432
433 433 </div>
434 434
435 435 <div class="box">
436 436
437 437 % if c.state_progressing:
438 438
439 439 <h2 style="text-align: center">
440 440 ${_('Cannot show diff when pull request state is changing. Current progress state')}: <span class="tag tag-merge-state-${c.pull_request.state}">${c.pull_request.state}</span>
441 441
442 442 % if c.is_super_admin:
443 443 <br/>
444 444             If you think this is an error, try a <a href="${h.current_route_path(request, force_state='created')}">forced state reset</a> to <span class="tag tag-merge-state-created">created</span> state.
445 445 % endif
446 446 </h2>
447 447
448 448 % else:
449 449
450 450 ## Diffs rendered here
451 451 <div class="table" >
452 452 <div id="changeset_compare_view_content">
453 453 ##CS
454 454 % if c.missing_requirements:
455 455 <div class="box">
456 456 <div class="alert alert-warning">
457 457 <div>
458 458 <strong>${_('Missing requirements:')}</strong>
459 459 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
460 460 </div>
461 461 </div>
462 462 </div>
463 463 % elif c.missing_commits:
464 464 <div class="box">
465 465 <div class="alert alert-warning">
466 466 <div>
467 467 <strong>${_('Missing commits')}:</strong>
468 468 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}
469 469 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}
470 470 ${_('Consider doing a {force_refresh_url} in case you think this is an error.').format(force_refresh_url=h.link_to('force refresh', h.current_route_path(request, force_refresh='1')))|n}
471 471 </div>
472 472 </div>
473 473 </div>
474 % elif c.pr_merge_source_commit.changed:
475 <div class="box">
476 <div class="alert alert-info">
477 <div>
478 % if c.pr_merge_source_commit.changed:
479                 <strong>${_('There are new changes for {}:{} in the source repository. Please consider updating this pull request.').format(c.pr_merge_source_commit.ref_spec.type, c.pr_merge_source_commit.ref_spec.name)}</strong>
480 % endif
481 </div>
482 </div>
483 </div>
474 484 % endif
475 485
476 486 <div class="compare_view_commits_title">
477 487 % if not c.compare_mode:
478 488
479 489 % if c.at_version_pos:
480 490 <h4>
481 491 ${_('Showing changes at v%d, commenting is disabled.') % c.at_version_pos}
482 492 </h4>
483 493 % endif
484 494
485 495 <div class="pull-left">
486 496 <div class="btn-group">
487 497 <a class="${('collapsed' if c.collapse_all_commits else '')}" href="#expand-commits" onclick="toggleCommitExpand(this); return false" data-toggle-commits-cnt=${len(c.commit_ranges)} >
488 498 % if c.collapse_all_commits:
489 499 <i class="icon-plus-squared-alt icon-no-margin"></i>
490 500 ${_ungettext('Expand {} commit', 'Expand {} commits', len(c.commit_ranges)).format(len(c.commit_ranges))}
491 501 % else:
492 502 <i class="icon-minus-squared-alt icon-no-margin"></i>
493 503 ${_ungettext('Collapse {} commit', 'Collapse {} commits', len(c.commit_ranges)).format(len(c.commit_ranges))}
494 504 % endif
495 505 </a>
496 506 </div>
497 507 </div>
498 508
499 509 <div class="pull-right">
500 510 % if c.allowed_to_update and not c.pull_request.is_closed():
501 511
502 512 <div class="btn-group btn-group-actions">
503 513 <a id="update_commits" class="btn btn-primary no-margin" onclick="updateController.updateCommits(this); return false">
504 514 ${_('Update commits')}
505 515 </a>
506 516
507 517 <a id="update_commits_switcher" class="tooltip btn btn-primary" style="margin-left: -1px" data-toggle="dropdown" aria-pressed="false" role="button" title="${_('more update options')}">
508 518 <i class="icon-down"></i>
509 519 </a>
510 520
511 521 <div class="btn-action-switcher-container" id="update-commits-switcher">
512 522 <ul class="btn-action-switcher" role="menu">
513 523 <li>
514 524 <a href="#forceUpdate" onclick="updateController.forceUpdateCommits(this); return false">
515 525 ${_('Force update commits')}
516 526 </a>
517 527 <div class="action-help-block">
518 528 ${_('Update commits and force refresh this pull request.')}
519 529 </div>
520 530 </li>
521 531 </ul>
522 532 </div>
523 533 </div>
524 534
525 535 % else:
526 536             <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for the current view')}">${_('Update commits')}</a>
527 537 % endif
528 538
529 539 </div>
530 540 % endif
531 541 </div>
532 542
533 543 % if not c.missing_commits:
534 544 % if c.compare_mode:
535 545 % if c.at_version:
536 546 <h4>
537 547 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_pos, ver_to=c.at_version_pos if c.at_version_pos else 'latest')}:
538 548 </h4>
539 549
540 550 <div class="subtitle-compare">
541 551 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
542 552 </div>
543 553
544 554 <div class="container">
545 555 <table class="rctable compare_view_commits">
546 556 <tr>
547 557 <th></th>
548 558 <th>${_('Time')}</th>
549 559 <th>${_('Author')}</th>
550 560 <th>${_('Commit')}</th>
551 561 <th></th>
552 562 <th>${_('Description')}</th>
553 563 </tr>
554 564
555 565 % for c_type, commit in c.commit_changes:
556 566 % if c_type in ['a', 'r']:
557 567 <%
558 568 if c_type == 'a':
559 569 cc_title = _('Commit added in displayed changes')
560 570 elif c_type == 'r':
561 571 cc_title = _('Commit removed in displayed changes')
562 572 else:
563 573 cc_title = ''
564 574 %>
565 575 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
566 576 <td>
567 577 <div class="commit-change-indicator color-${c_type}-border">
568 578 <div class="commit-change-content color-${c_type} tooltip" title="${h.tooltip(cc_title)}">
569 579 ${c_type.upper()}
570 580 </div>
571 581 </div>
572 582 </td>
573 583 <td class="td-time">
574 584 ${h.age_component(commit.date)}
575 585 </td>
576 586 <td class="td-user">
577 587 ${base.gravatar_with_user(commit.author, 16, tooltip=True)}
578 588 </td>
579 589 <td class="td-hash">
580 590 <code>
581 591 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
582 592 r${commit.idx}:${h.short_id(commit.raw_id)}
583 593 </a>
584 594 ${h.hidden('revisions', commit.raw_id)}
585 595 </code>
586 596 </td>
587 597 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
588 598 <i class="icon-expand-linked"></i>
589 599 </td>
590 600 <td class="mid td-description">
591 601 <div class="log-container truncate-wrap">
592 602 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${h.urlify_commit_message(commit.message, c.repo_name)}</div>
593 603 </div>
594 604 </td>
595 605 </tr>
596 606 % endif
597 607 % endfor
598 608 </table>
599 609 </div>
600 610
601 611 % endif
602 612
603 613 % else:
604 614 <%include file="/compare/compare_commits.mako" />
605 615 % endif
606 616
607 617 <div class="cs_files">
608 618 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
609 619 % if c.at_version:
610 620 <% c.inline_cnt = len(c.inline_versions[c.at_version_num]['display']) %>
611 621 <% c.comments = c.comment_versions[c.at_version_num]['display'] %>
612 622 % else:
613 623 <% c.inline_cnt = len(c.inline_versions[c.at_version_num]['until']) %>
614 624 <% c.comments = c.comment_versions[c.at_version_num]['until'] %>
615 625 % endif
616 626
617 627 <%
618 628 pr_menu_data = {
619 629 'outdated_comm_count_ver': outdated_comm_count_ver
620 630 }
621 631 %>
622 632
623 633 ${cbdiffs.render_diffset_menu(c.diffset, range_diff_on=c.range_diff_on)}
624 634
625 635 % if c.range_diff_on:
626 636 % for commit in c.commit_ranges:
627 637 ${cbdiffs.render_diffset(
628 638 c.changes[commit.raw_id],
629 639 commit=commit, use_comments=True,
630 640 collapse_when_files_over=5,
631 641 disable_new_comments=True,
632 642 deleted_files_comments=c.deleted_files_comments,
633 643 inline_comments=c.inline_comments,
634 644 pull_request_menu=pr_menu_data, show_todos=False)}
635 645 % endfor
636 646 % else:
637 647 ${cbdiffs.render_diffset(
638 648 c.diffset, use_comments=True,
639 649 collapse_when_files_over=30,
640 650 disable_new_comments=not c.allowed_to_comment,
641 651 deleted_files_comments=c.deleted_files_comments,
642 652 inline_comments=c.inline_comments,
643 653 pull_request_menu=pr_menu_data, show_todos=False)}
644 654 % endif
645 655
646 656 </div>
647 657 % else:
648 658         ## when skipping commits we still need to clear the view for missing commits
649 659 <div style="clear:both;"></div>
650 660 % endif
651 661
652 662 </div>
653 663 </div>
654 664
655 665 ## template for inline comment form
656 666 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
657 667
658 668 ## comments heading with count
659 669 <div class="comments-heading">
660 670 <i class="icon-comment"></i>
661 671 ${_('Comments')} ${len(c.comments)}
662 672 </div>
663 673
664 674 ## render general comments
665 675 <div id="comment-tr-show">
666 676 % if general_outdated_comm_count_ver:
667 677 <div class="info-box">
668 678 % if general_outdated_comm_count_ver == 1:
669 679 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
670 680 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
671 681 % else:
672 682 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
673 683 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
674 684 % endif
675 685 </div>
676 686 % endif
677 687 </div>
678 688
679 689 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
680 690
681 691 % if not c.pull_request.is_closed():
682 692     ## main comment form and its status
683 693 ${comment.comments(h.route_path('pullrequest_comment_create', repo_name=c.repo_name,
684 694 pull_request_id=c.pull_request.pull_request_id),
685 695 c.pull_request_review_status,
686 696 is_pull_request=True, change_status=c.allowed_to_change_status)}
687 697
688 698 ## merge status, and merge action
689 699 <div class="pull-request-merge">
690 700 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
691 701 </div>
692 702
693 703 %endif
694 704
695 705 % endif
696 706 </div>
697 707
698 708 <script type="text/javascript">
699 709
700 710 versionController = new VersionController();
701 711 versionController.init();
702 712
703 713 reviewersController = new ReviewersController();
704 714 commitsController = new CommitsController();
705 715
706 716 updateController = new UpdatePrController();
707 717
708 718 $(function () {
709 719
710 720 // custom code mirror
711 721 var codeMirrorInstance = $('#pr-description-input').get(0).MarkupForm.cm;
712 722
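        // PRDetails toggles the pull request title/description panel between
        // read-only view and inline edit mode, refreshing CodeMirror when editing starts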
713 723 var PRDetails = {
714 724 editButton: $('#open_edit_pullrequest'),
715 725 closeButton: $('#close_edit_pullrequest'),
716 726 deleteButton: $('#delete_pullrequest'),
717 727 viewFields: $('#pr-desc, #pr-title'),
718 728 editFields: $('#pr-desc-edit, #pr-title-edit, .pr-save'),
719 729
720 730 init: function () {
721 731 var that = this;
722 732 this.editButton.on('click', function (e) {
723 733 that.edit();
724 734 });
725 735 this.closeButton.on('click', function (e) {
726 736 that.view();
727 737 });
728 738 },
729 739
730 740 edit: function (event) {
731 741 this.viewFields.hide();
732 742 this.editButton.hide();
733 743 this.deleteButton.hide();
734 744 this.closeButton.show();
735 745 this.editFields.show();
736 746 codeMirrorInstance.refresh();
737 747 },
738 748
739 749 view: function (event) {
740 750 this.editButton.show();
741 751 this.deleteButton.show();
742 752 this.editFields.hide();
743 753 this.closeButton.hide();
744 754 this.viewFields.show();
745 755 }
746 756 };
747 757
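        // ReviewersPanel switches the reviewers list between display and edit mode,
        // loading the review rules when editing starts and hiding them again on close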
748 758 var ReviewersPanel = {
749 759 editButton: $('#open_edit_reviewers'),
750 760 closeButton: $('#close_edit_reviewers'),
751 761 addButton: $('#add_reviewer'),
752 762 removeButtons: $('.reviewer_member_remove,.reviewer_member_mandatory_remove'),
753 763
754 764 init: function () {
755 765 var self = this;
756 766 this.editButton.on('click', function (e) {
757 767 self.edit();
758 768 });
759 769 this.closeButton.on('click', function (e) {
760 770 self.close();
761 771 });
762 772 },
763 773
764 774 edit: function (event) {
765 775 this.editButton.hide();
766 776 this.closeButton.show();
767 777 this.addButton.show();
768 778 this.removeButtons.css('visibility', 'visible');
769 779 // review rules
770 780 reviewersController.loadReviewRules(
771 781 ${c.pull_request.reviewer_data_json | n});
772 782 },
773 783
774 784 close: function (event) {
775 785 this.editButton.show();
776 786 this.closeButton.hide();
777 787 this.addButton.hide();
778 788 this.removeButtons.css('visibility', 'hidden');
779 789 // hide review rules
780 790 reviewersController.hideReviewRules()
781 791 }
782 792 };
783 793
784 794 PRDetails.init();
785 795 ReviewersPanel.init();
786 796
787 797 showOutdated = function (self) {
788 798 $('.comment-inline.comment-outdated').show();
789 799 $('.filediff-outdated').show();
790 800 $('.showOutdatedComments').hide();
791 801 $('.hideOutdatedComments').show();
792 802 };
793 803
794 804 hideOutdated = function (self) {
795 805 $('.comment-inline.comment-outdated').hide();
796 806 $('.filediff-outdated').hide();
797 807 $('.hideOutdatedComments').hide();
798 808 $('.showOutdatedComments').show();
799 809 };
800 810
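        // refreshMergeChecks reloads the merge status fragment (current route with
        // merge_checks=1) into .pull-request-merge, dimming it while the request runs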
801 811 refreshMergeChecks = function () {
802 812 var loadUrl = "${request.current_route_path(_query=dict(merge_checks=1))}";
803 813 $('.pull-request-merge').css('opacity', 0.3);
804 814 $('.action-buttons-extra').css('opacity', 0.3);
805 815
806 816 $('.pull-request-merge').load(
807 817 loadUrl, function () {
808 818 $('.pull-request-merge').css('opacity', 1);
809 819
810 820 $('.action-buttons-extra').css('opacity', 1);
811 821 }
812 822 );
813 823 };
814 824
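        // closePullRequest confirms with the user, injects a hidden "close" flag,
        // sets the chosen review status on the general comment form and submits it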
815 825 closePullRequest = function (status) {
816 826             if (!confirm(_gettext('Are you sure you want to close this pull request without merging?'))) {
817 827 return false;
818 828 }
819 829 // inject closing flag
820 830 $('.action-buttons-extra').append('<input type="hidden" class="close-pr-input" id="close_pull_request" value="1">');
821 831 $(generalCommentForm.statusChange).select2("val", status).trigger('change');
822 832 $(generalCommentForm.submitForm).submit();
823 833 };
824 834
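        // toggle visibility of outdated comments and flip the (Show)/(Hide) label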
825 835 $('#show-outdated-comments').on('click', function (e) {
826 836 var button = $(this);
827 837 var outdated = $('.comment-outdated');
828 838
829 839 if (button.html() === "(Show)") {
830 840 button.html("(Hide)");
831 841 outdated.show();
832 842 } else {
833 843 button.html("(Show)");
834 844 outdated.hide();
835 845 }
836 846 });
837 847
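        // checkbox handler: show or hide the inline comments (and their buttons)
        // of the file box referenced by the checkbox's id_for attribute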
838 848 $('.show-inline-comments').on('change', function (e) {
839 849 var show = 'none';
840 850 var target = e.currentTarget;
841 851 if (target.checked) {
842 852 show = ''
843 853 }
844 854 var boxid = $(target).attr('id_for');
845 855 var comments = $('#{0} .inline-comments'.format(boxid));
846 856 var fn_display = function (idx) {
847 857 $(this).css('display', show);
848 858 };
849 859 $(comments).each(fn_display);
850 860 var btns = $('#{0} .inline-comments-button'.format(boxid));
851 861 $(btns).each(fn_display);
852 862 });
853 863
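        // disable the merge button on submit to guard against double submissions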
854 864 $('#merge_pull_request_form').submit(function () {
855 865 if (!$('#merge_pull_request').attr('disabled')) {
856 866 $('#merge_pull_request').attr('disabled', 'disabled');
857 867 }
858 868 return true;
859 869 });
860 870
861 871 $('#edit_pull_request').on('click', function (e) {
862 872 var title = $('#pr-title-input').val();
863 873 var description = codeMirrorInstance.getValue();
864 874 var renderer = $('#pr-renderer-input').val();
865 875 editPullRequest(
866 876 "${c.repo_name}", "${c.pull_request.pull_request_id}",
867 877 title, description, renderer);
868 878 });
869 879
870 880 $('#update_pull_request').on('click', function (e) {
871 881 $(this).attr('disabled', 'disabled');
872 882 $(this).addClass('disabled');
873 883 $(this).html(_gettext('Saving...'));
874 884 reviewersController.updateReviewers(
875 885 "${c.repo_name}", "${c.pull_request.pull_request_id}");
876 886 });
877 887
878 888
879 889 // fixing issue with caches on firefox
880 890 $('#update_commits').removeAttr("disabled");
881 891
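        // click handler: toggle the inline comments of the box given by data-comment-id,
        // tracking state via the comments-visible class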
882 892 $('.show-inline-comments').on('click', function (e) {
883 893 var boxid = $(this).attr('data-comment-id');
884 894 var button = $(this);
885 895
886 896 if (button.hasClass("comments-visible")) {
887 897 $('#{0} .inline-comments'.format(boxid)).each(function (index) {
888 898 $(this).hide();
889 899 });
890 900 button.removeClass("comments-visible");
891 901 } else {
892 902 $('#{0} .inline-comments'.format(boxid)).each(function (index) {
893 903 $(this).show();
894 904 });
895 905 button.addClass("comments-visible");
896 906 }
897 907 });
898 908
899 909 // register submit callback on commentForm form to track TODOs
900 910 window.commentFormGlobalSubmitSuccessCallback = function () {
901 911 refreshMergeChecks();
902 912 };
903 913
904 914 ReviewerAutoComplete('#user');
905 915
906 916 })
907 917
908 918 </script>
909 919
910 920 </div>
911 921
912 922 </%def>