##// END OF EJS Templates
vcs: optimized pre-load attributes for better caching.
marcink -
r3850:0415fef3 default
parent child Browse files
Show More
@@ -1,728 +1,728 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 25 from pyramid import compat
26 26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
27 27
28 28 from rhodecode.lib import helpers as h, diffs
29 29 from rhodecode.lib.utils2 import (
30 30 StrictAttributeDict, str2bool, safe_int, datetime_to_time, safe_unicode)
31 31 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
32 32 from rhodecode.model import repo
33 33 from rhodecode.model import repo_group
34 34 from rhodecode.model import user_group
35 35 from rhodecode.model import user
36 36 from rhodecode.model.db import User
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.settings import VcsSettingsModel
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
ADMIN_PREFIX = '/_admin'
STATIC_FILE_PREFIX = '/_static'

URL_NAME_REQUIREMENTS = {
    # group name can have a slash in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    'repo_group_name': r'.*?[^/]',
    # repo names can have a slash in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types; the \%\(...\)s alternative lets already-escaped route
    # template placeholders like %(source_ref_type)s pass through.
    # NOTE: raw strings produce the exact same regex text as before but avoid
    # the invalid '\%' / '\(' string escapes deprecated since Python 3.6.
    'source_ref_type': r'(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': r'(branch|book|tag|rev|\%\(target_ref_type\)s)',
}
58 58
59 59
def add_route_with_slash(config, name, pattern, **kw):
    """
    Register `pattern` under `name` on the pyramid `config`; when the
    pattern does not already end with a slash, also register a twin
    `<name>_slash` route for the slash-terminated variant, so both URL
    forms resolve to a route.
    """
    config.add_route(name, pattern, **kw)
    if not pattern.endswith('/'):
        config.add_route(name + '_slash', pattern + '/', **kw)
64 64
65 65
def add_route_requirements(route_path, requirements=None):
    """
    Expand plain ``{name}`` placeholders in a pyramid route pattern into
    ``{name:regex}`` form, using a placeholder -> regex mapping
    (URL_NAME_REQUIREMENTS when none is given).

    e.g::
        add_route_requirements('{repo_name}/settings')
    """
    mapping = requirements or URL_NAME_REQUIREMENTS
    for placeholder, regex in mapping.items():
        plain = '{%s}' % placeholder
        constrained = '{%s:%s}' % (placeholder, regex)
        route_path = route_path.replace(plain, constrained)
    return route_path
76 76
77 77
def get_format_ref_id(repo):
    """Return the reference-formatting function appropriate for `repo`."""
    # Subversion references carry their revision; everything else is by name.
    return _format_ref_id_svn if h.is_svn(repo) else _format_ref_id
84 84
85 85
86 86 def _format_ref_id(name, raw_id):
87 87 """Default formatting of a given reference `name`"""
88 88 return name
89 89
90 90
91 91 def _format_ref_id_svn(name, raw_id):
92 92 """Special way of formatting a reference for Subversion including path"""
93 93 return '%s@%s' % (name, raw_id)
94 94
95 95
class TemplateArgs(StrictAttributeDict):
    # Container for template context variables — the `c` object that
    # _get_local_tmpl_context() builds and passes to templates.
    pass
98 98
99 99
class BaseAppView(object):
    """
    Base for all pyramid views in the app: binds the authenticated user
    onto the view instance and enforces a pending forced password change.
    """

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, 'user'):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably of web-app context, e.g API CALL/VCS CALL
            if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
                log.warning('Unable to process request `%s` in this scope', request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user)

    def _maybe_needs_password_change(self, view_name, user_obj):
        """
        Flash a warning and redirect to the password page when `user_obj`
        has a pending `force_password_change` entry in user_data; the
        login/logout and password views themselves are exempt from the
        redirect (but still get the flash) so the user can actually comply.
        """
        log.debug('Checking if user %s needs password change on view %s',
                  user_obj, view_name)
        skip_user_views = [
            'logout', 'login',
            'my_account_password', 'my_account_password_update'
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            # the default (anonymous) user is never forced to change password
            return

        now = time.time()
        # the stored value doubles as the flag and the "change after" timestamp
        should_change = user_obj.user_data.get('force_password_change')
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug('User %s requires password change', user_obj)
            h.flash('You are required to change your password', 'warning',
                    ignore_duplicate=True)

            if view_name not in skip_user_views:
                raise HTTPFound(
                    self.request.route_path('my_account_password'))

    def _log_creation_exception(self, e, repo_name):
        """
        Log a repository-creation exception and return a translated flash
        message; invalid-certificate errors get a dedicated message.
        """
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == 'INVALID_CERTIFICATE':
            log.exception(
                'Exception creating a repository: invalid certificate')
            msg = (_('Error creating repository %s: invalid certificate')
                   % repo_name)
        else:
            log.exception("Exception creating a repository")
            msg = (_('Error creating repository %s')
                   % repo_name)
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """
        Build the base template context `c`: the auth user plus the
        create-repo/repo-group/user-group permission flags and the
        delegated-admin status derived from them.
        """
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes
            attach_context_attributes(c, self.request, self.request.user.user_id)

        c.is_super_admin = c.auth_user.is_admin

        # super-admins can create everything by definition
        c.can_create_repo = c.is_super_admin
        c.can_create_repo_group = c.is_super_admin
        c.can_create_user_group = c.is_super_admin

        c.is_delegated_admin = False

        if not c.auth_user.is_default and not c.is_super_admin:
            c.can_create_repo = h.HasPermissionAny('hg.create.repository')(
                user=self.request.user)
            repositories = c.auth_user.repositories_admin or c.can_create_repo

            c.can_create_repo_group = h.HasPermissionAny('hg.repogroup.create.true')(
                user=self.request.user)
            repository_groups = c.auth_user.repository_groups_admin or c.can_create_repo_group

            c.can_create_user_group = h.HasPermissionAny('hg.usergroup.create.true')(
                user=self.request.user)
            user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
            # delegated admin can create, or manage some objects
            c.is_delegated_admin = repositories or repository_groups or user_groups
        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        """Wrap `tmpl_args` in the dict shape the template renderers expect."""
        local_tmpl_args = {
            'defaults': {},
            'errors': {},
            'c': tmpl_args
        }
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError('Needs implementation in view class')
216 216
217 217
class RepoAppView(BaseAppView):
    """
    Base for repository-scoped views: binds the DB repository, its vcs
    instance, a per-user path-permission filter, and repo-related
    template context variables.
    """

    def __init__(self, context, request):
        super(RepoAppView, self).__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)

    def _handle_missing_requirements(self, error):
        # hook point: log repos that cannot be loaded due to missing
        # vcs requirements (e.g. unsupported mercurial features)
        log.error(
            'Requirements are missing for repository %s: %s',
            self.db_repo_name, safe_unicode(error))

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """
        Extend the base context with repository data; flashes an error and
        redirects away when the vcs repository cannot be loaded from the
        filesystem (except for summary/edit views on missing requirements).
        """
        _ = self.request.translate
        c = super(RepoAppView, self)._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        c.repository_is_user_following = ScmModel().is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id)
        # permissive placeholder until real path permissions load below
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            # NOTE(marcink):
            # comparison to None since if it's an object __bool__ is expensive to
            # calculate
            if self.rhodecode_vcs_repo is not None:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username)
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {'error': str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            h.flash(_(
                "The repository `%(repo_name)s` cannot be loaded in filesystem. "
                "Please check if it exist, or is not damaged.") %
                {'repo_name': c.repo_name},
                category='error', ignore_duplicate=True)
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(('edit_repo', 'repo_summary')):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=self.db_repo_name))

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path('home'))

        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            # "fork of" link is only shown when the user may read the origin
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                'repository.write', 'repository.read', 'repository.admin')(
                self.db_repo.fork.repo_name, 'summary fork link')

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get('f_path')
        if f_path:
            # fix for multiple initial slashes that causes errors for GIT
            return f_path.lstrip('/')

        return default

    def _get_f_path(self, matchdict, default=None):
        # permission-checked variant of _get_f_path_unchecked
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        """Read a single general vcs setting for `target_repo`."""
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def get_recache_flag(self):
        """Return True when the request carries any cache-busting GET flag."""
        for flag_name in ['force_recache', 'force-recache', 'no-cache']:
            flag_val = self.request.GET.get(flag_name)
            if str2bool(flag_val):
                return True
        return False
314 314
315 315
class PathFilter(object):
    """
    Applies per-user path ACLs to file paths and diff patchsets.

    Wraps an instance of BasePathPermissionChecker, or None when the user
    has no path restrictions (everything is then allowed).
    """

    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        """Return `path` unchanged, raising HTTPForbidden when access is denied."""
        if not path or not self.permission_checker:
            return path
        if self.permission_checker.has_access(path):
            return path
        raise HTTPForbidden()

    def filter_patchset(self, patchset):
        """
        Drop patches whose filename the user may not access.

        Returns a ``(patchset, has_hidden_changes)`` tuple; the original
        patchset object is returned untouched when nothing was filtered.
        """
        if not self.permission_checker or not patchset:
            return patchset, False

        visible = []
        hidden = False
        for patch in patchset:
            filename = patch.get('filename', None)
            # patches without a filename are always kept
            if not filename or self.permission_checker.has_access(filename):
                visible.append(patch)
            else:
                hidden = True

        if not hidden:
            return patchset, False

        # preserve the limited-diff wrapper when the input carried one
        if isinstance(patchset, diffs.LimitedDiffContainer):
            visible = diffs.LimitedDiffContainer(
                patchset.diff_limit, patchset.cur_diff_size, visible)
        return visible, True

    def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
        """Render `patchset` through `diffset` after ACL filtering."""
        visible, has_hidden_changes = self.filter_patchset(patchset)
        rendered = diffset.render_patchset(
            visible, source_ref=source_ref, target_ref=target_ref)
        rendered.has_hidden_changes = has_hidden_changes
        return rendered

    def get_raw_patch(self, diff_processor):
        """Return the raw patch text, unless user-specific filters forbid it."""
        checker = self.permission_checker
        if checker is None or checker.has_full_access:
            return diff_processor.as_raw()
        return '# Repository has user-specific filters, raw patch generation is disabled.'

    @property
    def is_enabled(self):
        # True when any path restrictions apply to this user
        return self.permission_checker is not None
363 363
364 364
class RepoGroupAppView(BaseAppView):
    """Base for repository-group scoped views; binds the DB repo group."""

    def __init__(self, context, request):
        super(RepoGroupAppView, self).__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """Extend the base context with the current repo group."""
        _ = self.request.translate
        c = super(RepoGroupAppView, self)._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)
        c.repo_group = self.db_repo_group
        return c

    def _revoke_perms_on_yourself(self, form_result):
        """
        Return True when `form_result` would change the current user's own
        permission on the group to something other than `group.admin`,
        i.e. the user is about to lock themselves out.

        Entries in perm_updates/perm_additions/perm_deletions are expected
        to be (user_id, permission, ...) sequences.
        """
        # NOTE: materialize as lists -- on Python 3 filter() returns a lazy
        # iterator and the `_updates[0]` indexing below would raise TypeError.
        _updates = [u for u in form_result['perm_updates']
                    if self._rhodecode_user.user_id == int(u[0])]
        _additions = [u for u in form_result['perm_additions']
                      if self._rhodecode_user.user_id == int(u[0])]
        _deletions = [u for u in form_result['perm_deletions']
                      if self._rhodecode_user.user_id == int(u[0])]
        admin_perm = 'group.admin'
        if _updates and _updates[0][1] != admin_perm or \
                _additions and _additions[0][1] != admin_perm or \
                _deletions and _deletions[0][1] != admin_perm:
            return True
        return False
391 391
392 392
class UserGroupAppView(BaseAppView):
    """Base for user-group scoped views; binds the DB user group."""

    def __init__(self, context, request):
        super(UserGroupAppView, self).__init__(context, request)
        db_user_group = request.db_user_group
        self.db_user_group = db_user_group
        self.db_user_group_name = db_user_group.users_group_name
398 398
399 399
class UserAppView(BaseAppView):
    """
    Base for user-scoped views; binds the DB user and blocks editing of
    the default (anonymous) user on routes that do not support it.
    """

    def __init__(self, context, request):
        super(UserAppView, self).__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        if not request.db_user_supports_default and \
                self.db_user.username == User.DEFAULT_USER:
            h.flash(_("Editing user `{}` is disabled.".format(
                User.DEFAULT_USER)), category='warning')
            raise HTTPFound(h.route_path('users'))
412 412
413 413
class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        """Pull search/ordering parameters out of a datatables request."""
        column_map = column_map or {}
        column_index = safe_int(request.GET.get('order[0][column]'))
        order_dir = request.GET.get('order[0][dir]', 'desc')
        sort_param = 'columns[%s][data][sort]' % column_index
        order_by = request.GET.get(sort_param, 'name_raw')

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get('search[value]')
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        """Pull draw/start/length paging parameters out of a datatables request."""
        draw = safe_int(request.GET.get('draw'))
        start = safe_int(request.GET.get('start'), 0)
        length = safe_int(request.GET.get('length'), 25)
        return draw, start, length

    def _get_order_col(self, order_by, model):
        """
        Resolve a string column name into an attribute of `model`
        (None when missing); non-string values pass through unchanged.
        """
        if not isinstance(order_by, compat.string_types):
            return order_by
        try:
            return operator.attrgetter(order_by)(model)
        except AttributeError:
            return None
447 447
448 448
class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """
    def load_default_context(self):
        c = self._get_local_tmpl_context()


        return c

    def load_refs_context(self, ref_items, partials_template):
        """
        Build datagrid rows for (ref_name, commit_id) pairs, rendering
        each cell through the given partials template.
        """
        _render = self.request.get_partial_renderer(partials_template)
        # attributes fetched eagerly with each commit for better caching
        pre_load = ["author", "date", "message", "parents"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        # mercurial branches can be closed; used to mark such rows
        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load)
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            use_commit_id = '/' in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=commit_id)

            else:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=ref_name,
                    _query=dict(at=ref_name))

            data.append({
                "name": _render('name', ref_name, files_url, closed),
                "name_raw": ref_name,
                "date": _render('date', commit.date),
                "date_raw": datetime_to_time(commit.date),
                "author": _render('author', commit.author),
                "commit": _render(
                    'commit', commit.message, commit.raw_id, commit.idx),
                "commit_raw": commit.idx,
                "compare": _render(
                    'compare', format_ref_id(ref_name, commit.raw_id)),
            })

        return data
510 510
511 511
class RepoRoutePredicate(object):
    """
    Pyramid route predicate matching `repo_name` against a repository
    looked up by name first, then by numeric id; on a match the
    repository is registered as `request.db_repo`.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # vcs operations bypass route predicates entirely
            return

        def _redirect_if_creating(route_info, db_repo):
            # we should skip the delete view so we can actually "remove"
            # repositories if they get stuck in creating state.
            if route_info['route'].name in ['edit_repo_advanced_delete']:
                return
            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                raise HTTPFound(request.route_path(
                    'repo_creating', repo_name=db_repo.repo_name))

        repo_name = info['match']['repo_name']
        repo_model = repo.RepoModel()

        db_repo = repo_model.get_by_repo_name(repo_name, cache=False)
        if not db_repo:
            # fall back to lookup by numeric repository id
            db_repo = repo_model.get_repo_by_id(repo_name)

        if not db_repo:
            return False

        # register this as request object we can re-use later
        request.db_repo = db_repo
        _redirect_if_creating(info, db_repo)
        return True
557 557
558 558
class RepoForbidArchivedRoutePredicate(object):
    """
    Pyramid route predicate that flashes a warning and redirects to the
    repository summary page when the target repository is archived.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_forbid_archived = %s' % self.val

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking if archived flag for repo for %s',
            self.__class__.__name__, rhodecode_db_repo.repo_name)

        if not rhodecode_db_repo.archived:
            return True

        log.warning('Current view is not supported for archived repo:%s',
                    rhodecode_db_repo.repo_name)
        h.flash(
            h.literal(_('Action not supported for archived repository.')),
            category='warning')
        raise HTTPFound(request.route_path(
            'repo_summary', repo_name=rhodecode_db_repo.repo_name))
587 587
588 588
class RepoTypeRoutePredicate(object):
    """
    Pyramid route predicate restricting a route to the given repository
    types; defaults to all of hg/git/svn when no value is configured.
    """

    def __init__(self, val, config):
        self.val = val or ['hg', 'git', 'svn']

    def text(self):
        return 'repo_accepted_type = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking repo type for %s in %s',
            self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)

        accepted = rhodecode_db_repo.repo_type in self.val
        if not accepted:
            log.warning('Current view is not supported for repo type:%s',
                        rhodecode_db_repo.repo_type)
        return accepted
615 615
616 616
class RepoGroupRoutePredicate(object):
    """
    Pyramid route predicate resolving `repo_group_name` to a DB repo
    group; on a match it is registered as `request.db_repo_group`.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        group_name = info['match']['repo_group_name']
        match = repo_group.RepoGroupModel().get_by_group_name(
            group_name, cache=False)
        if not match:
            return False

        # register this as request object we can re-use later
        request.db_repo_group = match
        return True
641 641
642 642
class UserGroupRoutePredicate(object):
    """
    Pyramid route predicate resolving `user_group_id` to a DB user group;
    on a match it is registered as `request.db_user_group`.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'user_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        group_id = info['match']['user_group_id']
        match = user_group.UserGroup().get(group_id, cache=False)
        if not match:
            return False

        # register this as request object we can re-use later
        request.db_user_group = match
        return True
667 667
668 668
class UserRoutePredicateBase(object):
    """
    Shared machinery for user route predicates: resolves `user_id` to a
    DB user (registered as `request.db_user`) and records whether the
    matched route supports the default user.
    """

    # subclasses decide whether the default (anonymous) user is allowed
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        match = user.User().get(info['match']['user_id'], cache=False)
        if not match:
            return False

        # register this as request object we can re-use later
        request.db_user = match
        request.db_user_supports_default = self.supports_default
        return True
694 694
695 695
class UserRoutePredicate(UserRoutePredicateBase):
    # these routes never expose the default (anonymous) user
    supports_default = False

    def text(self):
        label = 'user_route = %s'
        return label % self.val

    phash = text
703 703
704 704
class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    # these routes may also address the default (anonymous) user
    supports_default = True

    def text(self):
        label = 'user_with_default_route = %s'
        return label % self.val

    phash = text
712 712
713 713
def includeme(config):
    """Register all custom route predicates on the pyramid `config`."""
    predicates = [
        ('repo_route', RepoRoutePredicate),
        ('repo_accepted_types', RepoTypeRoutePredicate),
        ('repo_forbid_when_archived', RepoForbidArchivedRoutePredicate),
        ('repo_group_route', RepoGroupRoutePredicate),
        ('user_group_route', UserGroupRoutePredicate),
        ('user_route_with_default', UserRouteWithDefaultPredicate),
        ('user_route', UserRoutePredicate),
    ]
    for name, predicate in predicates:
        config.add_route_predicate(name, predicate)
@@ -1,311 +1,311 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
25 25 from pyramid.view import view_config
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 34 from rhodecode.lib.utils import safe_str
35 35 from rhodecode.lib.utils2 import safe_unicode, str2bool
36 36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
37 37 from rhodecode.lib.vcs.exceptions import (
38 38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
39 39 NodeDoesNotExistError)
40 40 from rhodecode.model.db import Repository, ChangesetStatus
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
class RepoCompareView(RepoAppView):
    """
    Views for comparing two refs (branch/bookmark/tag/commit), optionally
    across two different repositories of the same VCS type.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo
        return c

    def _get_commit_or_redirect(
            self, ref, ref_type, repo, redirect_after=True, partial=False):
        """
        This is a safe way to get a commit. If an error occurs it
        redirects to a commit with a proper message. If partial is set
        then it does not do redirect raise and throws an exception instead.
        """
        _ = self.request.translate
        try:
            return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
        except EmptyRepositoryError:
            if not redirect_after:
                return repo.scm_instance().EMPTY_COMMIT
            h.flash(h.literal(_('There are no commits yet')),
                    category='warning')
            if not partial:
                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=repo.repo_name))
            raise HTTPBadRequest()

        except RepositoryError as e:
            log.exception(safe_str(e))
            h.flash(safe_str(h.escape(e)), category='warning')
            if not partial:
                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=repo.repo_name))
            raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_compare_select', request_method='GET',
        renderer='rhodecode:templates/compare/compare_diff.mako')
    def compare_select(self):
        """Render the empty 'select refs to compare' landing page."""
        _ = self.request.translate
        c = self.load_default_context()

        source_repo = self.db_repo_name
        target_repo = self.request.GET.get('target_repo', source_repo)
        c.source_repo = Repository.get_by_repo_name(source_repo)
        c.target_repo = Repository.get_by_repo_name(target_repo)

        if c.source_repo is None or c.target_repo is None:
            raise HTTPNotFound()

        # empty defaults: nothing selected yet, so no diff to show
        c.compare_home = True
        c.commit_ranges = []
        c.collapse_all_commits = False
        c.diffset = None
        c.limited_diff = False
        c.source_ref = c.target_ref = _('Select commit')
        c.source_ref_type = ""
        c.target_ref_type = ""
        c.commit_statuses = ChangesetStatus.STATUSES
        c.preview_mode = False
        c.file_path = None

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_compare', request_method='GET',
        renderer=None)
    def compare(self):
        """
        Compute and render the diff (and commit range) between
        source_ref and target_ref. Returns a partial commits-log response
        for XHR requests, otherwise the full compare page.
        """
        _ = self.request.translate
        c = self.load_default_context()

        source_ref_type = self.request.matchdict['source_ref_type']
        source_ref = self.request.matchdict['source_ref']
        target_ref_type = self.request.matchdict['target_ref_type']
        target_ref = self.request.matchdict['target_ref']

        # source_ref will be evaluated in source_repo
        source_repo_name = self.db_repo_name
        source_path, source_id = parse_path_ref(source_ref)

        # target_ref will be evaluated in target_repo
        target_repo_name = self.request.GET.get('target_repo', source_repo_name)
        target_path, target_id = parse_path_ref(
            target_ref, default_path=self.request.GET.get('f_path', ''))

        # if merge is True
        # Show what changes since the shared ancestor commit of target/source
        # the source would get if it was merged with target. Only commits
        # which are in target but not in source will be shown.
        merge = str2bool(self.request.GET.get('merge'))
        # if merge is False
        # Show a raw diff of source/target refs even if no ancestor exists

        # c.fulldiff disables cut_off_limit
        c.fulldiff = str2bool(self.request.GET.get('fulldiff'))

        # fetch global flags of ignore ws or context lines
        diff_context = diffs.get_diff_context(self.request)
        hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)

        c.file_path = target_path
        c.commit_statuses = ChangesetStatus.STATUSES

        # if partial, returns just compare_commits.html (commits log)
        partial = self.request.is_xhr

        # swap url for compare_diff page
        c.swap_url = h.route_path(
            'repo_compare',
            repo_name=target_repo_name,
            source_ref_type=target_ref_type,
            source_ref=target_ref,
            target_repo=source_repo_name,
            target_ref_type=source_ref_type,
            target_ref=source_ref,
            _query=dict(merge=merge and '1' or '', f_path=target_path))

        source_repo = Repository.get_by_repo_name(source_repo_name)
        target_repo = Repository.get_by_repo_name(target_repo_name)

        if source_repo is None:
            log.error('Could not find the source repo: {}'
                      .format(source_repo_name))
            h.flash(_('Could not find the source repo: `{}`')
                    .format(h.escape(source_repo_name)), category='error')
            raise HTTPFound(
                h.route_path('repo_compare_select', repo_name=self.db_repo_name))

        if target_repo is None:
            # BUGFIX: previously logged source_repo_name here, which made the
            # log message report the wrong repository
            log.error('Could not find the target repo: {}'
                      .format(target_repo_name))
            h.flash(_('Could not find the target repo: `{}`')
                    .format(h.escape(target_repo_name)), category='error')
            raise HTTPFound(
                h.route_path('repo_compare_select', repo_name=self.db_repo_name))

        source_scm = source_repo.scm_instance()
        target_scm = target_repo.scm_instance()

        source_alias = source_scm.alias
        target_alias = target_scm.alias
        if source_alias != target_alias:
            msg = _('The comparison of two different kinds of remote repos '
                    'is not available')
            log.error(msg)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('repo_compare_select', repo_name=self.db_repo_name))

        source_commit = self._get_commit_or_redirect(
            ref=source_id, ref_type=source_ref_type, repo=source_repo,
            partial=partial)
        target_commit = self._get_commit_or_redirect(
            ref=target_id, ref_type=target_ref_type, repo=target_repo,
            partial=partial)

        c.compare_home = False
        c.source_repo = source_repo
        c.target_repo = target_repo
        c.source_ref = source_ref
        c.target_ref = target_ref
        c.source_ref_type = source_ref_type
        c.target_ref_type = target_ref_type

        # pre-load attributes in this order for better caching
        pre_load = ["author", "date", "message", "branch"]
        c.ancestor = None

        try:
            c.commit_ranges = source_scm.compare(
                source_commit.raw_id, target_commit.raw_id,
                target_scm, merge, pre_load=pre_load) or []
            if merge:
                c.ancestor = source_scm.get_common_ancestor(
                    source_commit.raw_id, target_commit.raw_id, target_scm)
        except RepositoryRequirementError:
            msg = _('Could not compare repos with different '
                    'large file settings')
            log.error(msg)
            if partial:
                return Response(msg)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('repo_compare_select',
                             repo_name=self.db_repo_name))

        c.statuses = self.db_repo.statuses(
            [x.raw_id for x in c.commit_ranges])

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        if partial:  # for PR ajax commits loader
            if not c.ancestor:
                return Response('')  # cannot merge if there is no ancestor

            html = render(
                'rhodecode:templates/compare/compare_commits.mako',
                self._get_template_context(c), self.request)
            return Response(html)

        if c.ancestor:
            # case we want a simple diff without incoming commits,
            # previewing what will be merged.
            # Make the diff on target repo (which is known to have target_ref)
            log.debug('Using ancestor %s as source_ref instead of %s',
                      c.ancestor, source_ref)
            source_repo = target_repo
            source_commit = target_repo.get_commit(commit_id=c.ancestor)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  source_commit, target_commit,
                  safe_unicode(source_repo.scm_instance().path))

        if source_commit.repository != target_commit.repository:
            msg = _(
                "Repositories unrelated. "
                "Cannot compare commit %(commit1)s from repository %(repo1)s "
                "with commit %(commit2)s from repository %(repo2)s.") % {
                    'commit1': h.show_id(source_commit),
                    'repo1': source_repo.repo_name,
                    'commit2': h.show_id(target_commit),
                    'repo2': target_repo.repo_name,
                }
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('repo_compare_select',
                             repo_name=self.db_repo_name))

        txt_diff = source_repo.scm_instance().get_diff(
            commit1=source_commit, commit2=target_commit,
            path=target_path, path1=source_path,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)

        diff_processor = diffs.DiffProcessor(
            txt_diff, format='newdiff', diff_limit=diff_limit,
            file_limit=file_limit, show_full_diff=c.fulldiff)
        _parsed = diff_processor.prepare()

        diffset = codeblocks.DiffSet(
            repo_name=source_repo.repo_name,
            source_node_getter=codeblocks.diffset_node_getter(source_commit),
            target_repo_name=self.db_repo_name,
            target_node_getter=codeblocks.diffset_node_getter(target_commit),
        )
        c.diffset = self.path_filter.render_patchset_filtered(
            diffset, _parsed, source_ref, target_ref)

        c.preview_mode = merge
        c.source_commit = source_commit
        c.target_commit = target_commit

        html = render(
            'rhodecode:templates/compare/compare_diff.mako',
            self._get_template_context(c), self.request)
        return Response(html)
@@ -1,1464 +1,1464 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 44 RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 48 ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat., we use for OLD PRs a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.target_repo.repo_name),
112 112 'name_raw': pr.pull_request_id,
113 113 'status': _render('pullrequest_status',
114 114 pr.calculated_review_status()),
115 115 'title': _render(
116 116 'pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'author': _render('pullrequest_author',
125 125 pr.author.full_contact, ),
126 126 'author_raw': pr.author.full_name,
127 127 'comments': _render('pullrequest_comments', len(comments)),
128 128 'comments_raw': len(comments),
129 129 'closed': pr.is_closed(),
130 130 })
131 131
132 132 data = ({
133 133 'draw': draw,
134 134 'data': data,
135 135 'recordsTotal': pull_requests_total_count,
136 136 'recordsFiltered': pull_requests_total_count,
137 137 })
138 138 return data
139 139
140 140 @LoginRequired()
141 141 @HasRepoPermissionAnyDecorator(
142 142 'repository.read', 'repository.write', 'repository.admin')
143 143 @view_config(
144 144 route_name='pullrequest_show_all', request_method='GET',
145 145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 146 def pull_request_list(self):
147 147 c = self.load_default_context()
148 148
149 149 req_get = self.request.GET
150 150 c.source = str2bool(req_get.get('source'))
151 151 c.closed = str2bool(req_get.get('closed'))
152 152 c.my = str2bool(req_get.get('my'))
153 153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 155
156 156 c.active = 'open'
157 157 if c.my:
158 158 c.active = 'my'
159 159 if c.closed:
160 160 c.active = 'closed'
161 161 if c.awaiting_review and not c.source:
162 162 c.active = 'awaiting'
163 163 if c.source and not c.awaiting_review:
164 164 c.active = 'source'
165 165 if c.awaiting_my_review:
166 166 c.active = 'awaiting_my'
167 167
168 168 return self._get_template_context(c)
169 169
170 170 @LoginRequired()
171 171 @HasRepoPermissionAnyDecorator(
172 172 'repository.read', 'repository.write', 'repository.admin')
173 173 @view_config(
174 174 route_name='pullrequest_show_all_data', request_method='GET',
175 175 renderer='json_ext', xhr=True)
176 176 def pull_request_list_data(self):
177 177 self.load_default_context()
178 178
179 179 # additional filters
180 180 req_get = self.request.GET
181 181 source = str2bool(req_get.get('source'))
182 182 closed = str2bool(req_get.get('closed'))
183 183 my = str2bool(req_get.get('my'))
184 184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 186
187 187 filter_type = 'awaiting_review' if awaiting_review \
188 188 else 'awaiting_my_review' if awaiting_my_review \
189 189 else None
190 190
191 191 opened_by = None
192 192 if my:
193 193 opened_by = [self._rhodecode_user.user_id]
194 194
195 195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 196 if closed:
197 197 statuses = [PullRequest.STATUS_CLOSED]
198 198
199 199 data = self._get_pull_requests_list(
200 200 repo_name=self.db_repo_name, source=source,
201 201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 202
203 203 return data
204 204
205 205 def _is_diff_cache_enabled(self, target_repo):
206 206 caching_enabled = self._get_general_setting(
207 207 target_repo, 'rhodecode_diff_cache')
208 208 log.debug('Diff caching enabled: %s', caching_enabled)
209 209 return caching_enabled
210 210
211 211 def _get_diffset(self, source_repo_name, source_repo,
212 212 source_ref_id, target_ref_id,
213 213 target_commit, source_commit, diff_limit, file_limit,
214 214 fulldiff, hide_whitespace_changes, diff_context):
215 215
216 216 vcs_diff = PullRequestModel().get_diff(
217 217 source_repo, source_ref_id, target_ref_id,
218 218 hide_whitespace_changes, diff_context)
219 219
220 220 diff_processor = diffs.DiffProcessor(
221 221 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 222 file_limit=file_limit, show_full_diff=fulldiff)
223 223
224 224 _parsed = diff_processor.prepare()
225 225
226 226 diffset = codeblocks.DiffSet(
227 227 repo_name=self.db_repo_name,
228 228 source_repo_name=source_repo_name,
229 229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 231 )
232 232 diffset = self.path_filter.render_patchset_filtered(
233 233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234 234
235 235 return diffset
236 236
237 237 def _get_range_diffset(self, source_scm, source_repo,
238 238 commit1, commit2, diff_limit, file_limit,
239 239 fulldiff, hide_whitespace_changes, diff_context):
240 240 vcs_diff = source_scm.get_diff(
241 241 commit1, commit2,
242 242 ignore_whitespace=hide_whitespace_changes,
243 243 context=diff_context)
244 244
245 245 diff_processor = diffs.DiffProcessor(
246 246 vcs_diff, format='newdiff', diff_limit=diff_limit,
247 247 file_limit=file_limit, show_full_diff=fulldiff)
248 248
249 249 _parsed = diff_processor.prepare()
250 250
251 251 diffset = codeblocks.DiffSet(
252 252 repo_name=source_repo.repo_name,
253 253 source_node_getter=codeblocks.diffset_node_getter(commit1),
254 254 target_node_getter=codeblocks.diffset_node_getter(commit2))
255 255
256 256 diffset = self.path_filter.render_patchset_filtered(
257 257 diffset, _parsed, commit1.raw_id, commit2.raw_id)
258 258
259 259 return diffset
260 260
261 261 @LoginRequired()
262 262 @HasRepoPermissionAnyDecorator(
263 263 'repository.read', 'repository.write', 'repository.admin')
264 264 @view_config(
265 265 route_name='pullrequest_show', request_method='GET',
266 266 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 267 def pull_request_show(self):
268 268 _ = self.request.translate
269 269 c = self.load_default_context()
270 270
271 271 pull_request = PullRequest.get_or_404(
272 272 self.request.matchdict['pull_request_id'])
273 273 pull_request_id = pull_request.pull_request_id
274 274
275 275 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
276 276 log.debug('show: forbidden because pull request is in state %s',
277 277 pull_request.pull_request_state)
278 278 msg = _(u'Cannot show pull requests in state other than `{}`. '
279 279 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
280 280 pull_request.pull_request_state)
281 281 h.flash(msg, category='error')
282 282 raise HTTPFound(h.route_path('pullrequest_show_all',
283 283 repo_name=self.db_repo_name))
284 284
285 285 version = self.request.GET.get('version')
286 286 from_version = self.request.GET.get('from_version') or version
287 287 merge_checks = self.request.GET.get('merge_checks')
288 288 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
289 289
290 290 # fetch global flags of ignore ws or context lines
291 291 diff_context = diffs.get_diff_context(self.request)
292 292 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
293 293
294 294 force_refresh = str2bool(self.request.GET.get('force_refresh'))
295 295
296 296 (pull_request_latest,
297 297 pull_request_at_ver,
298 298 pull_request_display_obj,
299 299 at_version) = PullRequestModel().get_pr_version(
300 300 pull_request_id, version=version)
301 301 pr_closed = pull_request_latest.is_closed()
302 302
303 303 if pr_closed and (version or from_version):
304 304 # not allow to browse versions
305 305 raise HTTPFound(h.route_path(
306 306 'pullrequest_show', repo_name=self.db_repo_name,
307 307 pull_request_id=pull_request_id))
308 308
309 309 versions = pull_request_display_obj.versions()
310 310 # used to store per-commit range diffs
311 311 c.changes = collections.OrderedDict()
312 312 c.range_diff_on = self.request.GET.get('range-diff') == "1"
313 313
314 314 c.at_version = at_version
315 315 c.at_version_num = (at_version
316 316 if at_version and at_version != 'latest'
317 317 else None)
318 318 c.at_version_pos = ChangesetComment.get_index_from_version(
319 319 c.at_version_num, versions)
320 320
321 321 (prev_pull_request_latest,
322 322 prev_pull_request_at_ver,
323 323 prev_pull_request_display_obj,
324 324 prev_at_version) = PullRequestModel().get_pr_version(
325 325 pull_request_id, version=from_version)
326 326
327 327 c.from_version = prev_at_version
328 328 c.from_version_num = (prev_at_version
329 329 if prev_at_version and prev_at_version != 'latest'
330 330 else None)
331 331 c.from_version_pos = ChangesetComment.get_index_from_version(
332 332 c.from_version_num, versions)
333 333
334 334 # define if we're in COMPARE mode or VIEW at version mode
335 335 compare = at_version != prev_at_version
336 336
337 337 # pull_requests repo_name we opened it against
338 338 # ie. target_repo must match
339 339 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
340 340 raise HTTPNotFound()
341 341
342 342 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
343 343 pull_request_at_ver)
344 344
345 345 c.pull_request = pull_request_display_obj
346 346 c.renderer = pull_request_at_ver.description_renderer or c.renderer
347 347 c.pull_request_latest = pull_request_latest
348 348
349 349 if compare or (at_version and not at_version == 'latest'):
350 350 c.allowed_to_change_status = False
351 351 c.allowed_to_update = False
352 352 c.allowed_to_merge = False
353 353 c.allowed_to_delete = False
354 354 c.allowed_to_comment = False
355 355 c.allowed_to_close = False
356 356 else:
357 357 can_change_status = PullRequestModel().check_user_change_status(
358 358 pull_request_at_ver, self._rhodecode_user)
359 359 c.allowed_to_change_status = can_change_status and not pr_closed
360 360
361 361 c.allowed_to_update = PullRequestModel().check_user_update(
362 362 pull_request_latest, self._rhodecode_user) and not pr_closed
363 363 c.allowed_to_merge = PullRequestModel().check_user_merge(
364 364 pull_request_latest, self._rhodecode_user) and not pr_closed
365 365 c.allowed_to_delete = PullRequestModel().check_user_delete(
366 366 pull_request_latest, self._rhodecode_user) and not pr_closed
367 367 c.allowed_to_comment = not pr_closed
368 368 c.allowed_to_close = c.allowed_to_merge and not pr_closed
369 369
370 370 c.forbid_adding_reviewers = False
371 371 c.forbid_author_to_review = False
372 372 c.forbid_commit_author_to_review = False
373 373
374 374 if pull_request_latest.reviewer_data and \
375 375 'rules' in pull_request_latest.reviewer_data:
376 376 rules = pull_request_latest.reviewer_data['rules'] or {}
377 377 try:
378 378 c.forbid_adding_reviewers = rules.get(
379 379 'forbid_adding_reviewers')
380 380 c.forbid_author_to_review = rules.get(
381 381 'forbid_author_to_review')
382 382 c.forbid_commit_author_to_review = rules.get(
383 383 'forbid_commit_author_to_review')
384 384 except Exception:
385 385 pass
386 386
387 387 # check merge capabilities
388 388 _merge_check = MergeCheck.validate(
389 389 pull_request_latest, auth_user=self._rhodecode_user,
390 390 translator=self.request.translate,
391 391 force_shadow_repo_refresh=force_refresh)
392 392 c.pr_merge_errors = _merge_check.error_details
393 393 c.pr_merge_possible = not _merge_check.failed
394 394 c.pr_merge_message = _merge_check.merge_msg
395 395
396 396 c.pr_merge_info = MergeCheck.get_merge_conditions(
397 397 pull_request_latest, translator=self.request.translate)
398 398
399 399 c.pull_request_review_status = _merge_check.review_status
400 400 if merge_checks:
401 401 self.request.override_renderer = \
402 402 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
403 403 return self._get_template_context(c)
404 404
405 405 comments_model = CommentsModel()
406 406
407 407 # reviewers and statuses
408 408 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
409 409 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
410 410
411 411 # GENERAL COMMENTS with versions #
412 412 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
413 413 q = q.order_by(ChangesetComment.comment_id.asc())
414 414 general_comments = q
415 415
416 416 # pick comments we want to render at current version
417 417 c.comment_versions = comments_model.aggregate_comments(
418 418 general_comments, versions, c.at_version_num)
419 419 c.comments = c.comment_versions[c.at_version_num]['until']
420 420
421 421 # INLINE COMMENTS with versions #
422 422 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
423 423 q = q.order_by(ChangesetComment.comment_id.asc())
424 424 inline_comments = q
425 425
426 426 c.inline_versions = comments_model.aggregate_comments(
427 427 inline_comments, versions, c.at_version_num, inline=True)
428 428
429 429 # inject latest version
430 430 latest_ver = PullRequest.get_pr_display_object(
431 431 pull_request_latest, pull_request_latest)
432 432
433 433 c.versions = versions + [latest_ver]
434 434
435 435 # if we use version, then do not show later comments
436 436 # than current version
437 437 display_inline_comments = collections.defaultdict(
438 438 lambda: collections.defaultdict(list))
439 439 for co in inline_comments:
440 440 if c.at_version_num:
441 441 # pick comments that are at least UPTO given version, so we
442 442 # don't render comments for higher version
443 443 should_render = co.pull_request_version_id and \
444 444 co.pull_request_version_id <= c.at_version_num
445 445 else:
446 446 # showing all, for 'latest'
447 447 should_render = True
448 448
449 449 if should_render:
450 450 display_inline_comments[co.f_path][co.line_no].append(co)
451 451
452 452 # load diff data into template context, if we use compare mode then
453 453 # diff is calculated based on changes between versions of PR
454 454
455 455 source_repo = pull_request_at_ver.source_repo
456 456 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
457 457
458 458 target_repo = pull_request_at_ver.target_repo
459 459 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
460 460
461 461 if compare:
462 462 # in compare switch the diff base to latest commit from prev version
463 463 target_ref_id = prev_pull_request_display_obj.revisions[0]
464 464
465 465 # despite opening commits for bookmarks/branches/tags, we always
466 466 # convert this to rev to prevent changes after bookmark or branch change
467 467 c.source_ref_type = 'rev'
468 468 c.source_ref = source_ref_id
469 469
470 470 c.target_ref_type = 'rev'
471 471 c.target_ref = target_ref_id
472 472
473 473 c.source_repo = source_repo
474 474 c.target_repo = target_repo
475 475
476 476 c.commit_ranges = []
477 477 source_commit = EmptyCommit()
478 478 target_commit = EmptyCommit()
479 479 c.missing_requirements = False
480 480
481 481 source_scm = source_repo.scm_instance()
482 482 target_scm = target_repo.scm_instance()
483 483
484 484 shadow_scm = None
485 485 try:
486 486 shadow_scm = pull_request_latest.get_shadow_repo()
487 487 except Exception:
488 488 log.debug('Failed to get shadow repo', exc_info=True)
489 489 # try first the existing source_repo, and then shadow
490 490 # repo if we can obtain one
491 491 commits_source_repo = source_scm or shadow_scm
492 492
493 493 c.commits_source_repo = commits_source_repo
494 494 c.ancestor = None # set it to None, to hide it from PR view
495 495
496 496 # empty version means latest, so we keep this to prevent
497 497 # double caching
498 498 version_normalized = version or 'latest'
499 499 from_version_normalized = from_version or 'latest'
500 500
501 501 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
502 502 cache_file_path = diff_cache_exist(
503 503 cache_path, 'pull_request', pull_request_id, version_normalized,
504 504 from_version_normalized, source_ref_id, target_ref_id,
505 505 hide_whitespace_changes, diff_context, c.fulldiff)
506 506
507 507 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
508 508 force_recache = self.get_recache_flag()
509 509
510 510 cached_diff = None
511 511 if caching_enabled:
512 512 cached_diff = load_cached_diff(cache_file_path)
513 513
514 514 has_proper_commit_cache = (
515 515 cached_diff and cached_diff.get('commits')
516 516 and len(cached_diff.get('commits', [])) == 5
517 517 and cached_diff.get('commits')[0]
518 518 and cached_diff.get('commits')[3])
519 519
520 520 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
521 521 diff_commit_cache = \
522 522 (ancestor_commit, commit_cache, missing_requirements,
523 523 source_commit, target_commit) = cached_diff['commits']
524 524 else:
525 525 diff_commit_cache = \
526 526 (ancestor_commit, commit_cache, missing_requirements,
527 527 source_commit, target_commit) = self.get_commits(
528 528 commits_source_repo,
529 529 pull_request_at_ver,
530 530 source_commit,
531 531 source_ref_id,
532 532 source_scm,
533 533 target_commit,
534 534 target_ref_id,
535 535 target_scm)
536 536
537 537 # register our commit range
538 538 for comm in commit_cache.values():
539 539 c.commit_ranges.append(comm)
540 540
541 541 c.missing_requirements = missing_requirements
542 542 c.ancestor_commit = ancestor_commit
543 543 c.statuses = source_repo.statuses(
544 544 [x.raw_id for x in c.commit_ranges])
545 545
546 546 # auto collapse if we have more than limit
547 547 collapse_limit = diffs.DiffProcessor._collapse_commits_over
548 548 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
549 549 c.compare_mode = compare
550 550
551 551 # diff_limit is the old behavior, will cut off the whole diff
552 552 # if the limit is applied otherwise will just hide the
553 553 # big files from the front-end
554 554 diff_limit = c.visual.cut_off_limit_diff
555 555 file_limit = c.visual.cut_off_limit_file
556 556
557 557 c.missing_commits = False
558 558 if (c.missing_requirements
559 559 or isinstance(source_commit, EmptyCommit)
560 560 or source_commit == target_commit):
561 561
562 562 c.missing_commits = True
563 563 else:
564 564 c.inline_comments = display_inline_comments
565 565
566 566 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
567 567 if not force_recache and has_proper_diff_cache:
568 568 c.diffset = cached_diff['diff']
569 569 (ancestor_commit, commit_cache, missing_requirements,
570 570 source_commit, target_commit) = cached_diff['commits']
571 571 else:
572 572 c.diffset = self._get_diffset(
573 573 c.source_repo.repo_name, commits_source_repo,
574 574 source_ref_id, target_ref_id,
575 575 target_commit, source_commit,
576 576 diff_limit, file_limit, c.fulldiff,
577 577 hide_whitespace_changes, diff_context)
578 578
579 579 # save cached diff
580 580 if caching_enabled:
581 581 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
582 582
583 583 c.limited_diff = c.diffset.limited_diff
584 584
585 585 # calculate removed files that are bound to comments
586 586 comment_deleted_files = [
587 587 fname for fname in display_inline_comments
588 588 if fname not in c.diffset.file_stats]
589 589
590 590 c.deleted_files_comments = collections.defaultdict(dict)
591 591 for fname, per_line_comments in display_inline_comments.items():
592 592 if fname in comment_deleted_files:
593 593 c.deleted_files_comments[fname]['stats'] = 0
594 594 c.deleted_files_comments[fname]['comments'] = list()
595 595 for lno, comments in per_line_comments.items():
596 596 c.deleted_files_comments[fname]['comments'].extend(comments)
597 597
598 598 # maybe calculate the range diff
599 599 if c.range_diff_on:
600 600 # TODO(marcink): set whitespace/context
601 601 context_lcl = 3
602 602 ign_whitespace_lcl = False
603 603
604 604 for commit in c.commit_ranges:
605 605 commit2 = commit
606 606 commit1 = commit.first_parent
607 607
608 608 range_diff_cache_file_path = diff_cache_exist(
609 609 cache_path, 'diff', commit.raw_id,
610 610 ign_whitespace_lcl, context_lcl, c.fulldiff)
611 611
612 612 cached_diff = None
613 613 if caching_enabled:
614 614 cached_diff = load_cached_diff(range_diff_cache_file_path)
615 615
616 616 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
617 617 if not force_recache and has_proper_diff_cache:
618 618 diffset = cached_diff['diff']
619 619 else:
620 620 diffset = self._get_range_diffset(
621 621 source_scm, source_repo,
622 622 commit1, commit2, diff_limit, file_limit,
623 623 c.fulldiff, ign_whitespace_lcl, context_lcl
624 624 )
625 625
626 626 # save cached diff
627 627 if caching_enabled:
628 628 cache_diff(range_diff_cache_file_path, diffset, None)
629 629
630 630 c.changes[commit.raw_id] = diffset
631 631
632 632 # this is a hack to properly display links, when creating PR, the
633 633 # compare view and others uses different notation, and
634 634 # compare_commits.mako renders links based on the target_repo.
635 635 # We need to swap that here to generate it properly on the html side
636 636 c.target_repo = c.source_repo
637 637
638 638 c.commit_statuses = ChangesetStatus.STATUSES
639 639
640 640 c.show_version_changes = not pr_closed
641 641 if c.show_version_changes:
642 642 cur_obj = pull_request_at_ver
643 643 prev_obj = prev_pull_request_at_ver
644 644
645 645 old_commit_ids = prev_obj.revisions
646 646 new_commit_ids = cur_obj.revisions
647 647 commit_changes = PullRequestModel()._calculate_commit_id_changes(
648 648 old_commit_ids, new_commit_ids)
649 649 c.commit_changes_summary = commit_changes
650 650
651 651 # calculate the diff for commits between versions
652 652 c.commit_changes = []
653 653 mark = lambda cs, fw: list(
654 654 h.itertools.izip_longest([], cs, fillvalue=fw))
655 655 for c_type, raw_id in mark(commit_changes.added, 'a') \
656 656 + mark(commit_changes.removed, 'r') \
657 657 + mark(commit_changes.common, 'c'):
658 658
659 659 if raw_id in commit_cache:
660 660 commit = commit_cache[raw_id]
661 661 else:
662 662 try:
663 663 commit = commits_source_repo.get_commit(raw_id)
664 664 except CommitDoesNotExistError:
665 665 # in case we fail extracting still use "dummy" commit
666 666 # for display in commit diff
667 667 commit = h.AttributeDict(
668 668 {'raw_id': raw_id,
669 669 'message': 'EMPTY or MISSING COMMIT'})
670 670 c.commit_changes.append([c_type, commit])
671 671
672 672 # current user review statuses for each version
673 673 c.review_versions = {}
674 674 if self._rhodecode_user.user_id in allowed_reviewers:
675 675 for co in general_comments:
676 676 if co.author.user_id == self._rhodecode_user.user_id:
677 677 status = co.status_change
678 678 if status:
679 679 _ver_pr = status[0].comment.pull_request_version_id
680 680 c.review_versions[_ver_pr] = status[0]
681 681
682 682 return self._get_template_context(c)
683 683
684 684 def get_commits(
685 685 self, commits_source_repo, pull_request_at_ver, source_commit,
686 686 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
687 687 commit_cache = collections.OrderedDict()
688 688 missing_requirements = False
689 689 try:
690 pre_load = ["author", "branch", "date", "message", "parents"]
690 pre_load = ["author", "date", "message", "branch", "parents"]
691 691 show_revs = pull_request_at_ver.revisions
692 692 for rev in show_revs:
693 693 comm = commits_source_repo.get_commit(
694 694 commit_id=rev, pre_load=pre_load)
695 695 commit_cache[comm.raw_id] = comm
696 696
697 697 # Order here matters, we first need to get target, and then
698 698 # the source
699 699 target_commit = commits_source_repo.get_commit(
700 700 commit_id=safe_str(target_ref_id))
701 701
702 702 source_commit = commits_source_repo.get_commit(
703 703 commit_id=safe_str(source_ref_id))
704 704 except CommitDoesNotExistError:
705 705 log.warning(
706 706 'Failed to get commit from `{}` repo'.format(
707 707 commits_source_repo), exc_info=True)
708 708 except RepositoryRequirementError:
709 709 log.warning(
710 710 'Failed to get all required data from repo', exc_info=True)
711 711 missing_requirements = True
712 712 ancestor_commit = None
713 713 try:
714 714 ancestor_id = source_scm.get_common_ancestor(
715 715 source_commit.raw_id, target_commit.raw_id, target_scm)
716 716 ancestor_commit = source_scm.get_commit(ancestor_id)
717 717 except Exception:
718 718 ancestor_commit = None
719 719 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
720 720
721 721 def assure_not_empty_repo(self):
722 722 _ = self.request.translate
723 723
724 724 try:
725 725 self.db_repo.scm_instance().get_commit()
726 726 except EmptyRepositoryError:
727 727 h.flash(h.literal(_('There are no commits yet')),
728 728 category='warning')
729 729 raise HTTPFound(
730 730 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
731 731
732 732 @LoginRequired()
733 733 @NotAnonymous()
734 734 @HasRepoPermissionAnyDecorator(
735 735 'repository.read', 'repository.write', 'repository.admin')
736 736 @view_config(
737 737 route_name='pullrequest_new', request_method='GET',
738 738 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
739 739 def pull_request_new(self):
740 740 _ = self.request.translate
741 741 c = self.load_default_context()
742 742
743 743 self.assure_not_empty_repo()
744 744 source_repo = self.db_repo
745 745
746 746 commit_id = self.request.GET.get('commit')
747 747 branch_ref = self.request.GET.get('branch')
748 748 bookmark_ref = self.request.GET.get('bookmark')
749 749
750 750 try:
751 751 source_repo_data = PullRequestModel().generate_repo_data(
752 752 source_repo, commit_id=commit_id,
753 753 branch=branch_ref, bookmark=bookmark_ref,
754 754 translator=self.request.translate)
755 755 except CommitDoesNotExistError as e:
756 756 log.exception(e)
757 757 h.flash(_('Commit does not exist'), 'error')
758 758 raise HTTPFound(
759 759 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
760 760
761 761 default_target_repo = source_repo
762 762
763 763 if source_repo.parent and c.has_origin_repo_read_perm:
764 764 parent_vcs_obj = source_repo.parent.scm_instance()
765 765 if parent_vcs_obj and not parent_vcs_obj.is_empty():
766 766 # change default if we have a parent repo
767 767 default_target_repo = source_repo.parent
768 768
769 769 target_repo_data = PullRequestModel().generate_repo_data(
770 770 default_target_repo, translator=self.request.translate)
771 771
772 772 selected_source_ref = source_repo_data['refs']['selected_ref']
773 773 title_source_ref = ''
774 774 if selected_source_ref:
775 775 title_source_ref = selected_source_ref.split(':', 2)[1]
776 776 c.default_title = PullRequestModel().generate_pullrequest_title(
777 777 source=source_repo.repo_name,
778 778 source_ref=title_source_ref,
779 779 target=default_target_repo.repo_name
780 780 )
781 781
782 782 c.default_repo_data = {
783 783 'source_repo_name': source_repo.repo_name,
784 784 'source_refs_json': json.dumps(source_repo_data),
785 785 'target_repo_name': default_target_repo.repo_name,
786 786 'target_refs_json': json.dumps(target_repo_data),
787 787 }
788 788 c.default_source_ref = selected_source_ref
789 789
790 790 return self._get_template_context(c)
791 791
792 792 @LoginRequired()
793 793 @NotAnonymous()
794 794 @HasRepoPermissionAnyDecorator(
795 795 'repository.read', 'repository.write', 'repository.admin')
796 796 @view_config(
797 797 route_name='pullrequest_repo_refs', request_method='GET',
798 798 renderer='json_ext', xhr=True)
799 799 def pull_request_repo_refs(self):
800 800 self.load_default_context()
801 801 target_repo_name = self.request.matchdict['target_repo_name']
802 802 repo = Repository.get_by_repo_name(target_repo_name)
803 803 if not repo:
804 804 raise HTTPNotFound()
805 805
806 806 target_perm = HasRepoPermissionAny(
807 807 'repository.read', 'repository.write', 'repository.admin')(
808 808 target_repo_name)
809 809 if not target_perm:
810 810 raise HTTPNotFound()
811 811
812 812 return PullRequestModel().generate_repo_data(
813 813 repo, translator=self.request.translate)
814 814
815 815 @LoginRequired()
816 816 @NotAnonymous()
817 817 @HasRepoPermissionAnyDecorator(
818 818 'repository.read', 'repository.write', 'repository.admin')
819 819 @view_config(
820 820 route_name='pullrequest_repo_targets', request_method='GET',
821 821 renderer='json_ext', xhr=True)
822 822 def pullrequest_repo_targets(self):
823 823 _ = self.request.translate
824 824 filter_query = self.request.GET.get('query')
825 825
826 826 # get the parents
827 827 parent_target_repos = []
828 828 if self.db_repo.parent:
829 829 parents_query = Repository.query() \
830 830 .order_by(func.length(Repository.repo_name)) \
831 831 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
832 832
833 833 if filter_query:
834 834 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
835 835 parents_query = parents_query.filter(
836 836 Repository.repo_name.ilike(ilike_expression))
837 837 parents = parents_query.limit(20).all()
838 838
839 839 for parent in parents:
840 840 parent_vcs_obj = parent.scm_instance()
841 841 if parent_vcs_obj and not parent_vcs_obj.is_empty():
842 842 parent_target_repos.append(parent)
843 843
844 844 # get other forks, and repo itself
845 845 query = Repository.query() \
846 846 .order_by(func.length(Repository.repo_name)) \
847 847 .filter(
848 848 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
849 849 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
850 850 ) \
851 851 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
852 852
853 853 if filter_query:
854 854 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
855 855 query = query.filter(Repository.repo_name.ilike(ilike_expression))
856 856
857 857 limit = max(20 - len(parent_target_repos), 5) # not less then 5
858 858 target_repos = query.limit(limit).all()
859 859
860 860 all_target_repos = target_repos + parent_target_repos
861 861
862 862 repos = []
863 863 # This checks permissions to the repositories
864 864 for obj in ScmModel().get_repos(all_target_repos):
865 865 repos.append({
866 866 'id': obj['name'],
867 867 'text': obj['name'],
868 868 'type': 'repo',
869 869 'repo_id': obj['dbrepo']['repo_id'],
870 870 'repo_type': obj['dbrepo']['repo_type'],
871 871 'private': obj['dbrepo']['private'],
872 872
873 873 })
874 874
875 875 data = {
876 876 'more': False,
877 877 'results': [{
878 878 'text': _('Repositories'),
879 879 'children': repos
880 880 }] if repos else []
881 881 }
882 882 return data
883 883
884 884 @LoginRequired()
885 885 @NotAnonymous()
886 886 @HasRepoPermissionAnyDecorator(
887 887 'repository.read', 'repository.write', 'repository.admin')
888 888 @CSRFRequired()
889 889 @view_config(
890 890 route_name='pullrequest_create', request_method='POST',
891 891 renderer=None)
892 892 def pull_request_create(self):
893 893 _ = self.request.translate
894 894 self.assure_not_empty_repo()
895 895 self.load_default_context()
896 896
897 897 controls = peppercorn.parse(self.request.POST.items())
898 898
899 899 try:
900 900 form = PullRequestForm(
901 901 self.request.translate, self.db_repo.repo_id)()
902 902 _form = form.to_python(controls)
903 903 except formencode.Invalid as errors:
904 904 if errors.error_dict.get('revisions'):
905 905 msg = 'Revisions: %s' % errors.error_dict['revisions']
906 906 elif errors.error_dict.get('pullrequest_title'):
907 907 msg = errors.error_dict.get('pullrequest_title')
908 908 else:
909 909 msg = _('Error creating pull request: {}').format(errors)
910 910 log.exception(msg)
911 911 h.flash(msg, 'error')
912 912
913 913 # would rather just go back to form ...
914 914 raise HTTPFound(
915 915 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
916 916
917 917 source_repo = _form['source_repo']
918 918 source_ref = _form['source_ref']
919 919 target_repo = _form['target_repo']
920 920 target_ref = _form['target_ref']
921 921 commit_ids = _form['revisions'][::-1]
922 922
923 923 # find the ancestor for this pr
924 924 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
925 925 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
926 926
927 927 if not (source_db_repo or target_db_repo):
928 928 h.flash(_('source_repo or target repo not found'), category='error')
929 929 raise HTTPFound(
930 930 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
931 931
932 932 # re-check permissions again here
933 933 # source_repo we must have read permissions
934 934
935 935 source_perm = HasRepoPermissionAny(
936 936 'repository.read', 'repository.write', 'repository.admin')(
937 937 source_db_repo.repo_name)
938 938 if not source_perm:
939 939 msg = _('Not Enough permissions to source repo `{}`.'.format(
940 940 source_db_repo.repo_name))
941 941 h.flash(msg, category='error')
942 942 # copy the args back to redirect
943 943 org_query = self.request.GET.mixed()
944 944 raise HTTPFound(
945 945 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
946 946 _query=org_query))
947 947
948 948 # target repo we must have read permissions, and also later on
949 949 # we want to check branch permissions here
950 950 target_perm = HasRepoPermissionAny(
951 951 'repository.read', 'repository.write', 'repository.admin')(
952 952 target_db_repo.repo_name)
953 953 if not target_perm:
954 954 msg = _('Not Enough permissions to target repo `{}`.'.format(
955 955 target_db_repo.repo_name))
956 956 h.flash(msg, category='error')
957 957 # copy the args back to redirect
958 958 org_query = self.request.GET.mixed()
959 959 raise HTTPFound(
960 960 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
961 961 _query=org_query))
962 962
963 963 source_scm = source_db_repo.scm_instance()
964 964 target_scm = target_db_repo.scm_instance()
965 965
966 966 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
967 967 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
968 968
969 969 ancestor = source_scm.get_common_ancestor(
970 970 source_commit.raw_id, target_commit.raw_id, target_scm)
971 971
972 972 # recalculate target ref based on ancestor
973 973 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
974 974 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
975 975
976 976 get_default_reviewers_data, validate_default_reviewers = \
977 977 PullRequestModel().get_reviewer_functions()
978 978
979 979 # recalculate reviewers logic, to make sure we can validate this
980 980 reviewer_rules = get_default_reviewers_data(
981 981 self._rhodecode_db_user, source_db_repo,
982 982 source_commit, target_db_repo, target_commit)
983 983
984 984 given_reviewers = _form['review_members']
985 985 reviewers = validate_default_reviewers(
986 986 given_reviewers, reviewer_rules)
987 987
988 988 pullrequest_title = _form['pullrequest_title']
989 989 title_source_ref = source_ref.split(':', 2)[1]
990 990 if not pullrequest_title:
991 991 pullrequest_title = PullRequestModel().generate_pullrequest_title(
992 992 source=source_repo,
993 993 source_ref=title_source_ref,
994 994 target=target_repo
995 995 )
996 996
997 997 description = _form['pullrequest_desc']
998 998 description_renderer = _form['description_renderer']
999 999
1000 1000 try:
1001 1001 pull_request = PullRequestModel().create(
1002 1002 created_by=self._rhodecode_user.user_id,
1003 1003 source_repo=source_repo,
1004 1004 source_ref=source_ref,
1005 1005 target_repo=target_repo,
1006 1006 target_ref=target_ref,
1007 1007 revisions=commit_ids,
1008 1008 reviewers=reviewers,
1009 1009 title=pullrequest_title,
1010 1010 description=description,
1011 1011 description_renderer=description_renderer,
1012 1012 reviewer_data=reviewer_rules,
1013 1013 auth_user=self._rhodecode_user
1014 1014 )
1015 1015 Session().commit()
1016 1016
1017 1017 h.flash(_('Successfully opened new pull request'),
1018 1018 category='success')
1019 1019 except Exception:
1020 1020 msg = _('Error occurred during creation of this pull request.')
1021 1021 log.exception(msg)
1022 1022 h.flash(msg, category='error')
1023 1023
1024 1024 # copy the args back to redirect
1025 1025 org_query = self.request.GET.mixed()
1026 1026 raise HTTPFound(
1027 1027 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1028 1028 _query=org_query))
1029 1029
1030 1030 raise HTTPFound(
1031 1031 h.route_path('pullrequest_show', repo_name=target_repo,
1032 1032 pull_request_id=pull_request.pull_request_id))
1033 1033
1034 1034 @LoginRequired()
1035 1035 @NotAnonymous()
1036 1036 @HasRepoPermissionAnyDecorator(
1037 1037 'repository.read', 'repository.write', 'repository.admin')
1038 1038 @CSRFRequired()
1039 1039 @view_config(
1040 1040 route_name='pullrequest_update', request_method='POST',
1041 1041 renderer='json_ext')
1042 1042 def pull_request_update(self):
1043 1043 pull_request = PullRequest.get_or_404(
1044 1044 self.request.matchdict['pull_request_id'])
1045 1045 _ = self.request.translate
1046 1046
1047 1047 self.load_default_context()
1048 1048
1049 1049 if pull_request.is_closed():
1050 1050 log.debug('update: forbidden because pull request is closed')
1051 1051 msg = _(u'Cannot update closed pull requests.')
1052 1052 h.flash(msg, category='error')
1053 1053 return True
1054 1054
1055 1055 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1056 1056 log.debug('update: forbidden because pull request is in state %s',
1057 1057 pull_request.pull_request_state)
1058 1058 msg = _(u'Cannot update pull requests in state other than `{}`. '
1059 1059 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1060 1060 pull_request.pull_request_state)
1061 1061 h.flash(msg, category='error')
1062 1062 return True
1063 1063
1064 1064 # only owner or admin can update it
1065 1065 allowed_to_update = PullRequestModel().check_user_update(
1066 1066 pull_request, self._rhodecode_user)
1067 1067 if allowed_to_update:
1068 1068 controls = peppercorn.parse(self.request.POST.items())
1069 1069
1070 1070 if 'review_members' in controls:
1071 1071 self._update_reviewers(
1072 1072 pull_request, controls['review_members'],
1073 1073 pull_request.reviewer_data)
1074 1074 elif str2bool(self.request.POST.get('update_commits', 'false')):
1075 1075 self._update_commits(pull_request)
1076 1076 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1077 1077 self._edit_pull_request(pull_request)
1078 1078 else:
1079 1079 raise HTTPBadRequest()
1080 1080 return True
1081 1081 raise HTTPForbidden()
1082 1082
1083 1083 def _edit_pull_request(self, pull_request):
1084 1084 _ = self.request.translate
1085 1085
1086 1086 try:
1087 1087 PullRequestModel().edit(
1088 1088 pull_request,
1089 1089 self.request.POST.get('title'),
1090 1090 self.request.POST.get('description'),
1091 1091 self.request.POST.get('description_renderer'),
1092 1092 self._rhodecode_user)
1093 1093 except ValueError:
1094 1094 msg = _(u'Cannot update closed pull requests.')
1095 1095 h.flash(msg, category='error')
1096 1096 return
1097 1097 else:
1098 1098 Session().commit()
1099 1099
1100 1100 msg = _(u'Pull request title & description updated.')
1101 1101 h.flash(msg, category='success')
1102 1102 return
1103 1103
1104 1104 def _update_commits(self, pull_request):
1105 1105 _ = self.request.translate
1106 1106
1107 1107 with pull_request.set_state(PullRequest.STATE_UPDATING):
1108 1108 resp = PullRequestModel().update_commits(pull_request)
1109 1109
1110 1110 if resp.executed:
1111 1111
1112 1112 if resp.target_changed and resp.source_changed:
1113 1113 changed = 'target and source repositories'
1114 1114 elif resp.target_changed and not resp.source_changed:
1115 1115 changed = 'target repository'
1116 1116 elif not resp.target_changed and resp.source_changed:
1117 1117 changed = 'source repository'
1118 1118 else:
1119 1119 changed = 'nothing'
1120 1120
1121 1121 msg = _(u'Pull request updated to "{source_commit_id}" with '
1122 1122 u'{count_added} added, {count_removed} removed commits. '
1123 1123 u'Source of changes: {change_source}')
1124 1124 msg = msg.format(
1125 1125 source_commit_id=pull_request.source_ref_parts.commit_id,
1126 1126 count_added=len(resp.changes.added),
1127 1127 count_removed=len(resp.changes.removed),
1128 1128 change_source=changed)
1129 1129 h.flash(msg, category='success')
1130 1130
1131 1131 channel = '/repo${}$/pr/{}'.format(
1132 1132 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1133 1133 message = msg + (
1134 1134 ' - <a onclick="window.location.reload()">'
1135 1135 '<strong>{}</strong></a>'.format(_('Reload page')))
1136 1136 channelstream.post_message(
1137 1137 channel, message, self._rhodecode_user.username,
1138 1138 registry=self.request.registry)
1139 1139 else:
1140 1140 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1141 1141 warning_reasons = [
1142 1142 UpdateFailureReason.NO_CHANGE,
1143 1143 UpdateFailureReason.WRONG_REF_TYPE,
1144 1144 ]
1145 1145 category = 'warning' if resp.reason in warning_reasons else 'error'
1146 1146 h.flash(msg, category=category)
1147 1147
1148 1148 @LoginRequired()
1149 1149 @NotAnonymous()
1150 1150 @HasRepoPermissionAnyDecorator(
1151 1151 'repository.read', 'repository.write', 'repository.admin')
1152 1152 @CSRFRequired()
1153 1153 @view_config(
1154 1154 route_name='pullrequest_merge', request_method='POST',
1155 1155 renderer='json_ext')
1156 1156 def pull_request_merge(self):
1157 1157 """
1158 1158 Merge will perform a server-side merge of the specified
1159 1159 pull request, if the pull request is approved and mergeable.
1160 1160 After successful merging, the pull request is automatically
1161 1161 closed, with a relevant comment.
1162 1162 """
1163 1163 pull_request = PullRequest.get_or_404(
1164 1164 self.request.matchdict['pull_request_id'])
1165 1165 _ = self.request.translate
1166 1166
1167 1167 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1168 1168 log.debug('show: forbidden because pull request is in state %s',
1169 1169 pull_request.pull_request_state)
1170 1170 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1171 1171 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1172 1172 pull_request.pull_request_state)
1173 1173 h.flash(msg, category='error')
1174 1174 raise HTTPFound(
1175 1175 h.route_path('pullrequest_show',
1176 1176 repo_name=pull_request.target_repo.repo_name,
1177 1177 pull_request_id=pull_request.pull_request_id))
1178 1178
1179 1179 self.load_default_context()
1180 1180
1181 1181 with pull_request.set_state(PullRequest.STATE_UPDATING):
1182 1182 check = MergeCheck.validate(
1183 1183 pull_request, auth_user=self._rhodecode_user,
1184 1184 translator=self.request.translate)
1185 1185 merge_possible = not check.failed
1186 1186
1187 1187 for err_type, error_msg in check.errors:
1188 1188 h.flash(error_msg, category=err_type)
1189 1189
1190 1190 if merge_possible:
1191 1191 log.debug("Pre-conditions checked, trying to merge.")
1192 1192 extras = vcs_operation_context(
1193 1193 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1194 1194 username=self._rhodecode_db_user.username, action='push',
1195 1195 scm=pull_request.target_repo.repo_type)
1196 1196 with pull_request.set_state(PullRequest.STATE_UPDATING):
1197 1197 self._merge_pull_request(
1198 1198 pull_request, self._rhodecode_db_user, extras)
1199 1199 else:
1200 1200 log.debug("Pre-conditions failed, NOT merging.")
1201 1201
1202 1202 raise HTTPFound(
1203 1203 h.route_path('pullrequest_show',
1204 1204 repo_name=pull_request.target_repo.repo_name,
1205 1205 pull_request_id=pull_request.pull_request_id))
1206 1206
1207 1207 def _merge_pull_request(self, pull_request, user, extras):
1208 1208 _ = self.request.translate
1209 1209 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1210 1210
1211 1211 if merge_resp.executed:
1212 1212 log.debug("The merge was successful, closing the pull request.")
1213 1213 PullRequestModel().close_pull_request(
1214 1214 pull_request.pull_request_id, user)
1215 1215 Session().commit()
1216 1216 msg = _('Pull request was successfully merged and closed.')
1217 1217 h.flash(msg, category='success')
1218 1218 else:
1219 1219 log.debug(
1220 1220 "The merge was not successful. Merge response: %s", merge_resp)
1221 1221 msg = merge_resp.merge_status_message
1222 1222 h.flash(msg, category='error')
1223 1223
1224 1224 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1225 1225 _ = self.request.translate
1226 1226
1227 1227 get_default_reviewers_data, validate_default_reviewers = \
1228 1228 PullRequestModel().get_reviewer_functions()
1229 1229
1230 1230 try:
1231 1231 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1232 1232 except ValueError as e:
1233 1233 log.error('Reviewers Validation: {}'.format(e))
1234 1234 h.flash(e, category='error')
1235 1235 return
1236 1236
1237 1237 old_calculated_status = pull_request.calculated_review_status()
1238 1238 PullRequestModel().update_reviewers(
1239 1239 pull_request, reviewers, self._rhodecode_user)
1240 1240 h.flash(_('Pull request reviewers updated.'), category='success')
1241 1241 Session().commit()
1242 1242
1243 1243 # trigger status changed if change in reviewers changes the status
1244 1244 calculated_status = pull_request.calculated_review_status()
1245 1245 if old_calculated_status != calculated_status:
1246 1246 PullRequestModel().trigger_pull_request_hook(
1247 1247 pull_request, self._rhodecode_user, 'review_status_change',
1248 1248 data={'status': calculated_status})
1249 1249
1250 1250 @LoginRequired()
1251 1251 @NotAnonymous()
1252 1252 @HasRepoPermissionAnyDecorator(
1253 1253 'repository.read', 'repository.write', 'repository.admin')
1254 1254 @CSRFRequired()
1255 1255 @view_config(
1256 1256 route_name='pullrequest_delete', request_method='POST',
1257 1257 renderer='json_ext')
1258 1258 def pull_request_delete(self):
1259 1259 _ = self.request.translate
1260 1260
1261 1261 pull_request = PullRequest.get_or_404(
1262 1262 self.request.matchdict['pull_request_id'])
1263 1263 self.load_default_context()
1264 1264
1265 1265 pr_closed = pull_request.is_closed()
1266 1266 allowed_to_delete = PullRequestModel().check_user_delete(
1267 1267 pull_request, self._rhodecode_user) and not pr_closed
1268 1268
1269 1269 # only owner can delete it !
1270 1270 if allowed_to_delete:
1271 1271 PullRequestModel().delete(pull_request, self._rhodecode_user)
1272 1272 Session().commit()
1273 1273 h.flash(_('Successfully deleted pull request'),
1274 1274 category='success')
1275 1275 raise HTTPFound(h.route_path('pullrequest_show_all',
1276 1276 repo_name=self.db_repo_name))
1277 1277
1278 1278 log.warning('user %s tried to delete pull request without access',
1279 1279 self._rhodecode_user)
1280 1280 raise HTTPNotFound()
1281 1281
1282 1282 @LoginRequired()
1283 1283 @NotAnonymous()
1284 1284 @HasRepoPermissionAnyDecorator(
1285 1285 'repository.read', 'repository.write', 'repository.admin')
1286 1286 @CSRFRequired()
1287 1287 @view_config(
1288 1288 route_name='pullrequest_comment_create', request_method='POST',
1289 1289 renderer='json_ext')
1290 1290 def pull_request_comment_create(self):
1291 1291 _ = self.request.translate
1292 1292
1293 1293 pull_request = PullRequest.get_or_404(
1294 1294 self.request.matchdict['pull_request_id'])
1295 1295 pull_request_id = pull_request.pull_request_id
1296 1296
1297 1297 if pull_request.is_closed():
1298 1298 log.debug('comment: forbidden because pull request is closed')
1299 1299 raise HTTPForbidden()
1300 1300
1301 1301 allowed_to_comment = PullRequestModel().check_user_comment(
1302 1302 pull_request, self._rhodecode_user)
1303 1303 if not allowed_to_comment:
1304 1304 log.debug(
1305 1305 'comment: forbidden because pull request is from forbidden repo')
1306 1306 raise HTTPForbidden()
1307 1307
1308 1308 c = self.load_default_context()
1309 1309
1310 1310 status = self.request.POST.get('changeset_status', None)
1311 1311 text = self.request.POST.get('text')
1312 1312 comment_type = self.request.POST.get('comment_type')
1313 1313 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1314 1314 close_pull_request = self.request.POST.get('close_pull_request')
1315 1315
1316 1316 # the logic here should work like following, if we submit close
1317 1317 # pr comment, use `close_pull_request_with_comment` function
1318 1318 # else handle regular comment logic
1319 1319
1320 1320 if close_pull_request:
1321 1321 # only owner or admin or person with write permissions
1322 1322 allowed_to_close = PullRequestModel().check_user_update(
1323 1323 pull_request, self._rhodecode_user)
1324 1324 if not allowed_to_close:
1325 1325 log.debug('comment: forbidden because not allowed to close '
1326 1326 'pull request %s', pull_request_id)
1327 1327 raise HTTPForbidden()
1328 1328
1329 1329 # This also triggers `review_status_change`
1330 1330 comment, status = PullRequestModel().close_pull_request_with_comment(
1331 1331 pull_request, self._rhodecode_user, self.db_repo, message=text,
1332 1332 auth_user=self._rhodecode_user)
1333 1333 Session().flush()
1334 1334
1335 1335 PullRequestModel().trigger_pull_request_hook(
1336 1336 pull_request, self._rhodecode_user, 'comment',
1337 1337 data={'comment': comment})
1338 1338
1339 1339 else:
1340 1340 # regular comment case, could be inline, or one with status.
1341 1341 # for that one we check also permissions
1342 1342
1343 1343 allowed_to_change_status = PullRequestModel().check_user_change_status(
1344 1344 pull_request, self._rhodecode_user)
1345 1345
1346 1346 if status and allowed_to_change_status:
1347 1347 message = (_('Status change %(transition_icon)s %(status)s')
1348 1348 % {'transition_icon': '>',
1349 1349 'status': ChangesetStatus.get_status_lbl(status)})
1350 1350 text = text or message
1351 1351
1352 1352 comment = CommentsModel().create(
1353 1353 text=text,
1354 1354 repo=self.db_repo.repo_id,
1355 1355 user=self._rhodecode_user.user_id,
1356 1356 pull_request=pull_request,
1357 1357 f_path=self.request.POST.get('f_path'),
1358 1358 line_no=self.request.POST.get('line'),
1359 1359 status_change=(ChangesetStatus.get_status_lbl(status)
1360 1360 if status and allowed_to_change_status else None),
1361 1361 status_change_type=(status
1362 1362 if status and allowed_to_change_status else None),
1363 1363 comment_type=comment_type,
1364 1364 resolves_comment_id=resolves_comment_id,
1365 1365 auth_user=self._rhodecode_user
1366 1366 )
1367 1367
1368 1368 if allowed_to_change_status:
1369 1369 # calculate old status before we change it
1370 1370 old_calculated_status = pull_request.calculated_review_status()
1371 1371
1372 1372 # get status if set !
1373 1373 if status:
1374 1374 ChangesetStatusModel().set_status(
1375 1375 self.db_repo.repo_id,
1376 1376 status,
1377 1377 self._rhodecode_user.user_id,
1378 1378 comment,
1379 1379 pull_request=pull_request
1380 1380 )
1381 1381
1382 1382 Session().flush()
1383 1383 # this is somehow required to get access to some relationship
1384 1384 # loaded on comment
1385 1385 Session().refresh(comment)
1386 1386
1387 1387 PullRequestModel().trigger_pull_request_hook(
1388 1388 pull_request, self._rhodecode_user, 'comment',
1389 1389 data={'comment': comment})
1390 1390
1391 1391 # we now calculate the status of pull request, and based on that
1392 1392 # calculation we set the commits status
1393 1393 calculated_status = pull_request.calculated_review_status()
1394 1394 if old_calculated_status != calculated_status:
1395 1395 PullRequestModel().trigger_pull_request_hook(
1396 1396 pull_request, self._rhodecode_user, 'review_status_change',
1397 1397 data={'status': calculated_status})
1398 1398
1399 1399 Session().commit()
1400 1400
1401 1401 data = {
1402 1402 'target_id': h.safeid(h.safe_unicode(
1403 1403 self.request.POST.get('f_path'))),
1404 1404 }
1405 1405 if comment:
1406 1406 c.co = comment
1407 1407 rendered_comment = render(
1408 1408 'rhodecode:templates/changeset/changeset_comment_block.mako',
1409 1409 self._get_template_context(c), self.request)
1410 1410
1411 1411 data.update(comment.get_dict())
1412 1412 data.update({'rendered_text': rendered_comment})
1413 1413
1414 1414 return data
1415 1415
1416 1416 @LoginRequired()
1417 1417 @NotAnonymous()
1418 1418 @HasRepoPermissionAnyDecorator(
1419 1419 'repository.read', 'repository.write', 'repository.admin')
1420 1420 @CSRFRequired()
1421 1421 @view_config(
1422 1422 route_name='pullrequest_comment_delete', request_method='POST',
1423 1423 renderer='json_ext')
1424 1424 def pull_request_comment_delete(self):
1425 1425 pull_request = PullRequest.get_or_404(
1426 1426 self.request.matchdict['pull_request_id'])
1427 1427
1428 1428 comment = ChangesetComment.get_or_404(
1429 1429 self.request.matchdict['comment_id'])
1430 1430 comment_id = comment.comment_id
1431 1431
1432 1432 if pull_request.is_closed():
1433 1433 log.debug('comment: forbidden because pull request is closed')
1434 1434 raise HTTPForbidden()
1435 1435
1436 1436 if not comment:
1437 1437 log.debug('Comment with id:%s not found, skipping', comment_id)
1438 1438 # comment already deleted in another call probably
1439 1439 return True
1440 1440
1441 1441 if comment.pull_request.is_closed():
1442 1442 # don't allow deleting comments on closed pull request
1443 1443 raise HTTPForbidden()
1444 1444
1445 1445 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1446 1446 super_admin = h.HasPermissionAny('hg.admin')()
1447 1447 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1448 1448 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1449 1449 comment_repo_admin = is_repo_admin and is_repo_comment
1450 1450
1451 1451 if super_admin or comment_owner or comment_repo_admin:
1452 1452 old_calculated_status = comment.pull_request.calculated_review_status()
1453 1453 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1454 1454 Session().commit()
1455 1455 calculated_status = comment.pull_request.calculated_review_status()
1456 1456 if old_calculated_status != calculated_status:
1457 1457 PullRequestModel().trigger_pull_request_hook(
1458 1458 comment.pull_request, self._rhodecode_user, 'review_status_change',
1459 1459 data={'status': calculated_status})
1460 1460 return True
1461 1461 else:
1462 1462 log.warning('No permissions for user %s to delete comment_id: %s',
1463 1463 self._rhodecode_db_user, comment_id)
1464 1464 raise HTTPNotFound()
@@ -1,507 +1,506 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49
50 50 _filter_pre_load = [
51 51 # done through a more complex tree walk on parents
52 52 "affected_files",
53 # based on repository cached property
54 "branch",
55 53 # done through subprocess not remote call
56 54 "children",
57 55 # done through a more complex tree walk on parents
58 56 "status",
59 57 # mercurial specific property not supported here
60 58 "_file_paths",
61 59 # mercurial specific property not supported here
62 60 'obsolete',
63 61 # mercurial specific property not supported here
64 62 'phase',
65 63 # mercurial specific property not supported here
66 64 'hidden'
67 65 ]
68 66
69 67 def __init__(self, repository, raw_id, idx, pre_load=None):
70 68 self.repository = repository
71 69 self._remote = repository._remote
72 70 # TODO: johbo: Tweak of raw_id should not be necessary
73 71 self.raw_id = safe_str(raw_id)
74 72 self.idx = idx
75 73
76 74 self._set_bulk_properties(pre_load)
77 75
78 76 # caches
79 77 self._stat_modes = {} # stat info for paths
80 78 self._paths = {} # path processed with parse_tree
81 79 self.nodes = {}
82 80 self._submodules = None
83 81
84 82 def _set_bulk_properties(self, pre_load):
83
85 84 if not pre_load:
86 85 return
87 86 pre_load = [entry for entry in pre_load
88 87 if entry not in self._filter_pre_load]
89 88 if not pre_load:
90 89 return
91 90
92 91 result = self._remote.bulk_request(self.raw_id, pre_load)
93 92 for attr, value in result.items():
94 93 if attr in ["author", "message"]:
95 94 if value:
96 95 value = safe_unicode(value)
97 96 elif attr == "date":
98 97 value = utcdate_fromtimestamp(*value)
99 98 elif attr == "parents":
100 99 value = self._make_commits(value)
100 elif attr == "branch":
101 value = value[0] if value else None
101 102 self.__dict__[attr] = value
102 103
103 104 @LazyProperty
104 105 def _commit(self):
105 106 return self._remote[self.raw_id]
106 107
107 108 @LazyProperty
108 109 def _tree_id(self):
109 110 return self._remote[self._commit['tree']]['id']
110 111
111 112 @LazyProperty
112 113 def id(self):
113 114 return self.raw_id
114 115
115 116 @LazyProperty
116 117 def short_id(self):
117 118 return self.raw_id[:12]
118 119
119 120 @LazyProperty
120 121 def message(self):
121 122 return safe_unicode(self._remote.message(self.id))
122 123
123 124 @LazyProperty
124 125 def committer(self):
125 126 return safe_unicode(self._remote.author(self.id))
126 127
127 128 @LazyProperty
128 129 def author(self):
129 130 return safe_unicode(self._remote.author(self.id))
130 131
131 132 @LazyProperty
132 133 def date(self):
133 134 unix_ts, tz = self._remote.date(self.raw_id)
134 135 return utcdate_fromtimestamp(unix_ts, tz)
135 136
136 137 @LazyProperty
137 138 def status(self):
138 139 """
139 140 Returns modified, added, removed, deleted files for current commit
140 141 """
141 142 return self.changed, self.added, self.removed
142 143
143 144 @LazyProperty
144 145 def tags(self):
145 146 tags = [safe_unicode(name) for name,
146 147 commit_id in self.repository.tags.iteritems()
147 148 if commit_id == self.raw_id]
148 149 return tags
149 150
150 151 @LazyProperty
151 152 def commit_branches(self):
152 153 branches = []
153 154 for name, commit_id in self.repository.branches.iteritems():
154 155 if commit_id == self.raw_id:
155 156 branches.append(name)
156 157 return branches
157 158
158 159 @LazyProperty
159 160 def branch(self):
160 # actually commit can have multiple branches
161 branches = self.commit_branches
161 branches = safe_unicode(self._remote.branch(self.raw_id))
162 162 if branches:
163 return branches[0]
164
165 return None
163 # actually commit can have multiple branches in git
164 return safe_unicode(branches[0])
166 165
167 166 def _get_tree_id_for_path(self, path):
168 167 path = safe_str(path)
169 168 if path in self._paths:
170 169 return self._paths[path]
171 170
172 171 tree_id = self._tree_id
173 172
174 173 path = path.strip('/')
175 174 if path == '':
176 175 data = [tree_id, "tree"]
177 176 self._paths[''] = data
178 177 return data
179 178
180 179 tree_id, tree_type, tree_mode = \
181 180 self._remote.tree_and_type_for_path(self.raw_id, path)
182 181 if tree_id is None:
183 182 raise self.no_node_at_path(path)
184 183
185 184 self._paths[path] = [tree_id, tree_type]
186 185 self._stat_modes[path] = tree_mode
187 186
188 187 if path not in self._paths:
189 188 raise self.no_node_at_path(path)
190 189
191 190 return self._paths[path]
192 191
193 192 def _get_kind(self, path):
194 193 tree_id, type_ = self._get_tree_id_for_path(path)
195 194 if type_ == 'blob':
196 195 return NodeKind.FILE
197 196 elif type_ == 'tree':
198 197 return NodeKind.DIR
199 198 elif type_ == 'link':
200 199 return NodeKind.SUBMODULE
201 200 return None
202 201
203 202 def _get_filectx(self, path):
204 203 path = self._fix_path(path)
205 204 if self._get_kind(path) != NodeKind.FILE:
206 205 raise CommitError(
207 206 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
208 207 return path
209 208
210 209 def _get_file_nodes(self):
211 210 return chain(*(t[2] for t in self.walk()))
212 211
213 212 @LazyProperty
214 213 def parents(self):
215 214 """
216 215 Returns list of parent commits.
217 216 """
218 217 parent_ids = self._remote.parents(self.id)
219 218 return self._make_commits(parent_ids)
220 219
221 220 @LazyProperty
222 221 def children(self):
223 222 """
224 223 Returns list of child commits.
225 224 """
226 225 rev_filter = settings.GIT_REV_FILTER
227 226 output, __ = self.repository.run_git_command(
228 227 ['rev-list', '--children'] + rev_filter)
229 228
230 229 child_ids = []
231 230 pat = re.compile(r'^%s' % self.raw_id)
232 231 for l in output.splitlines():
233 232 if pat.match(l):
234 233 found_ids = l.split(' ')[1:]
235 234 child_ids.extend(found_ids)
236 235 return self._make_commits(child_ids)
237 236
238 237 def _make_commits(self, commit_ids):
239 238 def commit_maker(_commit_id):
240 239 return self.repository.get_commit(commit_id=commit_id)
241 240
242 241 return [commit_maker(commit_id) for commit_id in commit_ids]
243 242
244 243 def get_file_mode(self, path):
245 244 """
246 245 Returns stat mode of the file at the given `path`.
247 246 """
248 247 path = safe_str(path)
249 248 # ensure path is traversed
250 249 self._get_tree_id_for_path(path)
251 250 return self._stat_modes[path]
252 251
253 252 def is_link(self, path):
254 253 return stat.S_ISLNK(self.get_file_mode(path))
255 254
256 255 def get_file_content(self, path):
257 256 """
258 257 Returns content of the file at given `path`.
259 258 """
260 259 tree_id, _ = self._get_tree_id_for_path(path)
261 260 return self._remote.blob_as_pretty_string(tree_id)
262 261
263 262 def get_file_size(self, path):
264 263 """
265 264 Returns size of the file at given `path`.
266 265 """
267 266 tree_id, _ = self._get_tree_id_for_path(path)
268 267 return self._remote.blob_raw_length(tree_id)
269 268
270 269 def get_path_history(self, path, limit=None, pre_load=None):
271 270 """
272 271 Returns history of file as reversed list of `GitCommit` objects for
273 272 which file at given `path` has been modified.
274 273
275 274 TODO: This function now uses an underlying 'git' command which works
276 275 quickly but ideally we should replace with an algorithm.
277 276 """
278 277 self._get_filectx(path)
279 278 f_path = safe_str(path)
280 279
281 280 # optimize for n==1, rev-list is much faster for that use-case
282 281 if limit == 1:
283 282 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
284 283 else:
285 284 cmd = ['log']
286 285 if limit:
287 286 cmd.extend(['-n', str(safe_int(limit, 0))])
288 287 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
289 288
290 289 output, __ = self.repository.run_git_command(cmd)
291 290 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
292 291
293 292 return [
294 293 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
295 294 for commit_id in commit_ids]
296 295
297 296 def get_file_annotate(self, path, pre_load=None):
298 297 """
299 298 Returns a generator of four element tuples with
300 299 lineno, commit_id, commit lazy loader and line
301 300
302 301 TODO: This function now uses os underlying 'git' command which is
303 302 generally not good. Should be replaced with algorithm iterating
304 303 commits.
305 304 """
306 305 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
307 306 # -l ==> outputs long shas (and we need all 40 characters)
308 307 # --root ==> doesn't put '^' character for bounderies
309 308 # -r commit_id ==> blames for the given commit
310 309 output, __ = self.repository.run_git_command(cmd)
311 310
312 311 for i, blame_line in enumerate(output.split('\n')[:-1]):
313 312 line_no = i + 1
314 313 commit_id, line = re.split(r' ', blame_line, 1)
315 314 yield (
316 315 line_no, commit_id,
317 316 lambda: self.repository.get_commit(commit_id=commit_id,
318 317 pre_load=pre_load),
319 318 line)
320 319
321 320 def get_nodes(self, path):
322 321
323 322 if self._get_kind(path) != NodeKind.DIR:
324 323 raise CommitError(
325 324 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
326 325 path = self._fix_path(path)
327 326
328 327 tree_id, _ = self._get_tree_id_for_path(path)
329 328
330 329 dirnodes = []
331 330 filenodes = []
332 331
333 332 # extracted tree ID gives us our files...
334 333 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
335 334 if type_ == 'link':
336 335 url = self._get_submodule_url('/'.join((path, name)))
337 336 dirnodes.append(SubModuleNode(
338 337 name, url=url, commit=id_, alias=self.repository.alias))
339 338 continue
340 339
341 340 if path != '':
342 341 obj_path = '/'.join((path, name))
343 342 else:
344 343 obj_path = name
345 344 if obj_path not in self._stat_modes:
346 345 self._stat_modes[obj_path] = stat_
347 346
348 347 if type_ == 'tree':
349 348 dirnodes.append(DirNode(obj_path, commit=self))
350 349 elif type_ == 'blob':
351 350 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
352 351 else:
353 352 raise CommitError(
354 353 "Requested object should be Tree or Blob, is %s", type_)
355 354
356 355 nodes = dirnodes + filenodes
357 356 for node in nodes:
358 357 if node.path not in self.nodes:
359 358 self.nodes[node.path] = node
360 359 nodes.sort()
361 360 return nodes
362 361
363 362 def get_node(self, path, pre_load=None):
364 363 if isinstance(path, unicode):
365 364 path = path.encode('utf-8')
366 365 path = self._fix_path(path)
367 366 if path not in self.nodes:
368 367 try:
369 368 tree_id, type_ = self._get_tree_id_for_path(path)
370 369 except CommitError:
371 370 raise NodeDoesNotExistError(
372 371 "Cannot find one of parents' directories for a given "
373 372 "path: %s" % path)
374 373
375 374 if type_ == 'link':
376 375 url = self._get_submodule_url(path)
377 376 node = SubModuleNode(path, url=url, commit=tree_id,
378 377 alias=self.repository.alias)
379 378 elif type_ == 'tree':
380 379 if path == '':
381 380 node = RootNode(commit=self)
382 381 else:
383 382 node = DirNode(path, commit=self)
384 383 elif type_ == 'blob':
385 384 node = FileNode(path, commit=self, pre_load=pre_load)
386 385 self._stat_modes[path] = node.mode
387 386 else:
388 387 raise self.no_node_at_path(path)
389 388
390 389 # cache node
391 390 self.nodes[path] = node
392 391
393 392 return self.nodes[path]
394 393
395 394 def get_largefile_node(self, path):
396 395 tree_id, _ = self._get_tree_id_for_path(path)
397 396 pointer_spec = self._remote.is_large_file(tree_id)
398 397
399 398 if pointer_spec:
400 399 # content of that file regular FileNode is the hash of largefile
401 400 file_id = pointer_spec.get('oid_hash')
402 401 if self._remote.in_largefiles_store(file_id):
403 402 lf_path = self._remote.store_path(file_id)
404 403 return LargeFileNode(lf_path, commit=self, org_path=path)
405 404
406 405 @LazyProperty
407 406 def affected_files(self):
408 407 """
409 408 Gets a fast accessible file changes for given commit
410 409 """
411 410 added, modified, deleted = self._changes_cache
412 411 return list(added.union(modified).union(deleted))
413 412
414 413 @LazyProperty
415 414 def _changes_cache(self):
416 415 added = set()
417 416 modified = set()
418 417 deleted = set()
419 418 _r = self._remote
420 419
421 420 parents = self.parents
422 421 if not self.parents:
423 422 parents = [base.EmptyCommit()]
424 423 for parent in parents:
425 424 if isinstance(parent, base.EmptyCommit):
426 425 oid = None
427 426 else:
428 427 oid = parent.raw_id
429 428 changes = _r.tree_changes(oid, self.raw_id)
430 429 for (oldpath, newpath), (_, _), (_, _) in changes:
431 430 if newpath and oldpath:
432 431 modified.add(newpath)
433 432 elif newpath and not oldpath:
434 433 added.add(newpath)
435 434 elif not newpath and oldpath:
436 435 deleted.add(oldpath)
437 436 return added, modified, deleted
438 437
439 438 def _get_paths_for_status(self, status):
440 439 """
441 440 Returns sorted list of paths for given ``status``.
442 441
443 442 :param status: one of: *added*, *modified* or *deleted*
444 443 """
445 444 added, modified, deleted = self._changes_cache
446 445 return sorted({
447 446 'added': list(added),
448 447 'modified': list(modified),
449 448 'deleted': list(deleted)}[status]
450 449 )
451 450
452 451 @LazyProperty
453 452 def added(self):
454 453 """
455 454 Returns list of added ``FileNode`` objects.
456 455 """
457 456 if not self.parents:
458 457 return list(self._get_file_nodes())
459 458 return AddedFileNodesGenerator(
460 459 [n for n in self._get_paths_for_status('added')], self)
461 460
462 461 @LazyProperty
463 462 def changed(self):
464 463 """
465 464 Returns list of modified ``FileNode`` objects.
466 465 """
467 466 if not self.parents:
468 467 return []
469 468 return ChangedFileNodesGenerator(
470 469 [n for n in self._get_paths_for_status('modified')], self)
471 470
472 471 @LazyProperty
473 472 def removed(self):
474 473 """
475 474 Returns list of removed ``FileNode`` objects.
476 475 """
477 476 if not self.parents:
478 477 return []
479 478 return RemovedFileNodesGenerator(
480 479 [n for n in self._get_paths_for_status('deleted')], self)
481 480
482 481 def _get_submodule_url(self, submodule_path):
483 482 git_modules_path = '.gitmodules'
484 483
485 484 if self._submodules is None:
486 485 self._submodules = {}
487 486
488 487 try:
489 488 submodules_node = self.get_node(git_modules_path)
490 489 except NodeDoesNotExistError:
491 490 return None
492 491
493 492 content = submodules_node.content
494 493
495 494 # ConfigParser fails if there are whitespaces
496 495 content = '\n'.join(l.strip() for l in content.split('\n'))
497 496
498 497 parser = configparser.ConfigParser()
499 498 parser.readfp(StringIO(content))
500 499
501 500 for section in parser.sections():
502 501 path = parser.get(section, 'path')
503 502 url = parser.get(section, 'url')
504 503 if path and url:
505 504 self._submodules[path.strip('/')] = url
506 505
507 506 return self._submodules.get(submodule_path.strip('/'))
@@ -1,850 +1,850 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25 25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 31 from rhodecode.lib.utils import safe_unicode, safe_str
32 32 from rhodecode.lib.utils2 import md5
33 33 from rhodecode.lib.vcs import path as vcspath
34 34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 37
38 38 LARGEFILE_PREFIX = '.hglf'
39 39
40 40
41 41 class NodeKind:
42 42 SUBMODULE = -1
43 43 DIR = 1
44 44 FILE = 2
45 45 LARGEFILE = 3
46 46
47 47
48 48 class NodeState:
49 49 ADDED = u'added'
50 50 CHANGED = u'changed'
51 51 NOT_CHANGED = u'not changed'
52 52 REMOVED = u'removed'
53 53
54 54
55 55 class NodeGeneratorBase(object):
56 56 """
57 57 Base class for removed added and changed filenodes, it's a lazy generator
58 58 class that will create filenodes only on iteration or call
59 59
60 60 The len method doesn't need to create filenodes at all
61 61 """
62 62
63 63 def __init__(self, current_paths, cs):
64 64 self.cs = cs
65 65 self.current_paths = current_paths
66 66
67 67 def __call__(self):
68 68 return [n for n in self]
69 69
70 70 def __getslice__(self, i, j):
71 71 for p in self.current_paths[i:j]:
72 72 yield self.cs.get_node(p)
73 73
74 74 def __len__(self):
75 75 return len(self.current_paths)
76 76
77 77 def __iter__(self):
78 78 for p in self.current_paths:
79 79 yield self.cs.get_node(p)
80 80
81 81
82 82 class AddedFileNodesGenerator(NodeGeneratorBase):
83 83 """
84 84 Class holding added files for current commit
85 85 """
86 86
87 87
88 88 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 89 """
90 90 Class holding changed files for current commit
91 91 """
92 92
93 93
94 94 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 95 """
96 96 Class holding removed files for current commit
97 97 """
98 98 def __iter__(self):
99 99 for p in self.current_paths:
100 100 yield RemovedFileNode(path=p)
101 101
102 102 def __getslice__(self, i, j):
103 103 for p in self.current_paths[i:j]:
104 104 yield RemovedFileNode(path=p)
105 105
106 106
107 107 class Node(object):
108 108 """
109 109 Simplest class representing file or directory on repository. SCM backends
110 110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 111 directly.
112 112
113 113 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 114 only. Moreover, every single node is identified by the ``path`` attribute,
115 115 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 116 """
117 117 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 118 # security attacks could be used with this
119 119 commit = None
120 120
121 121 def __init__(self, path, kind):
122 122 self._validate_path(path) # can throw exception if path is invalid
123 123 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 124 if path == '' and kind != NodeKind.DIR:
125 125 raise NodeError("Only DirNode and its subclasses may be "
126 126 "initialized with empty path")
127 127 self.kind = kind
128 128
129 129 if self.is_root() and not self.is_dir():
130 130 raise NodeError("Root node cannot be FILE kind")
131 131
132 132 def _validate_path(self, path):
133 133 if path.startswith('/'):
134 134 raise NodeError(
135 135 "Cannot initialize Node objects with slash at "
136 136 "the beginning as only relative paths are supported. "
137 137 "Got %s" % (path,))
138 138
139 139 @LazyProperty
140 140 def parent(self):
141 141 parent_path = self.get_parent_path()
142 142 if parent_path:
143 143 if self.commit:
144 144 return self.commit.get_node(parent_path)
145 145 return DirNode(parent_path)
146 146 return None
147 147
148 148 @LazyProperty
149 149 def unicode_path(self):
150 150 return safe_unicode(self.path)
151 151
152 152 @LazyProperty
153 153 def has_rtlo(self):
154 154 """Detects if a path has right-to-left-override marker"""
155 155 return self.RTLO_MARKER in self.unicode_path
156 156
157 157 @LazyProperty
158 158 def unicode_path_safe(self):
159 159 """
160 160 Special SAFE representation of path without the right-to-left-override.
161 161 This should be only used for "showing" the file, cannot be used for any
162 162 urls etc.
163 163 """
164 164 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165 165
166 166 @LazyProperty
167 167 def dir_path(self):
168 168 """
169 169 Returns name of the directory from full path of this vcs node. Empty
170 170 string is returned if there's no directory in the path
171 171 """
172 172 _parts = self.path.rstrip('/').rsplit('/', 1)
173 173 if len(_parts) == 2:
174 174 return safe_unicode(_parts[0])
175 175 return u''
176 176
177 177 @LazyProperty
178 178 def name(self):
179 179 """
180 180 Returns name of the node so if its path
181 181 then only last part is returned.
182 182 """
183 183 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184 184
185 185 @property
186 186 def kind(self):
187 187 return self._kind
188 188
189 189 @kind.setter
190 190 def kind(self, kind):
191 191 if hasattr(self, '_kind'):
192 192 raise NodeError("Cannot change node's kind")
193 193 else:
194 194 self._kind = kind
195 195 # Post setter check (path's trailing slash)
196 196 if self.path.endswith('/'):
197 197 raise NodeError("Node's path cannot end with slash")
198 198
199 199 def __cmp__(self, other):
200 200 """
201 201 Comparator using name of the node, needed for quick list sorting.
202 202 """
203 203
204 204 kind_cmp = cmp(self.kind, other.kind)
205 205 if kind_cmp:
206 206 if isinstance(self, SubModuleNode):
207 207 # we make submodules equal to dirnode for "sorting" purposes
208 208 return NodeKind.DIR
209 209 return kind_cmp
210 210 return cmp(self.name, other.name)
211 211
212 212 def __eq__(self, other):
213 213 for attr in ['name', 'path', 'kind']:
214 214 if getattr(self, attr) != getattr(other, attr):
215 215 return False
216 216 if self.is_file():
217 217 if self.content != other.content:
218 218 return False
219 219 else:
220 220 # For DirNode's check without entering each dir
221 221 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 222 other_nodes_paths = list(sorted(n.path for n in self.nodes))
223 223 if self_nodes_paths != other_nodes_paths:
224 224 return False
225 225 return True
226 226
227 227 def __ne__(self, other):
228 228 return not self.__eq__(other)
229 229
230 230 def __repr__(self):
231 231 return '<%s %r>' % (self.__class__.__name__, self.path)
232 232
233 233 def __str__(self):
234 234 return self.__repr__()
235 235
236 236 def __unicode__(self):
237 237 return self.name
238 238
239 239 def get_parent_path(self):
240 240 """
241 241 Returns node's parent path or empty string if node is root.
242 242 """
243 243 if self.is_root():
244 244 return ''
245 245 return vcspath.dirname(self.path.rstrip('/')) + '/'
246 246
247 247 def is_file(self):
248 248 """
249 249 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 250 otherwise.
251 251 """
252 252 return self.kind == NodeKind.FILE
253 253
254 254 def is_dir(self):
255 255 """
256 256 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 257 otherwise.
258 258 """
259 259 return self.kind == NodeKind.DIR
260 260
261 261 def is_root(self):
262 262 """
263 263 Returns ``True`` if node is a root node and ``False`` otherwise.
264 264 """
265 265 return self.kind == NodeKind.DIR and self.path == ''
266 266
267 267 def is_submodule(self):
268 268 """
269 269 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 270 otherwise.
271 271 """
272 272 return self.kind == NodeKind.SUBMODULE
273 273
274 274 def is_largefile(self):
275 275 """
276 276 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 277 otherwise
278 278 """
279 279 return self.kind == NodeKind.LARGEFILE
280 280
281 281 def is_link(self):
282 282 if self.commit:
283 283 return self.commit.is_link(self.path)
284 284 return False
285 285
286 286 @LazyProperty
287 287 def added(self):
288 288 return self.state is NodeState.ADDED
289 289
290 290 @LazyProperty
291 291 def changed(self):
292 292 return self.state is NodeState.CHANGED
293 293
294 294 @LazyProperty
295 295 def not_changed(self):
296 296 return self.state is NodeState.NOT_CHANGED
297 297
298 298 @LazyProperty
299 299 def removed(self):
300 300 return self.state is NodeState.REMOVED
301 301
302 302
class FileNode(Node):
    """
    Class representing file nodes.

    :attribute: path: path to the node, relative to repository's root
    :attribute: content: if given arbitrary sets content of the file
    :attribute: commit: if given, first time content is accessed, callback
    :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
    """
    # attribute names listed here are removed from any ``pre_load`` request;
    # subclasses may override to veto expensive eager loads
    _filter_pre_load = []

    def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        :param pre_load: optional list of attribute names to resolve eagerly
            and cache on the instance (see ``_set_bulk_properties``)
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")
        super(FileNode, self).__init__(path, kind=NodeKind.FILE)
        self.commit = commit
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT

        self._set_bulk_properties(pre_load)

    def _set_bulk_properties(self, pre_load):
        """
        Eagerly resolve the requested attributes and cache the results in
        ``__dict__`` so later LazyProperty lookups become plain reads.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        for attr_name in pre_load:
            result = getattr(self, attr_name)
            if callable(result):
                result = result()
            self.__dict__[attr_name] = result

    @LazyProperty
    def mode(self):
        """
        Returns lazily mode of the FileNode. If `commit` is not set, would
        use value given at initialization or `FILEMODE_DEFAULT` (default).
        """
        if self.commit:
            mode = self.commit.get_file_mode(self.path)
        else:
            mode = self._mode
        return mode

    @LazyProperty
    def raw_bytes(self):
        """
        Returns lazily the raw bytes of the FileNode.
        """
        if self.commit:
            # fetch from the commit once and memoize on the instance
            if self._content is None:
                self._content = self.commit.get_file_content(self.path)
            content = self._content
        else:
            content = self._content
        return content

    @LazyProperty
    def md5(self):
        """
        Returns md5 of the file node.
        """
        return md5(self.raw_bytes)

    def metadata_uncached(self):
        """
        Returns a ``(is_binary, md5, size, content)`` tuple for this file
        node, without any cache usage.
        """

        content = self.content_uncached()

        # binary detection: presence of a NUL byte in the raw content
        is_binary = content and '\0' in content
        size = 0
        if content:
            size = len(content)

        return is_binary, md5(content), size, content

    def content_uncached(self):
        """
        Returns content of the FileNode fetched fresh from the commit (or
        the constructor-supplied content), bypassing LazyProperty caching.
        NOTE: unlike ``content``, no unicode decoding is attempted here.
        """
        if self.commit:
            content = self.commit.get_file_content(self.path)
        else:
            content = self._content
        return content

    @LazyProperty
    def content(self):
        """
        Returns lazily content of the FileNode. If possible, would try to
        decode content from UTF-8.
        """
        content = self.raw_bytes

        # binary payloads are returned as-is; text is decoded to unicode
        if self.is_binary:
            return content
        return safe_unicode(content)

    @LazyProperty
    def size(self):
        """
        File size in bytes, resolved through the related commit.

        :raises NodeError: when no commit is attached to this node
        """
        if self.commit:
            return self.commit.get_file_size(self.path)
        raise NodeError(
            "Cannot retrieve size of the file without related "
            "commit attribute")

    @LazyProperty
    def message(self):
        """
        Commit message of the last commit that touched this file.

        :raises NodeError: when no commit is attached to this node
        """
        if self.commit:
            return self.last_commit.message
        raise NodeError(
            "Cannot retrieve message of the file without related "
            "commit attribute")

    @LazyProperty
    def last_commit(self):
        """
        Last commit that touched this path; pre-loads commonly used commit
        attributes (including parents) in one call for better caching.

        :raises NodeError: when no commit is attached to this node
        """
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def get_mimetype(self):
        """
        Mimetype is calculated based on the file's content. If ``_mimetype``
        attribute is available, it will be returned (backends which store
        mimetypes or can easily recognize them, should set this private
        attribute to indicate that type should *NOT* be calculated).

        :returns: ``(mimetype, encoding)`` tuple
        """

        if hasattr(self, '_mimetype'):
            if (isinstance(self._mimetype, (tuple, list,)) and
                    len(self._mimetype) == 2):
                return self._mimetype
            else:
                raise NodeError('given _mimetype attribute must be an 2 '
                                'element list or tuple')

        db = get_mimetypes_db()
        mtype, encoding = db.guess_type(self.name)

        if mtype is None:
            # fall back to a generic type based on binary detection
            if self.is_binary:
                mtype = 'application/octet-stream'
                encoding = None
            else:
                mtype = 'text/plain'
                encoding = None

        # try with pygments
        try:
            from pygments.lexers import get_lexer_for_filename
            mt = get_lexer_for_filename(self.name).mimetypes
        except Exception:
            mt = None

        if mt:
            mtype = mt[0]

        return mtype, encoding

    @LazyProperty
    def mimetype(self):
        """
        Wrapper around full mimetype info. It returns only type of fetched
        mimetype without the encoding part. use get_mimetype function to fetch
        full set of (type,encoding)
        """
        return self.get_mimetype()[0]

    @LazyProperty
    def mimetype_main(self):
        # main part of the mimetype, e.g. 'text' for 'text/plain'
        return self.mimetype.split('/')[0]

    @classmethod
    def get_lexer(cls, filename, content=None):
        """
        Guess and return a pygments lexer for *filename*/*content*,
        falling back to ``LANGUAGES_EXTENSIONS_MAP`` and finally a plain
        ``TextLexer``.
        """
        from pygments import lexers

        extension = filename.split('.')[-1]
        lexer = None

        try:
            lexer = lexers.guess_lexer_for_filename(
                filename, content, stripnl=False)
        except lexers.ClassNotFound:
            lexer = None

        # try our EXTENSION_MAP
        if not lexer:
            try:
                lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
                if lexer_class:
                    lexer = lexers.get_lexer_by_name(lexer_class[0])
            except lexers.ClassNotFound:
                lexer = None

        if not lexer:
            lexer = lexers.TextLexer(stripnl=False)

        return lexer

    @LazyProperty
    def lexer(self):
        """
        Returns pygment's lexer class. Would try to guess lexer taking file's
        content, name and mimetype.
        """
        return self.get_lexer(self.name, self.content)

    @LazyProperty
    def lexer_alias(self):
        """
        Returns first alias of the lexer guessed for this file.
        """
        return self.lexer.aliases[0]

    @LazyProperty
    def history(self):
        """
        Returns a list of commit for this file in which the file was changed
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        return self.commit.get_path_history(self.path)

    @LazyProperty
    def annotate(self):
        """
        Returns a list of three element tuples with lineno, commit and line
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        # pre-load commit attributes (incl. parents) for better caching
        pre_load = ["author", "date", "message", "parents"]
        return self.commit.get_file_annotate(self.path, pre_load=pre_load)

    @LazyProperty
    def state(self):
        """
        ``NodeState`` of this file relative to the related commit (ADDED,
        CHANGED or NOT_CHANGED).

        :raises NodeError: when no commit is attached to this node
        """
        if not self.commit:
            raise NodeError(
                "Cannot check state of the node if it's not "
                "linked with commit")
        elif self.path in (node.path for node in self.commit.added):
            return NodeState.ADDED
        elif self.path in (node.path for node in self.commit.changed):
            return NodeState.CHANGED
        else:
            return NodeState.NOT_CHANGED

    @LazyProperty
    def is_binary(self):
        """
        Returns True if file has binary content.
        """
        # heuristic: a NUL byte in the raw content marks the file binary
        _bin = self.raw_bytes and '\0' in self.raw_bytes
        return _bin

    @LazyProperty
    def extension(self):
        """Returns filenode extension"""
        return self.name.split('.')[-1]

    @property
    def is_executable(self):
        """
        Returns ``True`` if file has executable flag turned on.
        """
        return bool(self.mode & stat.S_IXUSR)

    def get_largefile_node(self):
        """
        Try to return a Mercurial FileNode from this node. It does internal
        checks inside largefile store, if that file exist there it will
        create special instance of LargeFileNode which can get content from
        LF store.
        """
        if self.commit:
            return self.commit.get_largefile_node(self.path)

    def lines(self, count_empty=False):
        """
        Return a ``(all_lines, counted)`` tuple; with ``count_empty`` the
        second element is the number of non-empty lines, otherwise both
        elements use a fast newline count. Binary files report ``(0, 0)``.
        """
        all_lines, empty_lines = 0, 0

        if not self.is_binary:
            content = self.content
            if count_empty:
                all_lines = 0
                empty_lines = 0
                for line in content.splitlines(True):
                    if line == '\n':
                        empty_lines += 1
                    all_lines += 1

                return all_lines, all_lines - empty_lines
            else:
                # fast method
                empty_lines = all_lines = content.count('\n')
                if all_lines == 0 and content:
                    # one-line without a newline
                    empty_lines = all_lines = 1

        return all_lines, empty_lines

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
623 623
624 624
class RemovedFileNode(FileNode):
    """
    Placeholder FileNode for a file removed in a commit. Accessing any
    public attribute except path, name, kind or state (or the helpers
    derived from those) raises ``RemovedFileNodeError``.
    """
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # whitelisted and private attributes resolve normally
        if attr in RemovedFileNode.ALLOWED_ATTRIBUTES or attr.startswith('_'):
            return super(RemovedFileNode, self).__getattribute__(attr)
        raise RemovedFileNodeError(
            "Cannot access attribute %s on RemovedFileNode" % attr)

    @LazyProperty
    def state(self):
        # a removed file is, by definition, in REMOVED state
        return NodeState.REMOVED
651 651
652 652
class DirNode(Node):
    """
    DirNode stores list of files and directories within this node.
    Nodes may be used standalone but within repository context they
    lazily fetch data within same repository's commit.
    """

    def __init__(self, path, nodes=(), commit=None):
        """
        Only one of ``nodes`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param nodes: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        """
        if nodes and commit:
            raise NodeError("Cannot use both nodes and commit")
        super(DirNode, self).__init__(path, NodeKind.DIR)
        self.commit = commit
        self._nodes = nodes

    @LazyProperty
    def content(self):
        # directories have no content; accessing it is a programming error
        raise NodeError(
            "%s represents a dir and has no `content` attribute" % self)

    @LazyProperty
    def nodes(self):
        """
        Sorted child nodes; also builds the ``_nodes_dict`` path->node
        mapping used by ``get_node`` as a side effect.
        """
        if self.commit:
            nodes = self.commit.get_nodes(self.path)
        else:
            nodes = self._nodes
        self._nodes_dict = dict((node.path, node) for node in nodes)
        return sorted(nodes)

    @LazyProperty
    def files(self):
        # sorted file children only
        return sorted((node for node in self.nodes if node.is_file()))

    @LazyProperty
    def dirs(self):
        # sorted directory children only
        return sorted((node for node in self.nodes if node.is_dir()))

    def __iter__(self):
        for node in self.nodes:
            yield node

    def get_node(self, path):
        """
        Returns node from within this particular ``DirNode``, so it is now
        allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
        'docs'. In order to access deeper nodes one must fetch nodes between
        them first - this would work::

           docs = root.get_node('docs')
           docs.get_node('api').get_node('index.rst')

        :param: path - relative to the current node

        .. note::
           To access lazily (as in example above) node have to be initialized
           with related commit object - without it node is out of
           context and may know nothing about anything else than nearest
           (located at same level) nodes.
        """
        try:
            path = path.rstrip('/')
            if path == '':
                raise NodeError("Cannot retrieve node without path")
            self.nodes  # access nodes first in order to set _nodes_dict
            paths = path.split('/')
            if len(paths) == 1:
                # direct child: qualify the name with this node's own path
                if not self.is_root():
                    path = '/'.join((self.path, paths[0]))
                else:
                    path = paths[0]
                return self._nodes_dict[path]
            elif len(paths) > 1:
                # deeper lookup: recurse one level at a time
                if self.commit is None:
                    raise NodeError(
                        "Cannot access deeper nodes without commit")
                else:
                    path1, path2 = paths[0], '/'.join(paths[1:])
                    return self.get_node(path1).get_node(path2)
            else:
                raise KeyError
        except KeyError:
            raise NodeError("Node does not exist at %s" % path)

    @LazyProperty
    def state(self):
        # directories carry no ADDED/CHANGED/REMOVED state of their own
        raise NodeError("Cannot access state of DirNode")

    @LazyProperty
    def size(self):
        """Total size of all files under this directory (recursive walk)."""
        size = 0
        for root, dirs, files in self.commit.walk(self.path):
            for f in files:
                size += f.size

        return size

    @LazyProperty
    def last_commit(self):
        """
        Last commit that touched this path; pre-loads commonly used commit
        attributes (including parents) in one call for better caching.

        :raises NodeError: when no commit is attached to this node
        """
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
768 768
769 769
class RootNode(DirNode):
    """
    DirNode being the root node of the repository.
    """

    def __init__(self, nodes=(), commit=None):
        # the root is simply a DirNode anchored at the empty path
        super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)

    def __repr__(self):
        return '<%s>' % self.__class__.__name__
780 780
781 781
class SubModuleNode(Node):
    """
    Represents a SubModule of Git or SubRepo of Mercurial.
    """
    # submodules are never treated as binary and report zero size
    is_binary = False
    size = 0

    def __init__(self, name, url=None, commit=None, alias=None):
        # NOTE: deliberately bypasses Node.__init__ (no path validation)
        self.path = name
        self.kind = NodeKind.SUBMODULE
        self.alias = alias

        # we have to use EmptyCommit here since this can point to svn/git/hg
        # submodules we cannot get from repository
        self.commit = EmptyCommit(str(commit), alias=alias)
        self.url = url or self._extract_submodule_url()

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))

    def _extract_submodule_url(self):
        # TODO: find a way to parse gits submodule file and extract the
        # linking URL
        return self.path

    @LazyProperty
    def name(self):
        """
        Display name: the last path segment suffixed with the commit's
        short id.
        """
        base_name = safe_unicode(self.path.rstrip('/').split('/')[-1])
        return u'%s @ %s' % (base_name, self.commit.short_id)
816 816
817 817
class LargeFileNode(FileNode):
    """
    FileNode variant whose ``path`` points directly into the largefile
    store on disk; size and content are read straight from that file.
    """

    def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
        # NOTE: deliberately bypasses FileNode.__init__
        self.path = path
        self.org_path = org_path
        self.kind = NodeKind.LARGEFILE
        self.alias = alias

    def _validate_path(self, path):
        """
        we override check since the LargeFileNode path is system absolute
        """
        pass

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    @LazyProperty
    def size(self):
        # stat the backing store file directly
        return os.stat(self.path).st_size

    @LazyProperty
    def raw_bytes(self):
        # read the whole largefile from the store
        with open(self.path, 'rb') as f:
            return f.read()

    @LazyProperty
    def name(self):
        """
        Overwrites name to be the org lf path
        """
        return self.org_path
@@ -1,1742 +1,1742 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed',
])
71 71
72 72
class PullRequestModel(BaseModel):
    """
    Business logic around pull requests: permission checks, listing
    queries and update handling.
    """

    cls = PullRequest

    # default diff context size used when rendering pull request diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # human-readable messages keyed by ``UpdateFailureReason`` codes
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # all known VCS reference types, and the subset usable for PR updates
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc', only_created=True):
        """
        Build (but do not execute) the base SQLAlchemy query for listing
        pull requests of a repository.

        :param repo_name: target or source repo name (may be empty/None)
        :param source: treat *repo_name* as the source repo when ``True``
        :param statuses: optional list of PR statuses to filter on
        :param opened_by: optional list of author user ids
        :param order_by: key of ``order_map`` to sort by, or ``None``
        :param order_dir: 'asc' or 'desc' sorting direction
        :param only_created: restrict to PRs in "created" state
        :returns: un-executed SQLAlchemy query object
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            # maps external sort keys onto ORM columns
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all Pull requests that i'm participating in, or i have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
    def get_versions(self, pull_request):
        """
        Returns the versions of the given pull request, ordered by version
        id ascending (oldest version first).

        NOTE(review): the previous docstring claimed "descending", but the
        query explicitly orders by ``pull_request_version_id.asc()`` —
        callers rely on ``versions[-1]`` being the latest version.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
415 415
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request together with its reviewer entries.

        The pull request starts in STATE_CREATING; after an initial merge
        simulation it is flipped to STATE_CREATED.

        :param created_by: user (or user id) that opens the pull request
        :param source_repo: repo (or repo name) the changes come from
        :param source_ref: source ref string, e.g. 'branch:name:commit_id'
        :param target_repo: repo (or repo name) the changes go into
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: list of tuples
            (user_id, reasons, mandatory_flag, rules)
        :param title: pull request title
        :param description: optional description text
        :param description_renderer: renderer used for the description
        :param reviewer_data: rule data of the reviewer computation
        :param translator: optional translation function; taken from the
            current request when not given
        :param auth_user: user running this action; defaults to created_by
        :returns: the newly created ``PullRequest``
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        # flush so the pull request gets its id, needed for reviewer rows
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
        Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED):
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook ! for comment. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of *pull_request* into its target repo.

        Builds the merge commit message from the configured template, spins
        up the callback hook daemon, injects the hook extras into the target
        repo config and delegates to the backend ``merge`` call.

        :param pull_request: pull request to merge
        :param user: user performing the merge; provides author name/email
        :param extras: dict of hook extras passed to the vcs layer
        :param merge_msg: optional message template; defaults to
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :returns: backend merge response object
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # make sure the target ref points at a currently existing commit
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
633 633
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge commit on the pull
        request, add a closing comment and close it.

        :param merge_state: backend merge response; ``merge_ref`` holds the
            resulting merge commit
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 686
687 687 try:
688 688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
689 689 except CommitDoesNotExistError:
690 690 return UpdateResponse(
691 691 executed=False,
692 692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
693 693 old=pull_request, new=None, changes=None,
694 694 source_changed=False, target_changed=False)
695 695
696 696 source_changed = source_ref_id != source_commit.raw_id
697 697
698 698 # target repo
699 699 target_repo = pull_request.target_repo.scm_instance()
700 700
701 701 try:
702 702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
703 703 except CommitDoesNotExistError:
704 704 return UpdateResponse(
705 705 executed=False,
706 706 reason=UpdateFailureReason.MISSING_TARGET_REF,
707 707 old=pull_request, new=None, changes=None,
708 708 source_changed=False, target_changed=False)
709 709 target_changed = target_ref_id != target_commit.raw_id
710 710
711 711 if not (source_changed or target_changed):
712 712 log.debug("Nothing changed in pull request %s", pull_request)
713 713 return UpdateResponse(
714 714 executed=False,
715 715 reason=UpdateFailureReason.NO_CHANGE,
716 716 old=pull_request, new=None, changes=None,
717 717 source_changed=target_changed, target_changed=source_changed)
718 718
719 719 change_in_found = 'target repo' if target_changed else 'source repo'
720 720 log.debug('Updating pull request because of change in %s detected',
721 721 change_in_found)
722 722
723 723 # Finally there is a need for an update, in case of source change
724 724 # we create a new version, else just an update
725 725 if source_changed:
726 726 pull_request_version = self._create_version_from_snapshot(pull_request)
727 727 self._link_comments_to_version(pull_request_version)
728 728 else:
729 729 try:
730 730 ver = pull_request.versions[-1]
731 731 except IndexError:
732 732 ver = None
733 733
734 734 pull_request.pull_request_version_id = \
735 735 ver.pull_request_version_id if ver else None
736 736 pull_request_version = pull_request
737 737
738 738 try:
739 739 if target_ref_type in self.REF_TYPES:
740 740 target_commit = target_repo.get_commit(target_ref_name)
741 741 else:
742 742 target_commit = target_repo.get_commit(target_ref_id)
743 743 except CommitDoesNotExistError:
744 744 return UpdateResponse(
745 745 executed=False,
746 746 reason=UpdateFailureReason.MISSING_TARGET_REF,
747 747 old=pull_request, new=None, changes=None,
748 748 source_changed=source_changed, target_changed=target_changed)
749 749
750 750 # re-compute commit ids
751 751 old_commit_ids = pull_request.revisions
752 pre_load = ["author", "branch", "date", "message"]
752 pre_load = ["author", "date", "message", "branch"]
753 753 commit_ranges = target_repo.compare(
754 754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
755 755 pre_load=pre_load)
756 756
757 757 ancestor = source_repo.get_common_ancestor(
758 758 source_commit.raw_id, target_commit.raw_id, target_repo)
759 759
760 760 pull_request.source_ref = '%s:%s:%s' % (
761 761 source_ref_type, source_ref_name, source_commit.raw_id)
762 762 pull_request.target_ref = '%s:%s:%s' % (
763 763 target_ref_type, target_ref_name, ancestor)
764 764
765 765 pull_request.revisions = [
766 766 commit.raw_id for commit in reversed(commit_ranges)]
767 767 pull_request.updated_on = datetime.datetime.now()
768 768 Session().add(pull_request)
769 769 new_commit_ids = pull_request.revisions
770 770
771 771 old_diff_data, new_diff_data = self._generate_update_diffs(
772 772 pull_request, pull_request_version)
773 773
774 774 # calculate commit and file changes
775 775 changes = self._calculate_commit_id_changes(
776 776 old_commit_ids, new_commit_ids)
777 777 file_changes = self._calculate_file_changes(
778 778 old_diff_data, new_diff_data)
779 779
780 780 # set comments as outdated if DIFFS changed
781 781 CommentsModel().outdate_comments(
782 782 pull_request, old_diff_data=old_diff_data,
783 783 new_diff_data=new_diff_data)
784 784
785 785 commit_changes = (changes.added or changes.removed)
786 786 file_node_changes = (
787 787 file_changes.added or file_changes.modified or file_changes.removed)
788 788 pr_has_changes = commit_changes or file_node_changes
789 789
790 790 # Add an automatic comment to the pull request, in case
791 791 # anything has changed
792 792 if pr_has_changes:
793 793 update_comment = CommentsModel().create(
794 794 text=self._render_update_message(changes, file_changes),
795 795 repo=pull_request.target_repo,
796 796 user=pull_request.author,
797 797 pull_request=pull_request,
798 798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
799 799
800 800 # Update status to "Under Review" for added commits
801 801 for commit_id in changes.added:
802 802 ChangesetStatusModel().set_status(
803 803 repo=pull_request.source_repo,
804 804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
805 805 comment=update_comment,
806 806 user=pull_request.author,
807 807 pull_request=pull_request,
808 808 revision=commit_id)
809 809
810 810 log.debug(
811 811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
812 812 'removed_ids: %s', pull_request.pull_request_id,
813 813 changes.added, changes.common, changes.removed)
814 814 log.debug(
815 815 'Updated pull request with the following file changes: %s',
816 816 file_changes)
817 817
818 818 log.info(
819 819 "Updated pull request %s from commit %s to commit %s, "
820 820 "stored new version %s of this pull request.",
821 821 pull_request.pull_request_id, source_ref_id,
822 822 pull_request.source_ref_parts.commit_id,
823 823 pull_request_version.pull_request_version_id)
824 824 Session().commit()
825 825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
826 826
827 827 return UpdateResponse(
828 828 executed=True, reason=UpdateFailureReason.NONE,
829 829 old=pull_request, new=pull_request_version, changes=changes,
830 830 source_changed=source_changed, target_changed=target_changed)
831 831
832 832 def _create_version_from_snapshot(self, pull_request):
833 833 version = PullRequestVersion()
834 834 version.title = pull_request.title
835 835 version.description = pull_request.description
836 836 version.status = pull_request.status
837 837 version.pull_request_state = pull_request.pull_request_state
838 838 version.created_on = datetime.datetime.now()
839 839 version.updated_on = pull_request.updated_on
840 840 version.user_id = pull_request.user_id
841 841 version.source_repo = pull_request.source_repo
842 842 version.source_ref = pull_request.source_ref
843 843 version.target_repo = pull_request.target_repo
844 844 version.target_ref = pull_request.target_ref
845 845
846 846 version._last_merge_source_rev = pull_request._last_merge_source_rev
847 847 version._last_merge_target_rev = pull_request._last_merge_target_rev
848 848 version.last_merge_status = pull_request.last_merge_status
849 849 version.shadow_merge_ref = pull_request.shadow_merge_ref
850 850 version.merge_rev = pull_request.merge_rev
851 851 version.reviewer_data = pull_request.reviewer_data
852 852
853 853 version.revisions = pull_request.revisions
854 854 version.pull_request = pull_request
855 855 Session().add(version)
856 856 Session().flush()
857 857
858 858 return version
859 859
860 860 def _generate_update_diffs(self, pull_request, pull_request_version):
861 861
862 862 diff_context = (
863 863 self.DIFF_CONTEXT +
864 864 CommentsModel.needed_extra_diff_context())
865 865 hide_whitespace_changes = False
866 866 source_repo = pull_request_version.source_repo
867 867 source_ref_id = pull_request_version.source_ref_parts.commit_id
868 868 target_ref_id = pull_request_version.target_ref_parts.commit_id
869 869 old_diff = self._get_diff_from_pr_or_version(
870 870 source_repo, source_ref_id, target_ref_id,
871 871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
872 872
873 873 source_repo = pull_request.source_repo
874 874 source_ref_id = pull_request.source_ref_parts.commit_id
875 875 target_ref_id = pull_request.target_ref_parts.commit_id
876 876
877 877 new_diff = self._get_diff_from_pr_or_version(
878 878 source_repo, source_ref_id, target_ref_id,
879 879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
880 880
881 881 old_diff_data = diffs.DiffProcessor(old_diff)
882 882 old_diff_data.prepare()
883 883 new_diff_data = diffs.DiffProcessor(new_diff)
884 884 new_diff_data.prepare()
885 885
886 886 return old_diff_data, new_diff_data
887 887
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # only comments not yet attached to any version are picked up
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        # NOTE(review): keep the per-row loop until the bulk issue above is
        # understood.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
913 913
914 914 def _calculate_commit_id_changes(self, old_ids, new_ids):
915 915 added = [x for x in new_ids if x not in old_ids]
916 916 common = [x for x in new_ids if x in old_ids]
917 917 removed = [x for x in old_ids if x not in new_ids]
918 918 total = new_ids
919 919 return ChangeTuple(added, common, removed, total)
920 920
921 921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
922 922
923 923 old_files = OrderedDict()
924 924 for diff_data in old_diff_data.parsed_diff:
925 925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
926 926
927 927 added_files = []
928 928 modified_files = []
929 929 removed_files = []
930 930 for diff_data in new_diff_data.parsed_diff:
931 931 new_filename = diff_data['filename']
932 932 new_hash = md5_safe(diff_data['raw_diff'])
933 933
934 934 old_hash = old_files.get(new_filename)
935 935 if not old_hash:
936 936 # file is not present in old diff, means it's added
937 937 added_files.append(new_filename)
938 938 else:
939 939 if new_hash != old_hash:
940 940 modified_files.append(new_filename)
941 941 # now remove a file from old, since we have seen it already
942 942 del old_files[new_filename]
943 943
944 944 # removed files is when there are present in old, but not in NEW,
945 945 # since we remove old files that are present in new diff, left-overs
946 946 # if any should be the removed files
947 947 removed_files.extend(old_files.keys())
948 948
949 949 return FileChangeTuple(added_files, modified_files, removed_files)
950 950
951 951 def _render_update_message(self, changes, file_changes):
952 952 """
953 953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
954 954 so it's always looking the same disregarding on which default
955 955 renderer system is using.
956 956
957 957 :param changes: changes named tuple
958 958 :param file_changes: file changes named tuple
959 959
960 960 """
961 961 new_status = ChangesetStatus.get_status_lbl(
962 962 ChangesetStatus.STATUS_UNDER_REVIEW)
963 963
964 964 changed_files = (
965 965 file_changes.added + file_changes.modified + file_changes.removed)
966 966
967 967 params = {
968 968 'under_review_label': new_status,
969 969 'added_commits': changes.added,
970 970 'removed_commits': changes.removed,
971 971 'changed_files': changed_files,
972 972 'added_files': file_changes.added,
973 973 'modified_files': file_changes.modified,
974 974 'removed_files': file_changes.removed,
975 975 }
976 976 renderer = RstTemplateRenderer()
977 977 return renderer.render('pull_request_update.mako', **params)
978 978
979 979 def edit(self, pull_request, title, description, description_renderer, user):
980 980 pull_request = self.__get_pull_request(pull_request)
981 981 old_data = pull_request.get_api_data(with_merge_state=False)
982 982 if pull_request.is_closed():
983 983 raise ValueError('This pull request is closed')
984 984 if title:
985 985 pull_request.title = title
986 986 pull_request.description = description
987 987 pull_request.updated_on = datetime.datetime.now()
988 988 pull_request.description_renderer = description_renderer
989 989 Session().add(pull_request)
990 990 self._log_audit_action(
991 991 'repo.pull_request.edit', {'old_data': old_data},
992 992 user, pull_request)
993 993
994 994 def update_reviewers(self, pull_request, reviewer_data, user):
995 995 """
996 996 Update the reviewers in the pull request
997 997
998 998 :param pull_request: the pr to update
999 999 :param reviewer_data: list of tuples
1000 1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1001 1001 """
1002 1002 pull_request = self.__get_pull_request(pull_request)
1003 1003 if pull_request.is_closed():
1004 1004 raise ValueError('This pull request is closed')
1005 1005
1006 1006 reviewers = {}
1007 1007 for user_id, reasons, mandatory, rules in reviewer_data:
1008 1008 if isinstance(user_id, (int, compat.string_types)):
1009 1009 user_id = self._get_user(user_id).user_id
1010 1010 reviewers[user_id] = {
1011 1011 'reasons': reasons, 'mandatory': mandatory}
1012 1012
1013 1013 reviewers_ids = set(reviewers.keys())
1014 1014 current_reviewers = PullRequestReviewers.query()\
1015 1015 .filter(PullRequestReviewers.pull_request ==
1016 1016 pull_request).all()
1017 1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1018 1018
1019 1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1020 1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1021 1021
1022 1022 log.debug("Adding %s reviewers", ids_to_add)
1023 1023 log.debug("Removing %s reviewers", ids_to_remove)
1024 1024 changed = False
1025 1025 added_audit_reviewers = []
1026 1026 removed_audit_reviewers = []
1027 1027
1028 1028 for uid in ids_to_add:
1029 1029 changed = True
1030 1030 _usr = self._get_user(uid)
1031 1031 reviewer = PullRequestReviewers()
1032 1032 reviewer.user = _usr
1033 1033 reviewer.pull_request = pull_request
1034 1034 reviewer.reasons = reviewers[uid]['reasons']
1035 1035 # NOTE(marcink): mandatory shouldn't be changed now
1036 1036 # reviewer.mandatory = reviewers[uid]['reasons']
1037 1037 Session().add(reviewer)
1038 1038 added_audit_reviewers.append(reviewer.get_dict())
1039 1039
1040 1040 for uid in ids_to_remove:
1041 1041 changed = True
1042 1042 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1043 1043 # that prevents and fixes cases that we added the same reviewer twice.
1044 1044 # this CAN happen due to the lack of DB checks
1045 1045 reviewers = PullRequestReviewers.query()\
1046 1046 .filter(PullRequestReviewers.user_id == uid,
1047 1047 PullRequestReviewers.pull_request == pull_request)\
1048 1048 .all()
1049 1049
1050 1050 for obj in reviewers:
1051 1051 added_audit_reviewers.append(obj.get_dict())
1052 1052 Session().delete(obj)
1053 1053
1054 1054 if changed:
1055 1055 Session().expire_all()
1056 1056 pull_request.updated_on = datetime.datetime.now()
1057 1057 Session().add(pull_request)
1058 1058
1059 1059 # finally store audit logs
1060 1060 for user_data in added_audit_reviewers:
1061 1061 self._log_audit_action(
1062 1062 'repo.pull_request.reviewer.add', {'data': user_data},
1063 1063 user, pull_request)
1064 1064 for user_data in removed_audit_reviewers:
1065 1065 self._log_audit_action(
1066 1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1067 1067 user, pull_request)
1068 1068
1069 1069 self.notify_reviewers(pull_request, ids_to_add)
1070 1070 return ids_to_add, ids_to_remove
1071 1071
1072 1072 def get_url(self, pull_request, request=None, permalink=False):
1073 1073 if not request:
1074 1074 request = get_current_request()
1075 1075
1076 1076 if permalink:
1077 1077 return request.route_url(
1078 1078 'pull_requests_global',
1079 1079 pull_request_id=pull_request.pull_request_id,)
1080 1080 else:
1081 1081 return request.route_url('pullrequest_show',
1082 1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1083 1083 pull_request_id=pull_request.pull_request_id,)
1084 1084
1085 1085 def get_shadow_clone_url(self, pull_request, request=None):
1086 1086 """
1087 1087 Returns qualified url pointing to the shadow repository. If this pull
1088 1088 request is closed there is no shadow repository and ``None`` will be
1089 1089 returned.
1090 1090 """
1091 1091 if pull_request.is_closed():
1092 1092 return None
1093 1093 else:
1094 1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1095 1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1096 1096
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications (and the matching emails) for the given
        reviewer user ids of this pull request.

        :param pull_request: pull request object the notification is about
        :param reviewers_ids: iterable of user ids to notify; no-op when empty
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # fully qualified link to the pull request itself
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: list of (commit_id, message) pairs shown
        # in the notification body
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template context shared by the subject/body renderer and the
        # notification emails
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1155 1155
1156 1156 def delete(self, pull_request, user):
1157 1157 pull_request = self.__get_pull_request(pull_request)
1158 1158 old_data = pull_request.get_api_data(with_merge_state=False)
1159 1159 self._cleanup_merge_workspace(pull_request)
1160 1160 self._log_audit_action(
1161 1161 'repo.pull_request.delete', {'old_data': old_data},
1162 1162 user, pull_request)
1163 1163 Session().delete(pull_request)
1164 1164
1165 1165 def close_pull_request(self, pull_request, user):
1166 1166 pull_request = self.__get_pull_request(pull_request)
1167 1167 self._cleanup_merge_workspace(pull_request)
1168 1168 pull_request.status = PullRequest.STATUS_CLOSED
1169 1169 pull_request.updated_on = datetime.datetime.now()
1170 1170 Session().add(pull_request)
1171 1171 self.trigger_pull_request_hook(
1172 1172 pull_request, pull_request.author, 'close')
1173 1173
1174 1174 pr_data = pull_request.get_api_data(with_merge_state=False)
1175 1175 self._log_audit_action(
1176 1176 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1177 1177
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close the pull request, leaving a status-changing comment behind.

        The recorded status is APPROVED only when the calculated review
        status already is approved; any other state closes as REJECTED.

        :param message: custom close message; a default one is generated
            from the resulting status when omitted
        :returns: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the comment event below sees the persisted status change
        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1234 1234
1235 1235 def merge_status(self, pull_request, translator=None,
1236 1236 force_shadow_repo_refresh=False):
1237 1237 _ = translator or get_current_request().translate
1238 1238
1239 1239 if not self._is_merge_enabled(pull_request):
1240 1240 return False, _('Server-side pull request merging is disabled.')
1241 1241 if pull_request.is_closed():
1242 1242 return False, _('This pull request is closed.')
1243 1243 merge_possible, msg = self._check_repo_requirements(
1244 1244 target=pull_request.target_repo, source=pull_request.source_repo,
1245 1245 translator=_)
1246 1246 if not merge_possible:
1247 1247 return merge_possible, msg
1248 1248
1249 1249 try:
1250 1250 resp = self._try_merge(
1251 1251 pull_request,
1252 1252 force_shadow_repo_refresh=force_shadow_repo_refresh)
1253 1253 log.debug("Merge response: %s", resp)
1254 1254 status = resp.possible, resp.merge_status_message
1255 1255 except NotImplementedError:
1256 1256 status = False, _('Pull request merging is not supported.')
1257 1257
1258 1258 return status
1259 1259
1260 1260 def _check_repo_requirements(self, target, source, translator):
1261 1261 """
1262 1262 Check if `target` and `source` have compatible requirements.
1263 1263
1264 1264 Currently this is just checking for largefiles.
1265 1265 """
1266 1266 _ = translator
1267 1267 target_has_largefiles = self._has_largefiles(target)
1268 1268 source_has_largefiles = self._has_largefiles(source)
1269 1269 merge_possible = True
1270 1270 message = u''
1271 1271
1272 1272 if target_has_largefiles != source_has_largefiles:
1273 1273 merge_possible = False
1274 1274 if source_has_largefiles:
1275 1275 message = _(
1276 1276 'Target repository large files support is disabled.')
1277 1277 else:
1278 1278 message = _(
1279 1279 'Source repository large files support is disabled.')
1280 1280
1281 1281 return merge_possible, message
1282 1282
1283 1283 def _has_largefiles(self, repo):
1284 1284 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1285 1285 'extensions', 'largefiles')
1286 1286 return largefiles_ui and largefiles_ui[0].active
1287 1287
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Uses the cached merge state stored on the pull request when neither
        side changed since the last simulation; otherwise (or when
        *force_shadow_repo_refresh* is set) runs a fresh dry-run merge.

        :returns: a MergeResponse describing whether the merge is possible
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # the target ref vanished (e.g. branch deleted) -> cannot merge
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state still valid: rebuild the response from the fields
            # persisted by the last _refresh_merge_state() run
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1336 1336
1337 1337 def _refresh_reference(self, reference, vcs_repository):
1338 1338 if reference.type in self.UPDATABLE_REF_TYPES:
1339 1339 name_or_id = reference.name
1340 1340 else:
1341 1341 name_or_id = reference.commit_id
1342 1342
1343 1343 refreshed_commit = vcs_repository.get_commit(name_or_id)
1344 1344 refreshed_reference = Reference(
1345 1345 reference.type, reference.name, refreshed_commit.raw_id)
1346 1346 return refreshed_reference
1347 1347
1348 1348 def _needs_merge_state_refresh(self, pull_request, target_reference):
1349 1349 return not(
1350 1350 pull_request.revisions and
1351 1351 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1352 1352 target_reference.commit_id == pull_request._last_merge_target_rev)
1353 1353
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow workspace and persist the outcome
        (source/target revs, status, shadow ref) on the pull request so later
        calls can reuse it as a cached merge state.

        :returns: the MergeResponse produced by the vcs backend
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1377 1377
1378 1378 def _workspace_id(self, pull_request):
1379 1379 workspace_id = 'pr-%s' % pull_request.pull_request_id
1380 1380 return workspace_id
1381 1381
1382 1382 def generate_repo_data(self, repo, commit_id=None, branch=None,
1383 1383 bookmark=None, translator=None):
1384 1384 from rhodecode.model.repo import RepoModel
1385 1385
1386 1386 all_refs, selected_ref = \
1387 1387 self._get_repo_pullrequest_sources(
1388 1388 repo.scm_instance(), commit_id=commit_id,
1389 1389 branch=branch, bookmark=bookmark, translator=translator)
1390 1390
1391 1391 refs_select2 = []
1392 1392 for element in all_refs:
1393 1393 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1394 1394 refs_select2.append({'text': element[1], 'children': children})
1395 1395
1396 1396 return {
1397 1397 'user': {
1398 1398 'user_id': repo.user.user_id,
1399 1399 'username': repo.user.username,
1400 1400 'firstname': repo.user.first_name,
1401 1401 'lastname': repo.user.last_name,
1402 1402 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1403 1403 },
1404 1404 'name': repo.repo_name,
1405 1405 'link': RepoModel().get_url(repo),
1406 1406 'description': h.chop_at_smart(repo.description_safe, '\n'),
1407 1407 'refs': {
1408 1408 'all_refs': all_refs,
1409 1409 'selected_ref': selected_ref,
1410 1410 'select2_refs': refs_select2
1411 1411 }
1412 1412 }
1413 1413
1414 1414 def generate_pullrequest_title(self, source, source_ref, target):
1415 1415 return u'{source}#{at_ref} to {target}'.format(
1416 1416 source=source,
1417 1417 at_ref=source_ref,
1418 1418 target=target,
1419 1419 )
1420 1420
1421 1421 def _cleanup_merge_workspace(self, pull_request):
1422 1422 # Merging related cleanup
1423 1423 repo_id = pull_request.target_repo.repo_id
1424 1424 target_scm = pull_request.target_repo.scm_instance()
1425 1425 workspace_id = self._workspace_id(pull_request)
1426 1426
1427 1427 try:
1428 1428 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1429 1429 except NotImplementedError:
1430 1430 pass
1431 1431
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :param translator: translation function; current request's translator
            is used when omitted
        :returns: tuple of (groups, selected) where groups is a list of
            ([(ref_key, ref_name), ...], group_label) pairs
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # key format: "<type>:<name>:<commit_id>"
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching either the requested
                    # commit id or the requested branch/bookmark name
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but not found -> hard error
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # fall back to the default branch when nothing was requested
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1497 1497
1498 1498 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1499 1499 hide_whitespace_changes, diff_context):
1500 1500
1501 1501 return self._get_diff_from_pr_or_version(
1502 1502 source_repo, source_ref_id, target_ref_id,
1503 1503 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1504 1504
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the vcs diff between *target_ref_id* and *source_ref_id*
        within *source_repo* (a db Repository or an scm instance).

        :param hide_whitespace_changes: passed as ignore_whitespace
        :param diff_context: number of context lines around hunks
        """

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
1535 1535
1536 1536 def _is_merge_enabled(self, pull_request):
1537 1537 return self._get_general_setting(
1538 1538 pull_request, 'rhodecode_pr_merge_enabled')
1539 1539
1540 1540 def _use_rebase_for_merging(self, pull_request):
1541 1541 repo_type = pull_request.target_repo.repo_type
1542 1542 if repo_type == 'hg':
1543 1543 return self._get_general_setting(
1544 1544 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1545 1545 elif repo_type == 'git':
1546 1546 return self._get_general_setting(
1547 1547 pull_request, 'rhodecode_git_use_rebase_for_merging')
1548 1548
1549 1549 return False
1550 1550
1551 1551 def _close_branch_before_merging(self, pull_request):
1552 1552 repo_type = pull_request.target_repo.repo_type
1553 1553 if repo_type == 'hg':
1554 1554 return self._get_general_setting(
1555 1555 pull_request, 'rhodecode_hg_close_branch_before_merging')
1556 1556 elif repo_type == 'git':
1557 1557 return self._get_general_setting(
1558 1558 pull_request, 'rhodecode_git_close_branch_before_merging')
1559 1559
1560 1560 return False
1561 1561
1562 1562 def _get_general_setting(self, pull_request, settings_key, default=False):
1563 1563 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1564 1564 settings = settings_model.get_general_settings()
1565 1565 return settings.get(settings_key, default)
1566 1566
1567 1567 def _log_audit_action(self, action, action_data, user, pull_request):
1568 1568 audit_logger.store(
1569 1569 action=action,
1570 1570 action_data=action_data,
1571 1571 user=user,
1572 1572 repo=pull_request.target_repo)
1573 1573
1574 1574 def get_reviewer_functions(self):
1575 1575 """
1576 1576 Fetches functions for validation and fetching default reviewers.
1577 1577 If available we use the EE package, else we fallback to CE
1578 1578 package functions
1579 1579 """
1580 1580 try:
1581 1581 from rc_reviewers.utils import get_default_reviewers_data
1582 1582 from rc_reviewers.utils import validate_default_reviewers
1583 1583 except ImportError:
1584 1584 from rhodecode.apps.repository.utils import get_default_reviewers_data
1585 1585 from rhodecode.apps.repository.utils import validate_default_reviewers
1586 1586
1587 1587 return get_default_reviewers_data, validate_default_reviewers
1588 1588
1589 1589
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys identifying the individual checks inside `error_details`
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # calculated review status of the PR, filled in by validate()
        self.review_status = None
        # outcome of the merge simulation (shadow repo / filesystem)
        self.merge_possible = None
        self.merge_msg = ''
        # set to True by push_error() once any check fails
        self.failed = None
        # list of [error_type, message] pairs in the order checks ran
        self.errors = []
        # per-check payload, keyed by the *_CHECK constants above
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check.

        :param error_type: severity marker, e.g. 'error' or 'warning'
        :param message: translated, human-readable message
        :param error_key: one of the *_CHECK class constants
        :param details: arbitrary payload with check-specific data
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for *pull_request*: merge permission, target
        branch rules, review status, unresolved TODOs and the merge
        simulation itself.

        :param fail_early: return right after the first failing check
        :param force_shadow_repo_refresh: force re-running the merge
            simulation instead of using the cached merge state
        :returns: a populated MergeCheck instance
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # FIX: this branch used to log the copy/pasted "approval is
            # pending" message; it actually reports a permission failure.
            log.debug("MergeCheck: cannot merge, no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            # lazy %-args so the message is only rendered when debug is on,
            # consistent with the other log calls in this module
            log.debug("MergeCheck: cannot merge, %s unresolved TODOs left.",
                      len(todos))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe the merge strategy and branch-closing behaviour that will
        apply when this pull request gets merged.

        :returns: dict mapping condition keys to {'details', 'message'}
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1736 1736
1737 1737
# Commit-level change summary: added/common/removed commit collections plus
# a total count. NOTE(review): presumably describes the delta between two
# pull-request versions — confirm against callers outside this chunk.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# File-level change summary: lists of added/modified/removed files.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
General Comments 0
You need to be logged in to leave comments. Login now