caching: add option to cache diffs for commits and pull requests....
Bartłomiej Wołyńczyk
r2685:5ff8fcc0 default
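This changeset adds an optional per-repository cache for rendered diffs of commits and pull requests, plus a small refactor of the diff node getters used by the compare view. A brief usage note before the diff itself; only the `force_recache` query flag is taken from the code below, the URL layout is illustrative:

    # Illustrative only: when diff caching is enabled for a repository,
    # a stale cached diff can be bypassed for a single request via the
    # force_recache flag read in RepoCommitsView._commit() below.
    #   GET /<repo_name>/changeset/<commit_id>?force_recache=1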
@@ -1,635 +1,641 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 26
27 27 from rhodecode.lib import helpers as h, diffs
28 28 from rhodecode.lib.utils2 import StrictAttributeDict, safe_int, datetime_to_time
29 29 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 30 from rhodecode.model import repo
31 31 from rhodecode.model import repo_group
32 32 from rhodecode.model import user_group
33 33 from rhodecode.model import user
34 34 from rhodecode.model.db import User
35 35 from rhodecode.model.scm import ScmModel
36 from rhodecode.model.settings import VcsSettingsModel
36 37
37 38 log = logging.getLogger(__name__)
38 39
39 40
40 41 ADMIN_PREFIX = '/_admin'
41 42 STATIC_FILE_PREFIX = '/_static'
42 43
43 44 URL_NAME_REQUIREMENTS = {
44 45 # group names can have a slash in them, but they must not end with a slash
45 46 'group_name': r'.*?[^/]',
46 47 'repo_group_name': r'.*?[^/]',
47 48 # repo names can have a slash in them, but they must not end with a slash
48 49 'repo_name': r'.*?[^/]',
49 50 # file path eats up everything at the end
50 51 'f_path': r'.*',
51 52 # reference types
52 53 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
53 54 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
54 55 }
55 56
56 57
57 58 def add_route_with_slash(config, name, pattern, **kw):
58 59 config.add_route(name, pattern, **kw)
59 60 if not pattern.endswith('/'):
60 61 config.add_route(name + '_slash', pattern + '/', **kw)
61 62
62 63
63 64 def add_route_requirements(route_path, requirements=URL_NAME_REQUIREMENTS):
64 65 """
65 66 Adds regex requirements to pyramid routes using a mapping dict
66 67 e.g::
67 68 add_route_requirements('{repo_name}/settings')
68 69 """
69 70 for key, regex in requirements.items():
70 71 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
71 72 return route_path
72 73
73 74
74 75 def get_format_ref_id(repo):
75 76 """Returns a `repo` specific reference formatter function"""
76 77 if h.is_svn(repo):
77 78 return _format_ref_id_svn
78 79 else:
79 80 return _format_ref_id
80 81
81 82
82 83 def _format_ref_id(name, raw_id):
83 84 """Default formatting of a given reference `name`"""
84 85 return name
85 86
86 87
87 88 def _format_ref_id_svn(name, raw_id):
88 89 """Special way of formatting a reference for Subversion including path"""
89 90 return '%s@%s' % (name, raw_id)
90 91
91 92
92 93 class TemplateArgs(StrictAttributeDict):
93 94 pass
94 95
95 96
96 97 class BaseAppView(object):
97 98
98 99 def __init__(self, context, request):
99 100 self.request = request
100 101 self.context = context
101 102 self.session = request.session
102 103 self._rhodecode_user = request.user # auth user
103 104 self._rhodecode_db_user = self._rhodecode_user.get_instance()
104 105 self._maybe_needs_password_change(
105 106 request.matched_route.name, self._rhodecode_db_user)
106 107
107 108 def _maybe_needs_password_change(self, view_name, user_obj):
108 109 log.debug('Checking if user %s needs password change on view %s',
109 110 user_obj, view_name)
110 111 skip_user_views = [
111 112 'logout', 'login',
112 113 'my_account_password', 'my_account_password_update'
113 114 ]
114 115
115 116 if not user_obj:
116 117 return
117 118
118 119 if user_obj.username == User.DEFAULT_USER:
119 120 return
120 121
121 122 now = time.time()
122 123 should_change = user_obj.user_data.get('force_password_change')
123 124 change_after = safe_int(should_change) or 0
124 125 if should_change and now > change_after:
125 126 log.debug('User %s requires password change', user_obj)
126 127 h.flash('You are required to change your password', 'warning',
127 128 ignore_duplicate=True)
128 129
129 130 if view_name not in skip_user_views:
130 131 raise HTTPFound(
131 132 self.request.route_path('my_account_password'))
132 133
133 134 def _log_creation_exception(self, e, repo_name):
134 135 _ = self.request.translate
135 136 reason = None
136 137 if len(e.args) == 2:
137 138 reason = e.args[1]
138 139
139 140 if reason == 'INVALID_CERTIFICATE':
140 141 log.exception(
141 142 'Exception creating a repository: invalid certificate')
142 143 msg = (_('Error creating repository %s: invalid certificate')
143 144 % repo_name)
144 145 else:
145 146 log.exception("Exception creating a repository")
146 147 msg = (_('Error creating repository %s')
147 148 % repo_name)
148 149 return msg
149 150
150 151 def _get_local_tmpl_context(self, include_app_defaults=True):
151 152 c = TemplateArgs()
152 153 c.auth_user = self.request.user
153 154 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
154 155 c.rhodecode_user = self.request.user
155 156
156 157 if include_app_defaults:
157 158 from rhodecode.lib.base import attach_context_attributes
158 159 attach_context_attributes(c, self.request, self.request.user.user_id)
159 160
160 161 return c
161 162
162 163 def _get_template_context(self, tmpl_args, **kwargs):
163 164
164 165 local_tmpl_args = {
165 166 'defaults': {},
166 167 'errors': {},
167 168 'c': tmpl_args
168 169 }
169 170 local_tmpl_args.update(kwargs)
170 171 return local_tmpl_args
171 172
172 173 def load_default_context(self):
173 174 """
174 175 example:
175 176
176 177 def load_default_context(self):
177 178 c = self._get_local_tmpl_context()
178 179 c.custom_var = 'foobar'
179 180
180 181 return c
181 182 """
182 183 raise NotImplementedError('Needs implementation in view class')
183 184
184 185
185 186 class RepoAppView(BaseAppView):
186 187
187 188 def __init__(self, context, request):
188 189 super(RepoAppView, self).__init__(context, request)
189 190 self.db_repo = request.db_repo
190 191 self.db_repo_name = self.db_repo.repo_name
191 192 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
192 193
193 194 def _handle_missing_requirements(self, error):
194 195 log.error(
195 196 'Requirements are missing for repository %s: %s',
196 197 self.db_repo_name, error.message)
197 198
198 199 def _get_local_tmpl_context(self, include_app_defaults=True):
199 200 _ = self.request.translate
200 201 c = super(RepoAppView, self)._get_local_tmpl_context(
201 202 include_app_defaults=include_app_defaults)
202 203
203 204 # register common vars for this type of view
204 205 c.rhodecode_db_repo = self.db_repo
205 206 c.repo_name = self.db_repo_name
206 207 c.repository_pull_requests = self.db_repo_pull_requests
207 208 self.path_filter = PathFilter(None)
208 209
209 210 c.repository_requirements_missing = {}
210 211 try:
211 212 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
212 213 if self.rhodecode_vcs_repo:
213 214 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
214 215 c.auth_user.username)
215 216 self.path_filter = PathFilter(path_perms)
216 217 except RepositoryRequirementError as e:
217 218 c.repository_requirements_missing = {'error': str(e)}
218 219 self._handle_missing_requirements(e)
219 220 self.rhodecode_vcs_repo = None
220 221
221 222 c.path_filter = self.path_filter # used by atom_feed_entry.mako
222 223
223 224 if self.rhodecode_vcs_repo is None:
224 225 # unable to fetch this repo as vcs instance, report back to user
225 226 h.flash(_(
226 227 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
227 228 "Please check if it exist, or is not damaged.") %
228 229 {'repo_name': c.repo_name},
229 230 category='error', ignore_duplicate=True)
230 231 if c.repository_requirements_missing:
231 232 route = self.request.matched_route.name
232 233 if route.startswith(('edit_repo', 'repo_summary')):
233 234 # allow summary and edit repo on missing requirements
234 235 return c
235 236
236 237 raise HTTPFound(
237 238 h.route_path('repo_summary', repo_name=self.db_repo_name))
238 239
239 240 else: # redirect if we don't show missing requirements
240 241 raise HTTPFound(h.route_path('home'))
241 242
242 243 return c
243 244
244 245 def _get_f_path_unchecked(self, matchdict, default=None):
245 246 """
246 247 Should only be used by redirects, everything else should call _get_f_path
247 248 """
248 249 f_path = matchdict.get('f_path')
249 250 if f_path:
250 251 # fix for multiple initial slashes that causes errors for GIT
251 252 return f_path.lstrip('/')
252 253
253 254 return default
254 255
255 256 def _get_f_path(self, matchdict, default=None):
256 257 f_path_match = self._get_f_path_unchecked(matchdict, default)
257 258 return self.path_filter.assert_path_permissions(f_path_match)
258 259
260 def _get_general_setting(self, target_repo, settings_key, default=False):
261 settings_model = VcsSettingsModel(repo=target_repo)
262 settings = settings_model.get_general_settings()
263 return settings.get(settings_key, default)
264
259 265
260 266 class PathFilter(object):
261 267
262 268 # Expects an instance of BasePathPermissionChecker or None
263 269 def __init__(self, permission_checker):
264 270 self.permission_checker = permission_checker
265 271
266 272 def assert_path_permissions(self, path):
267 273 if path and self.permission_checker and not self.permission_checker.has_access(path):
268 274 raise HTTPForbidden()
269 275 return path
270 276
271 277 def filter_patchset(self, patchset):
272 278 if not self.permission_checker or not patchset:
273 279 return patchset, False
274 280 had_filtered = False
275 281 filtered_patchset = []
276 282 for patch in patchset:
277 283 filename = patch.get('filename', None)
278 284 if not filename or self.permission_checker.has_access(filename):
279 285 filtered_patchset.append(patch)
280 286 else:
281 287 had_filtered = True
282 288 if had_filtered:
283 289 if isinstance(patchset, diffs.LimitedDiffContainer):
284 290 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
285 291 return filtered_patchset, True
286 292 else:
287 293 return patchset, False
288 294
289 295 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
290 296 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
291 297 result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
292 298 result.has_hidden_changes = has_hidden_changes
293 299 return result
294 300
295 301 def get_raw_patch(self, diff_processor):
296 302 if self.permission_checker is None:
297 303 return diff_processor.as_raw()
298 304 elif self.permission_checker.has_full_access:
299 305 return diff_processor.as_raw()
300 306 else:
301 307 return '# Repository has user-specific filters, raw patch generation is disabled.'
302 308
303 309 @property
304 310 def is_enabled(self):
305 311 return self.permission_checker is not None
306 312
307 313
308 314 class RepoGroupAppView(BaseAppView):
309 315 def __init__(self, context, request):
310 316 super(RepoGroupAppView, self).__init__(context, request)
311 317 self.db_repo_group = request.db_repo_group
312 318 self.db_repo_group_name = self.db_repo_group.group_name
313 319
314 320 def _revoke_perms_on_yourself(self, form_result):
315 321 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
316 322 form_result['perm_updates'])
317 323 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
318 324 form_result['perm_additions'])
319 325 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
320 326 form_result['perm_deletions'])
321 327 admin_perm = 'group.admin'
322 328 if _updates and _updates[0][1] != admin_perm or \
323 329 _additions and _additions[0][1] != admin_perm or \
324 330 _deletions and _deletions[0][1] != admin_perm:
325 331 return True
326 332 return False
327 333
328 334
329 335 class UserGroupAppView(BaseAppView):
330 336 def __init__(self, context, request):
331 337 super(UserGroupAppView, self).__init__(context, request)
332 338 self.db_user_group = request.db_user_group
333 339 self.db_user_group_name = self.db_user_group.users_group_name
334 340
335 341
336 342 class UserAppView(BaseAppView):
337 343 def __init__(self, context, request):
338 344 super(UserAppView, self).__init__(context, request)
339 345 self.db_user = request.db_user
340 346 self.db_user_id = self.db_user.user_id
341 347
342 348 _ = self.request.translate
343 349 if not request.db_user_supports_default:
344 350 if self.db_user.username == User.DEFAULT_USER:
345 351 h.flash(_("Editing user `{}` is disabled.".format(
346 352 User.DEFAULT_USER)), category='warning')
347 353 raise HTTPFound(h.route_path('users'))
348 354
349 355
350 356 class DataGridAppView(object):
351 357 """
352 358 Common class to have re-usable grid rendering components
353 359 """
354 360
355 361 def _extract_ordering(self, request, column_map=None):
356 362 column_map = column_map or {}
357 363 column_index = safe_int(request.GET.get('order[0][column]'))
358 364 order_dir = request.GET.get(
359 365 'order[0][dir]', 'desc')
360 366 order_by = request.GET.get(
361 367 'columns[%s][data][sort]' % column_index, 'name_raw')
362 368
363 369 # translate datatable to DB columns
364 370 order_by = column_map.get(order_by) or order_by
365 371
366 372 search_q = request.GET.get('search[value]')
367 373 return search_q, order_by, order_dir
368 374
369 375 def _extract_chunk(self, request):
370 376 start = safe_int(request.GET.get('start'), 0)
371 377 length = safe_int(request.GET.get('length'), 25)
372 378 draw = safe_int(request.GET.get('draw'))
373 379 return draw, start, length
374 380
375 381 def _get_order_col(self, order_by, model):
376 382 if isinstance(order_by, basestring):
377 383 try:
378 384 return operator.attrgetter(order_by)(model)
379 385 except AttributeError:
380 386 return None
381 387 else:
382 388 return order_by
383 389
384 390
385 391 class BaseReferencesView(RepoAppView):
386 392 """
387 393 Base for reference view for branches, tags and bookmarks.
388 394 """
389 395 def load_default_context(self):
390 396 c = self._get_local_tmpl_context()
391 397
392 398
393 399 return c
394 400
395 401 def load_refs_context(self, ref_items, partials_template):
396 402 _render = self.request.get_partial_renderer(partials_template)
397 403 pre_load = ["author", "date", "message"]
398 404
399 405 is_svn = h.is_svn(self.rhodecode_vcs_repo)
400 406 is_hg = h.is_hg(self.rhodecode_vcs_repo)
401 407
402 408 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
403 409
404 410 closed_refs = {}
405 411 if is_hg:
406 412 closed_refs = self.rhodecode_vcs_repo.branches_closed
407 413
408 414 data = []
409 415 for ref_name, commit_id in ref_items:
410 416 commit = self.rhodecode_vcs_repo.get_commit(
411 417 commit_id=commit_id, pre_load=pre_load)
412 418 closed = ref_name in closed_refs
413 419
414 420 # TODO: johbo: Unify generation of reference links
415 421 use_commit_id = '/' in ref_name or is_svn
416 422
417 423 if use_commit_id:
418 424 files_url = h.route_path(
419 425 'repo_files',
420 426 repo_name=self.db_repo_name,
421 427 f_path=ref_name if is_svn else '',
422 428 commit_id=commit_id)
423 429
424 430 else:
425 431 files_url = h.route_path(
426 432 'repo_files',
427 433 repo_name=self.db_repo_name,
428 434 f_path=ref_name if is_svn else '',
429 435 commit_id=ref_name,
430 436 _query=dict(at=ref_name))
431 437
432 438 data.append({
433 439 "name": _render('name', ref_name, files_url, closed),
434 440 "name_raw": ref_name,
435 441 "date": _render('date', commit.date),
436 442 "date_raw": datetime_to_time(commit.date),
437 443 "author": _render('author', commit.author),
438 444 "commit": _render(
439 445 'commit', commit.message, commit.raw_id, commit.idx),
440 446 "commit_raw": commit.idx,
441 447 "compare": _render(
442 448 'compare', format_ref_id(ref_name, commit.raw_id)),
443 449 })
444 450
445 451 return data
446 452
447 453
448 454 class RepoRoutePredicate(object):
449 455 def __init__(self, val, config):
450 456 self.val = val
451 457
452 458 def text(self):
453 459 return 'repo_route = %s' % self.val
454 460
455 461 phash = text
456 462
457 463 def __call__(self, info, request):
458 464
459 465 if hasattr(request, 'vcs_call'):
460 466 # skip vcs calls
461 467 return
462 468
463 469 repo_name = info['match']['repo_name']
464 470 repo_model = repo.RepoModel()
465 471 by_name_match = repo_model.get_by_repo_name(repo_name, cache=True)
466 472
467 473 def redirect_if_creating(db_repo):
468 474 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
469 475 raise HTTPFound(
470 476 request.route_path('repo_creating',
471 477 repo_name=db_repo.repo_name))
472 478
473 479 if by_name_match:
474 480 # register this as request object we can re-use later
475 481 request.db_repo = by_name_match
476 482 redirect_if_creating(by_name_match)
477 483 return True
478 484
479 485 by_id_match = repo_model.get_repo_by_id(repo_name)
480 486 if by_id_match:
481 487 request.db_repo = by_id_match
482 488 redirect_if_creating(by_id_match)
483 489 return True
484 490
485 491 return False
486 492
487 493
488 494 class RepoTypeRoutePredicate(object):
489 495 def __init__(self, val, config):
490 496 self.val = val or ['hg', 'git', 'svn']
491 497
492 498 def text(self):
493 499 return 'repo_accepted_type = %s' % self.val
494 500
495 501 phash = text
496 502
497 503 def __call__(self, info, request):
498 504 if hasattr(request, 'vcs_call'):
499 505 # skip vcs calls
500 506 return
501 507
502 508 rhodecode_db_repo = request.db_repo
503 509
504 510 log.debug(
505 511 '%s checking repo type for %s in %s',
506 512 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
507 513
508 514 if rhodecode_db_repo.repo_type in self.val:
509 515 return True
510 516 else:
511 517 log.warning('Current view is not supported for repo type:%s',
512 518 rhodecode_db_repo.repo_type)
513 519 #
514 520 # h.flash(h.literal(
515 521 # _('Action not supported for %s.' % rhodecode_repo.alias)),
516 522 # category='warning')
517 523 # return redirect(
518 524 # route_path('repo_summary', repo_name=cls.rhodecode_db_repo.repo_name))
519 525
520 526 return False
521 527
522 528
523 529 class RepoGroupRoutePredicate(object):
524 530 def __init__(self, val, config):
525 531 self.val = val
526 532
527 533 def text(self):
528 534 return 'repo_group_route = %s' % self.val
529 535
530 536 phash = text
531 537
532 538 def __call__(self, info, request):
533 539 if hasattr(request, 'vcs_call'):
534 540 # skip vcs calls
535 541 return
536 542
537 543 repo_group_name = info['match']['repo_group_name']
538 544 repo_group_model = repo_group.RepoGroupModel()
539 545 by_name_match = repo_group_model.get_by_group_name(
540 546 repo_group_name, cache=True)
541 547
542 548 if by_name_match:
543 549 # register this as request object we can re-use later
544 550 request.db_repo_group = by_name_match
545 551 return True
546 552
547 553 return False
548 554
549 555
550 556 class UserGroupRoutePredicate(object):
551 557 def __init__(self, val, config):
552 558 self.val = val
553 559
554 560 def text(self):
555 561 return 'user_group_route = %s' % self.val
556 562
557 563 phash = text
558 564
559 565 def __call__(self, info, request):
560 566 if hasattr(request, 'vcs_call'):
561 567 # skip vcs calls
562 568 return
563 569
564 570 user_group_id = info['match']['user_group_id']
565 571 user_group_model = user_group.UserGroup()
566 572 by_id_match = user_group_model.get(
567 573 user_group_id, cache=True)
568 574
569 575 if by_id_match:
570 576 # register this as request object we can re-use later
571 577 request.db_user_group = by_id_match
572 578 return True
573 579
574 580 return False
575 581
576 582
577 583 class UserRoutePredicateBase(object):
578 584 supports_default = None
579 585
580 586 def __init__(self, val, config):
581 587 self.val = val
582 588
583 589 def text(self):
584 590 raise NotImplementedError()
585 591
586 592 def __call__(self, info, request):
587 593 if hasattr(request, 'vcs_call'):
588 594 # skip vcs calls
589 595 return
590 596
591 597 user_id = info['match']['user_id']
592 598 user_model = user.User()
593 599 by_id_match = user_model.get(
594 600 user_id, cache=True)
595 601
596 602 if by_id_match:
597 603 # register this as request object we can re-use later
598 604 request.db_user = by_id_match
599 605 request.db_user_supports_default = self.supports_default
600 606 return True
601 607
602 608 return False
603 609
604 610
605 611 class UserRoutePredicate(UserRoutePredicateBase):
606 612 supports_default = False
607 613
608 614 def text(self):
609 615 return 'user_route = %s' % self.val
610 616
611 617 phash = text
612 618
613 619
614 620 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
615 621 supports_default = True
616 622
617 623 def text(self):
618 624 return 'user_with_default_route = %s' % self.val
619 625
620 626 phash = text
621 627
622 628
623 629 def includeme(config):
624 630 config.add_route_predicate(
625 631 'repo_route', RepoRoutePredicate)
626 632 config.add_route_predicate(
627 633 'repo_accepted_types', RepoTypeRoutePredicate)
628 634 config.add_route_predicate(
629 635 'repo_group_route', RepoGroupRoutePredicate)
630 636 config.add_route_predicate(
631 637 'user_group_route', UserGroupRoutePredicate)
632 638 config.add_route_predicate(
633 639 'user_route_with_default', UserRouteWithDefaultPredicate)
634 640 config.add_route_predicate(
635 641 'user_route', UserRoutePredicate) No newline at end of file
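The `_get_general_setting` helper added to `RepoAppView` above exposes repository-scoped general VCS settings to every repo view. A minimal sketch of how a subclass can use it; the view class and method are hypothetical, while the `rhodecode_diff_cache` key comes from this changeset:

    # Sketch only: reading a repo-scoped general setting through the
    # helper added above; mirrors the caching check added to the commits
    # view further down.
    from rhodecode.apps._base import RepoAppView

    class MyRepoView(RepoAppView):

        def _diff_cache_enabled(self):
            # missing keys fall back to the supplied default
            return self._get_general_setting(
                self.db_repo, 'rhodecode_diff_cache', default=False)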
@@ -1,562 +1,589 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode.apps._base import RepoAppView
31 31
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35 35
36 36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.diffs import cache_diff, load_cached_diff, diff_cache_exist
37 38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
38 39 import rhodecode.lib.helpers as h
39 from rhodecode.lib.utils2 import safe_unicode
40 from rhodecode.lib.utils2 import safe_unicode, str2bool
40 41 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 42 from rhodecode.lib.vcs.exceptions import (
42 RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError)
43 RepositoryError, CommitDoesNotExistError)
43 44 from rhodecode.model.db import ChangesetComment, ChangesetStatus
44 45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 46 from rhodecode.model.comment import CommentsModel
46 47 from rhodecode.model.meta import Session
47
48 from rhodecode.model.settings import VcsSettingsModel
48 49
49 50 log = logging.getLogger(__name__)
50 51
51 52
52 53 def _update_with_GET(params, request):
53 54 for k in ['diff1', 'diff2', 'diff']:
54 55 params[k] += request.GET.getall(k)
55 56
56 57
57 58 def get_ignore_ws(fid, request):
58 59 ig_ws_global = request.GET.get('ignorews')
59 60 ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid))
60 61 if ig_ws:
61 62 try:
62 63 return int(ig_ws[0].split(':')[-1])
63 64 except Exception:
64 65 pass
65 66 return ig_ws_global
66 67
67 68
68 69 def _ignorews_url(request, fileid=None):
69 70 _ = request.translate
70 71 fileid = str(fileid) if fileid else None
71 72 params = collections.defaultdict(list)
72 73 _update_with_GET(params, request)
73 74 label = _('Show whitespace')
74 75 tooltiplbl = _('Show whitespace for all diffs')
75 76 ig_ws = get_ignore_ws(fileid, request)
76 77 ln_ctx = get_line_ctx(fileid, request)
77 78
78 79 if ig_ws is None:
79 80 params['ignorews'] += [1]
80 81 label = _('Ignore whitespace')
81 82 tooltiplbl = _('Ignore whitespace for all diffs')
82 83 ctx_key = 'context'
83 84 ctx_val = ln_ctx
84 85
85 86 # if we have passed in ln_ctx pass it along to our params
86 87 if ln_ctx:
87 88 params[ctx_key] += [ctx_val]
88 89
89 90 if fileid:
90 91 params['anchor'] = 'a_' + fileid
91 92 return h.link_to(label, request.current_route_path(_query=params),
92 93 title=tooltiplbl, class_='tooltip')
93 94
94 95
95 96 def get_line_ctx(fid, request):
96 97 ln_ctx_global = request.GET.get('context')
97 98 if fid:
98 99 ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid))
99 100 else:
100 101 _ln_ctx = filter(lambda k: k.startswith('C'), request.GET)
101 102 ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
102 103 if ln_ctx:
103 104 ln_ctx = [ln_ctx]
104 105
105 106 if ln_ctx:
106 107 retval = ln_ctx[0].split(':')[-1]
107 108 else:
108 109 retval = ln_ctx_global
109 110
110 111 try:
111 112 return int(retval)
112 113 except Exception:
113 114 return 3
114 115
115 116
116 117 def _context_url(request, fileid=None):
117 118 """
118 119 Generates a url for context lines.
119 120
120 121 :param fileid:
121 122 """
122 123
123 124 _ = request.translate
124 125 fileid = str(fileid) if fileid else None
125 126 ig_ws = get_ignore_ws(fileid, request)
126 127 ln_ctx = (get_line_ctx(fileid, request) or 3) * 2
127 128
128 129 params = collections.defaultdict(list)
129 130 _update_with_GET(params, request)
130 131
131 132 if ln_ctx > 0:
132 133 params['context'] += [ln_ctx]
133 134
134 135 if ig_ws:
135 136 ig_ws_key = 'ignorews'
136 137 ig_ws_val = 1
137 138 params[ig_ws_key] += [ig_ws_val]
138 139
139 140 lbl = _('Increase context')
140 141 tooltiplbl = _('Increase context for all diffs')
141 142
142 143 if fileid:
143 144 params['anchor'] = 'a_' + fileid
144 145 return h.link_to(lbl, request.current_route_path(_query=params),
145 146 title=tooltiplbl, class_='tooltip')
146 147
147 148
148 149 class RepoCommitsView(RepoAppView):
149 150 def load_default_context(self):
150 151 c = self._get_local_tmpl_context(include_app_defaults=True)
151 152 c.rhodecode_repo = self.rhodecode_vcs_repo
152 153
153 154 return c
154 155
156 def _is_diff_cache_enabled(self, target_repo):
157 caching_enabled = self._get_general_setting(
158 target_repo, 'rhodecode_diff_cache')
159 log.debug('Diff caching enabled: %s', caching_enabled)
160 return caching_enabled
161
155 162 def _commit(self, commit_id_range, method):
156 163 _ = self.request.translate
157 164 c = self.load_default_context()
158 165 c.ignorews_url = _ignorews_url
159 166 c.context_url = _context_url
160 167 c.fulldiff = self.request.GET.get('fulldiff')
161 168
162 169 # fetch global flags of ignore ws or context lines
163 170 context_lcl = get_line_ctx('', self.request)
164 171 ign_whitespace_lcl = get_ignore_ws('', self.request)
165 172
166 173 # diff_limit will cut off the whole diff if the limit is applied
167 174 # otherwise it will just hide the big files from the front-end
168 175 diff_limit = c.visual.cut_off_limit_diff
169 176 file_limit = c.visual.cut_off_limit_file
170 177
171 178 # get ranges of commit ids if present
172 179 commit_range = commit_id_range.split('...')[:2]
173 180
174 181 try:
175 182 pre_load = ['affected_files', 'author', 'branch', 'date',
176 183 'message', 'parents']
177 184
178 185 if len(commit_range) == 2:
179 186 commits = self.rhodecode_vcs_repo.get_commits(
180 187 start_id=commit_range[0], end_id=commit_range[1],
181 188 pre_load=pre_load)
182 189 commits = list(commits)
183 190 else:
184 191 commits = [self.rhodecode_vcs_repo.get_commit(
185 192 commit_id=commit_id_range, pre_load=pre_load)]
186 193
187 194 c.commit_ranges = commits
188 195 if not c.commit_ranges:
189 196 raise RepositoryError(
190 197 'The commit range returned an empty result')
191 198 except CommitDoesNotExistError:
192 199 msg = _('No such commit exists for this repository')
193 200 h.flash(msg, category='error')
194 201 raise HTTPNotFound()
195 202 except Exception:
196 203 log.exception("General failure")
197 204 raise HTTPNotFound()
198 205
199 206 c.changes = OrderedDict()
200 207 c.lines_added = 0
201 208 c.lines_deleted = 0
202 209
203 210 # auto collapse if we have more than limit
204 211 collapse_limit = diffs.DiffProcessor._collapse_commits_over
205 212 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
206 213
207 214 c.commit_statuses = ChangesetStatus.STATUSES
208 215 c.inline_comments = []
209 216 c.files = []
210 217
211 218 c.statuses = []
212 219 c.comments = []
213 220 c.unresolved_comments = []
214 221 if len(c.commit_ranges) == 1:
215 222 commit = c.commit_ranges[0]
216 223 c.comments = CommentsModel().get_comments(
217 224 self.db_repo.repo_id,
218 225 revision=commit.raw_id)
219 226 c.statuses.append(ChangesetStatusModel().get_status(
220 227 self.db_repo.repo_id, commit.raw_id))
221 228 # comments from PR
222 229 statuses = ChangesetStatusModel().get_statuses(
223 230 self.db_repo.repo_id, commit.raw_id,
224 231 with_revisions=True)
225 232 prs = set(st.pull_request for st in statuses
226 233 if st.pull_request is not None)
227 234 # from associated statuses, check the pull requests, and
228 235 # show comments from them
229 236 for pr in prs:
230 237 c.comments.extend(pr.comments)
231 238
232 239 c.unresolved_comments = CommentsModel()\
233 240 .get_commit_unresolved_todos(commit.raw_id)
234 241
235 242 diff = None
236 243 # Iterate over ranges (default commit view is always one commit)
237 244 for commit in c.commit_ranges:
238 245 c.changes[commit.raw_id] = []
239 246
240 247 commit2 = commit
241 248 commit1 = commit.parents[0] if commit.parents else EmptyCommit()
242 249
243 _diff = self.rhodecode_vcs_repo.get_diff(
244 commit1, commit2,
245 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
246 diff_processor = diffs.DiffProcessor(
247 _diff, format='newdiff', diff_limit=diff_limit,
248 file_limit=file_limit, show_full_diff=c.fulldiff)
249
250 commit_changes = OrderedDict()
251 250 if method == 'show':
252 _parsed = diff_processor.prepare()
253 c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
254
255 _parsed = diff_processor.prepare()
256
257 def _node_getter(commit):
258 def get_node(fname):
259 try:
260 return commit.get_node(fname)
261 except NodeDoesNotExistError:
262 return None
263 return get_node
264
265 251 inline_comments = CommentsModel().get_inline_comments(
266 252 self.db_repo.repo_id, revision=commit.raw_id)
267 253 c.inline_cnt = CommentsModel().get_inline_comments_count(
268 254 inline_comments)
255 c.inline_comments = inline_comments
269 256
270 diffset = codeblocks.DiffSet(
271 repo_name=self.db_repo_name,
272 source_node_getter=_node_getter(commit1),
273 target_node_getter=_node_getter(commit2),
274 comments=inline_comments)
275 diffset = self.path_filter.render_patchset_filtered(
276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
257 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
258 self.db_repo)
259 cache_file_path = diff_cache_exist(
260 cache_path, 'diff', commit.raw_id,
261 ign_whitespace_lcl, context_lcl, c.fulldiff)
262
263 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
264 force_recache = str2bool(self.request.GET.get('force_recache'))
265
266 cached_diff = None
267 if caching_enabled:
268 cached_diff = load_cached_diff(cache_file_path)
277 269
270 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
271 if not force_recache and has_proper_diff_cache:
272 diffset = cached_diff['diff']
273 else:
274 vcs_diff = self.rhodecode_vcs_repo.get_diff(
275 commit1, commit2,
276 ignore_whitespace=ign_whitespace_lcl,
277 context=context_lcl)
278
279 diff_processor = diffs.DiffProcessor(
280 vcs_diff, format='newdiff', diff_limit=diff_limit,
281 file_limit=file_limit, show_full_diff=c.fulldiff)
282
283 _parsed = diff_processor.prepare()
284
285 diffset = codeblocks.DiffSet(
286 repo_name=self.db_repo_name,
287 source_node_getter=codeblocks.diffset_node_getter(commit1),
288 target_node_getter=codeblocks.diffset_node_getter(commit2))
289
290 diffset = self.path_filter.render_patchset_filtered(
291 diffset, _parsed, commit1.raw_id, commit2.raw_id)
292
293 # save cached diff
294 if caching_enabled:
295 cache_diff(cache_file_path, diffset, None)
296
297 c.limited_diff = diffset.limited_diff
278 298 c.changes[commit.raw_id] = diffset
279 299 else:
300 # TODO(marcink): no cache usage here...
301 _diff = self.rhodecode_vcs_repo.get_diff(
302 commit1, commit2,
303 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
304 diff_processor = diffs.DiffProcessor(
305 _diff, format='newdiff', diff_limit=diff_limit,
306 file_limit=file_limit, show_full_diff=c.fulldiff)
280 307 # downloads/raw we only need RAW diff nothing else
281 308 diff = self.path_filter.get_raw_patch(diff_processor)
282 309 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
283 310
284 311 # sort comments by how they were generated
285 312 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
286 313
287 314 if len(c.commit_ranges) == 1:
288 315 c.commit = c.commit_ranges[0]
289 316 c.parent_tmpl = ''.join(
290 317 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
291 318
292 319 if method == 'download':
293 320 response = Response(diff)
294 321 response.content_type = 'text/plain'
295 322 response.content_disposition = (
296 323 'attachment; filename=%s.diff' % commit_id_range[:12])
297 324 return response
298 325 elif method == 'patch':
299 326 c.diff = safe_unicode(diff)
300 327 patch = render(
301 328 'rhodecode:templates/changeset/patch_changeset.mako',
302 329 self._get_template_context(c), self.request)
303 330 response = Response(patch)
304 331 response.content_type = 'text/plain'
305 332 return response
306 333 elif method == 'raw':
307 334 response = Response(diff)
308 335 response.content_type = 'text/plain'
309 336 return response
310 337 elif method == 'show':
311 338 if len(c.commit_ranges) == 1:
312 339 html = render(
313 340 'rhodecode:templates/changeset/changeset.mako',
314 341 self._get_template_context(c), self.request)
315 342 return Response(html)
316 343 else:
317 344 c.ancestor = None
318 345 c.target_repo = self.db_repo
319 346 html = render(
320 347 'rhodecode:templates/changeset/changeset_range.mako',
321 348 self._get_template_context(c), self.request)
322 349 return Response(html)
323 350
324 351 raise HTTPBadRequest()
325 352
326 353 @LoginRequired()
327 354 @HasRepoPermissionAnyDecorator(
328 355 'repository.read', 'repository.write', 'repository.admin')
329 356 @view_config(
330 357 route_name='repo_commit', request_method='GET',
331 358 renderer=None)
332 359 def repo_commit_show(self):
333 360 commit_id = self.request.matchdict['commit_id']
334 361 return self._commit(commit_id, method='show')
335 362
336 363 @LoginRequired()
337 364 @HasRepoPermissionAnyDecorator(
338 365 'repository.read', 'repository.write', 'repository.admin')
339 366 @view_config(
340 367 route_name='repo_commit_raw', request_method='GET',
341 368 renderer=None)
342 369 @view_config(
343 370 route_name='repo_commit_raw_deprecated', request_method='GET',
344 371 renderer=None)
345 372 def repo_commit_raw(self):
346 373 commit_id = self.request.matchdict['commit_id']
347 374 return self._commit(commit_id, method='raw')
348 375
349 376 @LoginRequired()
350 377 @HasRepoPermissionAnyDecorator(
351 378 'repository.read', 'repository.write', 'repository.admin')
352 379 @view_config(
353 380 route_name='repo_commit_patch', request_method='GET',
354 381 renderer=None)
355 382 def repo_commit_patch(self):
356 383 commit_id = self.request.matchdict['commit_id']
357 384 return self._commit(commit_id, method='patch')
358 385
359 386 @LoginRequired()
360 387 @HasRepoPermissionAnyDecorator(
361 388 'repository.read', 'repository.write', 'repository.admin')
362 389 @view_config(
363 390 route_name='repo_commit_download', request_method='GET',
364 391 renderer=None)
365 392 def repo_commit_download(self):
366 393 commit_id = self.request.matchdict['commit_id']
367 394 return self._commit(commit_id, method='download')
368 395
369 396 @LoginRequired()
370 397 @NotAnonymous()
371 398 @HasRepoPermissionAnyDecorator(
372 399 'repository.read', 'repository.write', 'repository.admin')
373 400 @CSRFRequired()
374 401 @view_config(
375 402 route_name='repo_commit_comment_create', request_method='POST',
376 403 renderer='json_ext')
377 404 def repo_commit_comment_create(self):
378 405 _ = self.request.translate
379 406 commit_id = self.request.matchdict['commit_id']
380 407
381 408 c = self.load_default_context()
382 409 status = self.request.POST.get('changeset_status', None)
383 410 text = self.request.POST.get('text')
384 411 comment_type = self.request.POST.get('comment_type')
385 412 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
386 413
387 414 if status:
388 415 text = text or (_('Status change %(transition_icon)s %(status)s')
389 416 % {'transition_icon': '>',
390 417 'status': ChangesetStatus.get_status_lbl(status)})
391 418
392 419 multi_commit_ids = []
393 420 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
394 421 if _commit_id not in ['', None, EmptyCommit.raw_id]:
395 422 if _commit_id not in multi_commit_ids:
396 423 multi_commit_ids.append(_commit_id)
397 424
398 425 commit_ids = multi_commit_ids or [commit_id]
399 426
400 427 comment = None
401 428 for current_id in filter(None, commit_ids):
402 429 comment = CommentsModel().create(
403 430 text=text,
404 431 repo=self.db_repo.repo_id,
405 432 user=self._rhodecode_db_user.user_id,
406 433 commit_id=current_id,
407 434 f_path=self.request.POST.get('f_path'),
408 435 line_no=self.request.POST.get('line'),
409 436 status_change=(ChangesetStatus.get_status_lbl(status)
410 437 if status else None),
411 438 status_change_type=status,
412 439 comment_type=comment_type,
413 440 resolves_comment_id=resolves_comment_id
414 441 )
415 442
416 443 # get status if set !
417 444 if status:
418 445 # if latest status was from pull request and it's closed
419 446 # disallow changing status !
420 447 # dont_allow_on_closed_pull_request = True !
421 448
422 449 try:
423 450 ChangesetStatusModel().set_status(
424 451 self.db_repo.repo_id,
425 452 status,
426 453 self._rhodecode_db_user.user_id,
427 454 comment,
428 455 revision=current_id,
429 456 dont_allow_on_closed_pull_request=True
430 457 )
431 458 except StatusChangeOnClosedPullRequestError:
432 459 msg = _('Changing the status of a commit associated with '
433 460 'a closed pull request is not allowed')
434 461 log.exception(msg)
435 462 h.flash(msg, category='warning')
436 463 raise HTTPFound(h.route_path(
437 464 'repo_commit', repo_name=self.db_repo_name,
438 465 commit_id=current_id))
439 466
440 467 # finalize, commit and redirect
441 468 Session().commit()
442 469
443 470 data = {
444 471 'target_id': h.safeid(h.safe_unicode(
445 472 self.request.POST.get('f_path'))),
446 473 }
447 474 if comment:
448 475 c.co = comment
449 476 rendered_comment = render(
450 477 'rhodecode:templates/changeset/changeset_comment_block.mako',
451 478 self._get_template_context(c), self.request)
452 479
453 480 data.update(comment.get_dict())
454 481 data.update({'rendered_text': rendered_comment})
455 482
456 483 return data
457 484
458 485 @LoginRequired()
459 486 @NotAnonymous()
460 487 @HasRepoPermissionAnyDecorator(
461 488 'repository.read', 'repository.write', 'repository.admin')
462 489 @CSRFRequired()
463 490 @view_config(
464 491 route_name='repo_commit_comment_preview', request_method='POST',
465 492 renderer='string', xhr=True)
466 493 def repo_commit_comment_preview(self):
467 494 # Technically a CSRF token is not needed as no state changes with this
468 495 # call. However, as this is a POST, it is better to have it, so automated
469 496 # tools don't flag it as potential CSRF.
470 497 # POST is required because the payload could be bigger than the maximum
471 498 # allowed by GET.
472 499
473 500 text = self.request.POST.get('text')
474 501 renderer = self.request.POST.get('renderer') or 'rst'
475 502 if text:
476 503 return h.render(text, renderer=renderer, mentions=True)
477 504 return ''
478 505
479 506 @LoginRequired()
480 507 @NotAnonymous()
481 508 @HasRepoPermissionAnyDecorator(
482 509 'repository.read', 'repository.write', 'repository.admin')
483 510 @CSRFRequired()
484 511 @view_config(
485 512 route_name='repo_commit_comment_delete', request_method='POST',
486 513 renderer='json_ext')
487 514 def repo_commit_comment_delete(self):
488 515 commit_id = self.request.matchdict['commit_id']
489 516 comment_id = self.request.matchdict['comment_id']
490 517
491 518 comment = ChangesetComment.get_or_404(comment_id)
492 519 if not comment:
493 520 log.debug('Comment with id:%s not found, skipping', comment_id)
494 521 # comment already deleted in another call probably
495 522 return True
496 523
497 524 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
498 525 super_admin = h.HasPermissionAny('hg.admin')()
499 526 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
500 527 is_repo_comment = comment.repo.repo_name == self.db_repo_name
501 528 comment_repo_admin = is_repo_admin and is_repo_comment
502 529
503 530 if super_admin or comment_owner or comment_repo_admin:
504 531 CommentsModel().delete(comment=comment, user=self._rhodecode_db_user)
505 532 Session().commit()
506 533 return True
507 534 else:
508 535 log.warning('No permissions for user %s to delete comment_id: %s',
509 536 self._rhodecode_db_user, comment_id)
510 537 raise HTTPNotFound()
511 538
512 539 @LoginRequired()
513 540 @HasRepoPermissionAnyDecorator(
514 541 'repository.read', 'repository.write', 'repository.admin')
515 542 @view_config(
516 543 route_name='repo_commit_data', request_method='GET',
517 544 renderer='json_ext', xhr=True)
518 545 def repo_commit_data(self):
519 546 commit_id = self.request.matchdict['commit_id']
520 547 self.load_default_context()
521 548
522 549 try:
523 550 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
524 551 except CommitDoesNotExistError as e:
525 552 return EmptyCommit(message=str(e))
526 553
527 554 @LoginRequired()
528 555 @HasRepoPermissionAnyDecorator(
529 556 'repository.read', 'repository.write', 'repository.admin')
530 557 @view_config(
531 558 route_name='repo_commit_children', request_method='GET',
532 559 renderer='json_ext', xhr=True)
533 560 def repo_commit_children(self):
534 561 commit_id = self.request.matchdict['commit_id']
535 562 self.load_default_context()
536 563
537 564 try:
538 565 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
539 566 children = commit.children
540 567 except CommitDoesNotExistError:
541 568 children = []
542 569
543 570 result = {"results": children}
544 571 return result
545 572
546 573 @LoginRequired()
547 574 @HasRepoPermissionAnyDecorator(
548 575 'repository.read', 'repository.write', 'repository.admin')
549 576 @view_config(
550 577 route_name='repo_commit_parents', request_method='GET',
551 578 renderer='json_ext')
552 579 def repo_commit_parents(self):
553 580 commit_id = self.request.matchdict['commit_id']
554 581 self.load_default_context()
555 582
556 583 try:
557 584 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
558 585 parents = commit.parents
559 586 except CommitDoesNotExistError:
560 587 parents = []
561 588 result = {"results": parents}
562 589 return result
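The caching added to `RepoCommitsView._commit` above reduces to: locate the cache file for this commit and its diff options, reuse it unless it is missing or `force_recache` is passed, otherwise build the diffset and store it. A condensed sketch of that flow using the helpers imported in this changeset; the wrapper function and the `build_diffset` callable are illustrative only:

    from rhodecode.lib.diffs import cache_diff, load_cached_diff, diff_cache_exist
    from rhodecode.lib.utils2 import str2bool

    def get_or_build_diffset(view, commit, commit1, commit2,
                             ign_whitespace, context, fulldiff, build_diffset):
        # cache files live under the repo's shadow cache path
        cache_path = view.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
            view.db_repo)
        cache_file_path = diff_cache_exist(
            cache_path, 'diff', commit.raw_id, ign_whitespace, context, fulldiff)

        caching_enabled = view._is_diff_cache_enabled(view.db_repo)
        force_recache = str2bool(view.request.GET.get('force_recache'))

        cached_diff = load_cached_diff(cache_file_path) if caching_enabled else None
        if not force_recache and cached_diff and cached_diff.get('diff'):
            return cached_diff['diff']  # reuse the previously rendered diffset

        diffset = build_diffset(commit1, commit2)  # the expensive path
        if caching_enabled:
            cache_diff(cache_file_path, diffset, None)
        return diffset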
@@ -1,322 +1,313 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
25 25 from pyramid.view import view_config
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 34 from rhodecode.lib.utils import safe_str
35 35 from rhodecode.lib.utils2 import safe_unicode, str2bool
36 36 from rhodecode.lib.vcs.exceptions import (
37 37 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
38 38 NodeDoesNotExistError)
39 39 from rhodecode.model.db import Repository, ChangesetStatus
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 class RepoCompareView(RepoAppView):
45 45 def load_default_context(self):
46 46 c = self._get_local_tmpl_context(include_app_defaults=True)
47 47
48 48 c.rhodecode_repo = self.rhodecode_vcs_repo
49 49
50 50
51 51 return c
52 52
53 53 def _get_commit_or_redirect(
54 54 self, ref, ref_type, repo, redirect_after=True, partial=False):
55 55 """
56 56 This is a safe way to get a commit. If an error occurs it
57 57 redirects to a commit with a proper message. If partial is set
58 58 then it does not do redirect raise and throws an exception instead.
59 59 """
60 60 _ = self.request.translate
61 61 try:
62 62 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
63 63 except EmptyRepositoryError:
64 64 if not redirect_after:
65 65 return repo.scm_instance().EMPTY_COMMIT
66 66 h.flash(h.literal(_('There are no commits yet')),
67 67 category='warning')
68 68 if not partial:
69 69 raise HTTPFound(
70 70 h.route_path('repo_summary', repo_name=repo.repo_name))
71 71 raise HTTPBadRequest()
72 72
73 73 except RepositoryError as e:
74 74 log.exception(safe_str(e))
75 75 h.flash(safe_str(h.escape(e)), category='warning')
76 76 if not partial:
77 77 raise HTTPFound(
78 78 h.route_path('repo_summary', repo_name=repo.repo_name))
79 79 raise HTTPBadRequest()
80 80
81 81 @LoginRequired()
82 82 @HasRepoPermissionAnyDecorator(
83 83 'repository.read', 'repository.write', 'repository.admin')
84 84 @view_config(
85 85 route_name='repo_compare_select', request_method='GET',
86 86 renderer='rhodecode:templates/compare/compare_diff.mako')
87 87 def compare_select(self):
88 88 _ = self.request.translate
89 89 c = self.load_default_context()
90 90
91 91 source_repo = self.db_repo_name
92 92 target_repo = self.request.GET.get('target_repo', source_repo)
93 93 c.source_repo = Repository.get_by_repo_name(source_repo)
94 94 c.target_repo = Repository.get_by_repo_name(target_repo)
95 95
96 96 if c.source_repo is None or c.target_repo is None:
97 97 raise HTTPNotFound()
98 98
99 99 c.compare_home = True
100 100 c.commit_ranges = []
101 101 c.collapse_all_commits = False
102 102 c.diffset = None
103 103 c.limited_diff = False
104 104 c.source_ref = c.target_ref = _('Select commit')
105 105 c.source_ref_type = ""
106 106 c.target_ref_type = ""
107 107 c.commit_statuses = ChangesetStatus.STATUSES
108 108 c.preview_mode = False
109 109 c.file_path = None
110 110
111 111 return self._get_template_context(c)
112 112
113 113 @LoginRequired()
114 114 @HasRepoPermissionAnyDecorator(
115 115 'repository.read', 'repository.write', 'repository.admin')
116 116 @view_config(
117 117 route_name='repo_compare', request_method='GET',
118 118 renderer=None)
119 119 def compare(self):
120 120 _ = self.request.translate
121 121 c = self.load_default_context()
122 122
123 123 source_ref_type = self.request.matchdict['source_ref_type']
124 124 source_ref = self.request.matchdict['source_ref']
125 125 target_ref_type = self.request.matchdict['target_ref_type']
126 126 target_ref = self.request.matchdict['target_ref']
127 127
128 128 # source_ref will be evaluated in source_repo
129 129 source_repo_name = self.db_repo_name
130 130 source_path, source_id = parse_path_ref(source_ref)
131 131
132 132 # target_ref will be evaluated in target_repo
133 133 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
134 134 target_path, target_id = parse_path_ref(
135 135 target_ref, default_path=self.request.GET.get('f_path', ''))
136 136
137 137 # if merge is True
138 138 # Show what changes since the shared ancestor commit of target/source
139 139 # the source would get if it was merged with target. Only commits
140 140 # which are in target but not in source will be shown.
141 141 merge = str2bool(self.request.GET.get('merge'))
142 142 # if merge is False
143 143 # Show a raw diff of source/target refs even if no ancestor exists
144 144
145 145 # c.fulldiff disables cut_off_limit
146 146 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
147 147
148 148 c.file_path = target_path
149 149 c.commit_statuses = ChangesetStatus.STATUSES
150 150
151 151 # if partial, returns just compare_commits.html (commits log)
152 152 partial = self.request.is_xhr
153 153
154 154 # swap url for compare_diff page
155 155 c.swap_url = h.route_path(
156 156 'repo_compare',
157 157 repo_name=target_repo_name,
158 158 source_ref_type=target_ref_type,
159 159 source_ref=target_ref,
160 160 target_repo=source_repo_name,
161 161 target_ref_type=source_ref_type,
162 162 target_ref=source_ref,
163 163 _query=dict(merge=merge and '1' or '', f_path=target_path))
164 164
165 165 source_repo = Repository.get_by_repo_name(source_repo_name)
166 166 target_repo = Repository.get_by_repo_name(target_repo_name)
167 167
168 168 if source_repo is None:
169 169 log.error('Could not find the source repo: {}'
170 170 .format(source_repo_name))
171 171 h.flash(_('Could not find the source repo: `{}`')
172 172 .format(h.escape(source_repo_name)), category='error')
173 173 raise HTTPFound(
174 174 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
175 175
176 176 if target_repo is None:
177 177 log.error('Could not find the target repo: {}'
178 178 .format(target_repo_name))
179 179 h.flash(_('Could not find the target repo: `{}`')
180 180 .format(h.escape(target_repo_name)), category='error')
181 181 raise HTTPFound(
182 182 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
183 183
184 184 source_scm = source_repo.scm_instance()
185 185 target_scm = target_repo.scm_instance()
186 186
187 187 source_alias = source_scm.alias
188 188 target_alias = target_scm.alias
189 189 if source_alias != target_alias:
190 190 msg = _('The comparison of two different kinds of remote repos '
191 191 'is not available')
192 192 log.error(msg)
193 193 h.flash(msg, category='error')
194 194 raise HTTPFound(
195 195 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
196 196
197 197 source_commit = self._get_commit_or_redirect(
198 198 ref=source_id, ref_type=source_ref_type, repo=source_repo,
199 199 partial=partial)
200 200 target_commit = self._get_commit_or_redirect(
201 201 ref=target_id, ref_type=target_ref_type, repo=target_repo,
202 202 partial=partial)
203 203
204 204 c.compare_home = False
205 205 c.source_repo = source_repo
206 206 c.target_repo = target_repo
207 207 c.source_ref = source_ref
208 208 c.target_ref = target_ref
209 209 c.source_ref_type = source_ref_type
210 210 c.target_ref_type = target_ref_type
211 211
212 212 pre_load = ["author", "branch", "date", "message"]
213 213 c.ancestor = None
214 214
215 215 if c.file_path:
216 216 if source_commit == target_commit:
217 217 c.commit_ranges = []
218 218 else:
219 219 c.commit_ranges = [target_commit]
220 220 else:
221 221 try:
222 222 c.commit_ranges = source_scm.compare(
223 223 source_commit.raw_id, target_commit.raw_id,
224 224 target_scm, merge, pre_load=pre_load)
225 225 if merge:
226 226 c.ancestor = source_scm.get_common_ancestor(
227 227 source_commit.raw_id, target_commit.raw_id, target_scm)
228 228 except RepositoryRequirementError:
229 229 msg = _('Could not compare repos with different '
230 230 'large file settings')
231 231 log.error(msg)
232 232 if partial:
233 233 return Response(msg)
234 234 h.flash(msg, category='error')
235 235 raise HTTPFound(
236 236 h.route_path('repo_compare_select',
237 237 repo_name=self.db_repo_name))
238 238
239 239 c.statuses = self.db_repo.statuses(
240 240 [x.raw_id for x in c.commit_ranges])
241 241
242 242 # auto collapse if we have more than limit
243 243 collapse_limit = diffs.DiffProcessor._collapse_commits_over
244 244 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
245 245
246 246 if partial: # for PR ajax commits loader
247 247 if not c.ancestor:
248 248 return Response('') # cannot merge if there is no ancestor
249 249
250 250 html = render(
251 251 'rhodecode:templates/compare/compare_commits.mako',
252 252 self._get_template_context(c), self.request)
253 253 return Response(html)
254 254
255 255 if c.ancestor:
256 256 # case we want a simple diff without incoming commits,
257 257 # previewing what will be merged.
258 258 # Make the diff on target repo (which is known to have target_ref)
259 259 log.debug('Using ancestor %s as source_ref instead of %s'
260 260 % (c.ancestor, source_ref))
261 261 source_repo = target_repo
262 262 source_commit = target_repo.get_commit(commit_id=c.ancestor)
263 263
264 264 # diff_limit will cut off the whole diff if the limit is applied
265 265 # otherwise it will just hide the big files from the front-end
266 266 diff_limit = c.visual.cut_off_limit_diff
267 267 file_limit = c.visual.cut_off_limit_file
268 268
269 269 log.debug('calculating diff between '
270 270 'source_ref:%s and target_ref:%s for repo `%s`',
271 271 source_commit, target_commit,
272 272 safe_unicode(source_repo.scm_instance().path))
273 273
274 274 if source_commit.repository != target_commit.repository:
275 275 msg = _(
276 276 "Repositories unrelated. "
277 277 "Cannot compare commit %(commit1)s from repository %(repo1)s "
278 278 "with commit %(commit2)s from repository %(repo2)s.") % {
279 279 'commit1': h.show_id(source_commit),
280 280 'repo1': source_repo.repo_name,
281 281 'commit2': h.show_id(target_commit),
282 282 'repo2': target_repo.repo_name,
283 283 }
284 284 h.flash(msg, category='error')
285 285 raise HTTPFound(
286 286 h.route_path('repo_compare_select',
287 287 repo_name=self.db_repo_name))
288 288
289 289 txt_diff = source_repo.scm_instance().get_diff(
290 290 commit1=source_commit, commit2=target_commit,
291 291 path=target_path, path1=source_path)
292 292
293 293 diff_processor = diffs.DiffProcessor(
294 294 txt_diff, format='newdiff', diff_limit=diff_limit,
295 295 file_limit=file_limit, show_full_diff=c.fulldiff)
296 296 _parsed = diff_processor.prepare()
297 297
298 def _node_getter(commit):
299 """ Returns a function that returns a node for a commit or None """
300 def get_node(fname):
301 try:
302 return commit.get_node(fname)
303 except NodeDoesNotExistError:
304 return None
305 return get_node
306
307 298 diffset = codeblocks.DiffSet(
308 299 repo_name=source_repo.repo_name,
309 source_node_getter=_node_getter(source_commit),
310 target_node_getter=_node_getter(target_commit),
300 source_node_getter=codeblocks.diffset_node_getter(source_commit),
301 target_node_getter=codeblocks.diffset_node_getter(target_commit),
311 302 )
312 303 c.diffset = self.path_filter.render_patchset_filtered(
313 304 diffset, _parsed, source_ref, target_ref)
314 305
315 306 c.preview_mode = merge
316 307 c.source_commit = source_commit
317 308 c.target_commit = target_commit
318 309
319 310 html = render(
320 311 'rhodecode:templates/compare/compare_diff.mako',
321 312 self._get_template_context(c), self.request)
322 313         return Response(html)
\ No newline at end of file
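
Note: this hunk (and the pull request view further below) drops the locally defined `_node_getter` closure in favour of a shared `codeblocks.diffset_node_getter`. That helper's body is not part of this excerpt; a minimal sketch, assuming it simply mirrors the inline helper removed above:

    # Sketch only, assuming diffset_node_getter mirrors the removed inline helper;
    # the actual implementation lives in rhodecode.lib.codeblocks.
    from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError

    def diffset_node_getter(commit):
        """Return a callable that fetches a file node from `commit`, or None."""
        def get_node(fname):
            try:
                return commit.get_node(fname)
            except NodeDoesNotExistError:
                return None
        return get_node

Callers then pass `diffset_node_getter(source_commit)` and `diffset_node_getter(target_commit)` into `codeblocks.DiffSet`, exactly as the updated lines above do.
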
@@ -1,1242 +1,1298 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode import events
33 33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34 34
35 35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 38 from rhodecode.lib.ext_json import json
38 39 from rhodecode.lib.auth import (
39 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 41 NotAnonymous, CSRFRequired)
41 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, NodeDoesNotExistError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
45 46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 47 from rhodecode.model.comment import CommentsModel
47 48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 49 ChangesetComment, ChangesetStatus, Repository)
49 50 from rhodecode.model.forms import PullRequestForm
50 51 from rhodecode.model.meta import Session
51 52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 53 from rhodecode.model.scm import ScmModel
53 54
54 55 log = logging.getLogger(__name__)
55 56
56 57
57 58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 59
59 60 def load_default_context(self):
60 61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 64
64 65 return c
65 66
66 67 def _get_pull_requests_list(
67 68 self, repo_name, source, filter_type, opened_by, statuses):
68 69
69 70 draw, start, limit = self._extract_chunk(self.request)
70 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 72 _render = self.request.get_partial_renderer(
72 73 'rhodecode:templates/data_table/_dt_elements.mako')
73 74
74 75 # pagination
75 76
76 77 if filter_type == 'awaiting_review':
77 78 pull_requests = PullRequestModel().get_awaiting_review(
78 79 repo_name, source=source, opened_by=opened_by,
79 80 statuses=statuses, offset=start, length=limit,
80 81 order_by=order_by, order_dir=order_dir)
81 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 83 repo_name, source=source, statuses=statuses,
83 84 opened_by=opened_by)
84 85 elif filter_type == 'awaiting_my_review':
85 86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 87 repo_name, source=source, opened_by=opened_by,
87 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 89 offset=start, length=limit, order_by=order_by,
89 90 order_dir=order_dir)
90 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 93 statuses=statuses, opened_by=opened_by)
93 94 else:
94 95 pull_requests = PullRequestModel().get_all(
95 96 repo_name, source=source, opened_by=opened_by,
96 97 statuses=statuses, offset=start, length=limit,
97 98 order_by=order_by, order_dir=order_dir)
98 99 pull_requests_total_count = PullRequestModel().count_all(
99 100 repo_name, source=source, statuses=statuses,
100 101 opened_by=opened_by)
101 102
102 103 data = []
103 104 comments_model = CommentsModel()
104 105 for pr in pull_requests:
105 106 comments = comments_model.get_all_comments(
106 107 self.db_repo.repo_id, pull_request=pr)
107 108
108 109 data.append({
109 110 'name': _render('pullrequest_name',
110 111 pr.pull_request_id, pr.target_repo.repo_name),
111 112 'name_raw': pr.pull_request_id,
112 113 'status': _render('pullrequest_status',
113 114 pr.calculated_review_status()),
114 115 'title': _render(
115 116 'pullrequest_title', pr.title, pr.description),
116 117 'description': h.escape(pr.description),
117 118 'updated_on': _render('pullrequest_updated_on',
118 119 h.datetime_to_time(pr.updated_on)),
119 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 121 'created_on': _render('pullrequest_updated_on',
121 122 h.datetime_to_time(pr.created_on)),
122 123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 124 'author': _render('pullrequest_author',
124 125 pr.author.full_contact, ),
125 126 'author_raw': pr.author.full_name,
126 127 'comments': _render('pullrequest_comments', len(comments)),
127 128 'comments_raw': len(comments),
128 129 'closed': pr.is_closed(),
129 130 })
130 131
131 132 data = ({
132 133 'draw': draw,
133 134 'data': data,
134 135 'recordsTotal': pull_requests_total_count,
135 136 'recordsFiltered': pull_requests_total_count,
136 137 })
137 138 return data
138 139
139 140 @LoginRequired()
140 141 @HasRepoPermissionAnyDecorator(
141 142 'repository.read', 'repository.write', 'repository.admin')
142 143 @view_config(
143 144 route_name='pullrequest_show_all', request_method='GET',
144 145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
145 146 def pull_request_list(self):
146 147 c = self.load_default_context()
147 148
148 149 req_get = self.request.GET
149 150 c.source = str2bool(req_get.get('source'))
150 151 c.closed = str2bool(req_get.get('closed'))
151 152 c.my = str2bool(req_get.get('my'))
152 153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
153 154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
154 155
155 156 c.active = 'open'
156 157 if c.my:
157 158 c.active = 'my'
158 159 if c.closed:
159 160 c.active = 'closed'
160 161 if c.awaiting_review and not c.source:
161 162 c.active = 'awaiting'
162 163 if c.source and not c.awaiting_review:
163 164 c.active = 'source'
164 165 if c.awaiting_my_review:
165 166 c.active = 'awaiting_my'
166 167
167 168 return self._get_template_context(c)
168 169
169 170 @LoginRequired()
170 171 @HasRepoPermissionAnyDecorator(
171 172 'repository.read', 'repository.write', 'repository.admin')
172 173 @view_config(
173 174 route_name='pullrequest_show_all_data', request_method='GET',
174 175 renderer='json_ext', xhr=True)
175 176 def pull_request_list_data(self):
176 177 self.load_default_context()
177 178
178 179 # additional filters
179 180 req_get = self.request.GET
180 181 source = str2bool(req_get.get('source'))
181 182 closed = str2bool(req_get.get('closed'))
182 183 my = str2bool(req_get.get('my'))
183 184 awaiting_review = str2bool(req_get.get('awaiting_review'))
184 185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
185 186
186 187 filter_type = 'awaiting_review' if awaiting_review \
187 188 else 'awaiting_my_review' if awaiting_my_review \
188 189 else None
189 190
190 191 opened_by = None
191 192 if my:
192 193 opened_by = [self._rhodecode_user.user_id]
193 194
194 195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
195 196 if closed:
196 197 statuses = [PullRequest.STATUS_CLOSED]
197 198
198 199 data = self._get_pull_requests_list(
199 200 repo_name=self.db_repo_name, source=source,
200 201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
201 202
202 203 return data
203 204
205 def _is_diff_cache_enabled(self, target_repo):
206 caching_enabled = self._get_general_setting(
207 target_repo, 'rhodecode_diff_cache')
208 log.debug('Diff caching enabled: %s', caching_enabled)
209 return caching_enabled
210
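
`_is_diff_cache_enabled` defers to `_get_general_setting`, which is not shown in this hunk. A plausible sketch of such a lookup, assuming it is backed by `VcsSettingsModel` general settings (hypothetical wiring; the real helper lives elsewhere and may differ):

    # Hypothetical sketch; the real _get_general_setting helper may differ.
    from rhodecode.model.settings import VcsSettingsModel

    def _get_general_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

With something like this in place, per-repository diff caching is toggled by the `rhodecode_diff_cache` general setting checked above.
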
204 211 def _get_diffset(self, source_repo_name, source_repo,
205 212 source_ref_id, target_ref_id,
206 target_commit, source_commit, diff_limit, fulldiff,
207 file_limit, display_inline_comments):
213 target_commit, source_commit, diff_limit, file_limit,
214 fulldiff):
208 215
209 216 vcs_diff = PullRequestModel().get_diff(
210 217 source_repo, source_ref_id, target_ref_id)
211 218
212 219 diff_processor = diffs.DiffProcessor(
213 220 vcs_diff, format='newdiff', diff_limit=diff_limit,
214 221 file_limit=file_limit, show_full_diff=fulldiff)
215 222
216 223 _parsed = diff_processor.prepare()
217 224
218 def _node_getter(commit):
219 def get_node(fname):
220 try:
221 return commit.get_node(fname)
222 except NodeDoesNotExistError:
223 return None
224
225 return get_node
226
227 225 diffset = codeblocks.DiffSet(
228 226 repo_name=self.db_repo_name,
229 227 source_repo_name=source_repo_name,
230 source_node_getter=_node_getter(target_commit),
231 target_node_getter=_node_getter(source_commit),
232 comments=display_inline_comments
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
233 230 )
234 231 diffset = self.path_filter.render_patchset_filtered(
235 232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
236 233
237 234 return diffset
238 235
239 236 @LoginRequired()
240 237 @HasRepoPermissionAnyDecorator(
241 238 'repository.read', 'repository.write', 'repository.admin')
242 239 @view_config(
243 240 route_name='pullrequest_show', request_method='GET',
244 241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
245 242 def pull_request_show(self):
246 243 pull_request_id = self.request.matchdict['pull_request_id']
247 244
248 245 c = self.load_default_context()
249 246
250 247 version = self.request.GET.get('version')
251 248 from_version = self.request.GET.get('from_version') or version
252 249 merge_checks = self.request.GET.get('merge_checks')
253 250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
254 251
255 252 (pull_request_latest,
256 253 pull_request_at_ver,
257 254 pull_request_display_obj,
258 255 at_version) = PullRequestModel().get_pr_version(
259 256 pull_request_id, version=version)
260 257 pr_closed = pull_request_latest.is_closed()
261 258
262 259 if pr_closed and (version or from_version):
263 260             # do not allow browsing versions
264 261 raise HTTPFound(h.route_path(
265 262 'pullrequest_show', repo_name=self.db_repo_name,
266 263 pull_request_id=pull_request_id))
267 264
268 265 versions = pull_request_display_obj.versions()
269 266
270 267 c.at_version = at_version
271 268 c.at_version_num = (at_version
272 269 if at_version and at_version != 'latest'
273 270 else None)
274 271 c.at_version_pos = ChangesetComment.get_index_from_version(
275 272 c.at_version_num, versions)
276 273
277 274 (prev_pull_request_latest,
278 275 prev_pull_request_at_ver,
279 276 prev_pull_request_display_obj,
280 277 prev_at_version) = PullRequestModel().get_pr_version(
281 278 pull_request_id, version=from_version)
282 279
283 280 c.from_version = prev_at_version
284 281 c.from_version_num = (prev_at_version
285 282 if prev_at_version and prev_at_version != 'latest'
286 283 else None)
287 284 c.from_version_pos = ChangesetComment.get_index_from_version(
288 285 c.from_version_num, versions)
289 286
290 287 # define if we're in COMPARE mode or VIEW at version mode
291 288 compare = at_version != prev_at_version
292 289
293 290         # the repo_name the pull request was opened against,
294 291         # i.e. target_repo, must match
295 292 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
296 293 raise HTTPNotFound()
297 294
298 295 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
299 296 pull_request_at_ver)
300 297
301 298 c.pull_request = pull_request_display_obj
302 299 c.pull_request_latest = pull_request_latest
303 300
304 301 if compare or (at_version and not at_version == 'latest'):
305 302 c.allowed_to_change_status = False
306 303 c.allowed_to_update = False
307 304 c.allowed_to_merge = False
308 305 c.allowed_to_delete = False
309 306 c.allowed_to_comment = False
310 307 c.allowed_to_close = False
311 308 else:
312 309 can_change_status = PullRequestModel().check_user_change_status(
313 310 pull_request_at_ver, self._rhodecode_user)
314 311 c.allowed_to_change_status = can_change_status and not pr_closed
315 312
316 313 c.allowed_to_update = PullRequestModel().check_user_update(
317 314 pull_request_latest, self._rhodecode_user) and not pr_closed
318 315 c.allowed_to_merge = PullRequestModel().check_user_merge(
319 316 pull_request_latest, self._rhodecode_user) and not pr_closed
320 317 c.allowed_to_delete = PullRequestModel().check_user_delete(
321 318 pull_request_latest, self._rhodecode_user) and not pr_closed
322 319 c.allowed_to_comment = not pr_closed
323 320 c.allowed_to_close = c.allowed_to_merge and not pr_closed
324 321
325 322 c.forbid_adding_reviewers = False
326 323 c.forbid_author_to_review = False
327 324 c.forbid_commit_author_to_review = False
328 325
329 326 if pull_request_latest.reviewer_data and \
330 327 'rules' in pull_request_latest.reviewer_data:
331 328 rules = pull_request_latest.reviewer_data['rules'] or {}
332 329 try:
333 330 c.forbid_adding_reviewers = rules.get(
334 331 'forbid_adding_reviewers')
335 332 c.forbid_author_to_review = rules.get(
336 333 'forbid_author_to_review')
337 334 c.forbid_commit_author_to_review = rules.get(
338 335 'forbid_commit_author_to_review')
339 336 except Exception:
340 337 pass
341 338
342 339 # check merge capabilities
343 340 _merge_check = MergeCheck.validate(
344 341 pull_request_latest, user=self._rhodecode_user,
345 342 translator=self.request.translate)
346 343 c.pr_merge_errors = _merge_check.error_details
347 344 c.pr_merge_possible = not _merge_check.failed
348 345 c.pr_merge_message = _merge_check.merge_msg
349 346
350 347 c.pr_merge_info = MergeCheck.get_merge_conditions(
351 348 pull_request_latest, translator=self.request.translate)
352 349
353 350 c.pull_request_review_status = _merge_check.review_status
354 351 if merge_checks:
355 352 self.request.override_renderer = \
356 353 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
357 354 return self._get_template_context(c)
358 355
359 356 comments_model = CommentsModel()
360 357
361 358 # reviewers and statuses
362 359 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
363 360 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
364 361
365 362 # GENERAL COMMENTS with versions #
366 363 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
367 364 q = q.order_by(ChangesetComment.comment_id.asc())
368 365 general_comments = q
369 366
370 367 # pick comments we want to render at current version
371 368 c.comment_versions = comments_model.aggregate_comments(
372 369 general_comments, versions, c.at_version_num)
373 370 c.comments = c.comment_versions[c.at_version_num]['until']
374 371
375 372 # INLINE COMMENTS with versions #
376 373 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
377 374 q = q.order_by(ChangesetComment.comment_id.asc())
378 375 inline_comments = q
379 376
380 377 c.inline_versions = comments_model.aggregate_comments(
381 378 inline_comments, versions, c.at_version_num, inline=True)
382 379
383 380 # inject latest version
384 381 latest_ver = PullRequest.get_pr_display_object(
385 382 pull_request_latest, pull_request_latest)
386 383
387 384 c.versions = versions + [latest_ver]
388 385
389 386         # when viewing a specific version, do not show comments
390 387         # made after that version
391 388 display_inline_comments = collections.defaultdict(
392 389 lambda: collections.defaultdict(list))
393 390 for co in inline_comments:
394 391 if c.at_version_num:
395 392                 # pick comments up to and including the given version, so we
396 393                 # don't render comments from higher versions
397 394 should_render = co.pull_request_version_id and \
398 395 co.pull_request_version_id <= c.at_version_num
399 396 else:
400 397 # showing all, for 'latest'
401 398 should_render = True
402 399
403 400 if should_render:
404 401 display_inline_comments[co.f_path][co.line_no].append(co)
405 402
406 403         # load diff data into the template context; in compare mode the
407 404         # diff is calculated from the changes between PR versions
408 405
409 406 source_repo = pull_request_at_ver.source_repo
410 407 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
411 408
412 409 target_repo = pull_request_at_ver.target_repo
413 410 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
414 411
415 412 if compare:
416 413 # in compare switch the diff base to latest commit from prev version
417 414 target_ref_id = prev_pull_request_display_obj.revisions[0]
418 415
419 416         # even though PRs can be opened against bookmarks/branches/tags, we always
420 417         # convert these to a rev to guard against later bookmark or branch changes
421 418 c.source_ref_type = 'rev'
422 419 c.source_ref = source_ref_id
423 420
424 421 c.target_ref_type = 'rev'
425 422 c.target_ref = target_ref_id
426 423
427 424 c.source_repo = source_repo
428 425 c.target_repo = target_repo
429 426
430 427 c.commit_ranges = []
431 428 source_commit = EmptyCommit()
432 429 target_commit = EmptyCommit()
433 430 c.missing_requirements = False
434 431
435 432 source_scm = source_repo.scm_instance()
436 433 target_scm = target_repo.scm_instance()
437 434
438 435         # try the shadow repo first, fall back to the regular repo
439 436 try:
440 437 commits_source_repo = pull_request_latest.get_shadow_repo()
441 438 except Exception:
442 439 log.debug('Failed to get shadow repo', exc_info=True)
443 440 commits_source_repo = source_scm
444 441
445 442 c.commits_source_repo = commits_source_repo
446 commit_cache = {}
447 try:
448 pre_load = ["author", "branch", "date", "message"]
449 show_revs = pull_request_at_ver.revisions
450 for rev in show_revs:
451 comm = commits_source_repo.get_commit(
452 commit_id=rev, pre_load=pre_load)
453 c.commit_ranges.append(comm)
454 commit_cache[comm.raw_id] = comm
455
456 # Order here matters, we first need to get target, and then
457 # the source
458 target_commit = commits_source_repo.get_commit(
459 commit_id=safe_str(target_ref_id))
460
461 source_commit = commits_source_repo.get_commit(
462 commit_id=safe_str(source_ref_id))
463
464 except CommitDoesNotExistError:
465 log.warning(
466 'Failed to get commit from `{}` repo'.format(
467 commits_source_repo), exc_info=True)
468 except RepositoryRequirementError:
469 log.warning(
470 'Failed to get all required data from repo', exc_info=True)
471 c.missing_requirements = True
472
473 443 c.ancestor = None # set it to None, to hide it from PR view
474 444
475 try:
476 ancestor_id = source_scm.get_common_ancestor(
477 source_commit.raw_id, target_commit.raw_id, target_scm)
478 c.ancestor_commit = source_scm.get_commit(ancestor_id)
479 except Exception:
480 c.ancestor_commit = None
445         # an empty version means 'latest'; normalize it so the
446         # same diff is not cached twice
447 version_normalized = version or 'latest'
448 from_version_normalized = from_version or 'latest'
449
450 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
451 target_repo)
452 cache_file_path = diff_cache_exist(
453 cache_path, 'pull_request', pull_request_id, version_normalized,
454 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
455
456 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
457 force_recache = str2bool(self.request.GET.get('force_recache'))
458
459 cached_diff = None
460 if caching_enabled:
461 cached_diff = load_cached_diff(cache_file_path)
481 462
463 has_proper_commit_cache = (
464 cached_diff and cached_diff.get('commits')
465 and len(cached_diff.get('commits', [])) == 5
466 and cached_diff.get('commits')[0]
467 and cached_diff.get('commits')[3])
468 if not force_recache and has_proper_commit_cache:
469 diff_commit_cache = \
470 (ancestor_commit, commit_cache, missing_requirements,
471 source_commit, target_commit) = cached_diff['commits']
472 else:
473 diff_commit_cache = \
474 (ancestor_commit, commit_cache, missing_requirements,
475 source_commit, target_commit) = self.get_commits(
476 commits_source_repo,
477 pull_request_at_ver,
478 source_commit,
479 source_ref_id,
480 source_scm,
481 target_commit,
482 target_ref_id,
483 target_scm)
484
485 # register our commit range
486 for comm in commit_cache.values():
487 c.commit_ranges.append(comm)
488
489 c.missing_requirements = missing_requirements
490 c.ancestor_commit = ancestor_commit
482 491 c.statuses = source_repo.statuses(
483 492 [x.raw_id for x in c.commit_ranges])
484 493
485 494 # auto collapse if we have more than limit
486 495 collapse_limit = diffs.DiffProcessor._collapse_commits_over
487 496 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
488 497 c.compare_mode = compare
489 498
490 499         # diff_limit is the old behavior: it cuts off the whole diff
491 500         # if the limit is applied; otherwise it just hides the
492 501         # big files from the front-end
493 502 diff_limit = c.visual.cut_off_limit_diff
494 503 file_limit = c.visual.cut_off_limit_file
495 504
496 505 c.missing_commits = False
497 506 if (c.missing_requirements
498 507 or isinstance(source_commit, EmptyCommit)
499 508 or source_commit == target_commit):
500 509
501 510 c.missing_commits = True
502 511 else:
512 c.inline_comments = display_inline_comments
503 513
504 c.diffset = self._get_diffset(
505 c.source_repo.repo_name, commits_source_repo,
506 source_ref_id, target_ref_id,
507 target_commit, source_commit,
508 diff_limit, c.fulldiff, file_limit, display_inline_comments)
514 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
515 if not force_recache and has_proper_diff_cache:
516 c.diffset = cached_diff['diff']
517 (ancestor_commit, commit_cache, missing_requirements,
518 source_commit, target_commit) = cached_diff['commits']
519 else:
520 c.diffset = self._get_diffset(
521 c.source_repo.repo_name, commits_source_repo,
522 source_ref_id, target_ref_id,
523 target_commit, source_commit,
524 diff_limit, file_limit, c.fulldiff)
525
526 # save cached diff
527 if caching_enabled:
528 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
509 529
510 530 c.limited_diff = c.diffset.limited_diff
511 531
512 532 # calculate removed files that are bound to comments
513 533 comment_deleted_files = [
514 534 fname for fname in display_inline_comments
515 535 if fname not in c.diffset.file_stats]
516 536
517 537 c.deleted_files_comments = collections.defaultdict(dict)
518 538 for fname, per_line_comments in display_inline_comments.items():
519 539 if fname in comment_deleted_files:
520 540 c.deleted_files_comments[fname]['stats'] = 0
521 541 c.deleted_files_comments[fname]['comments'] = list()
522 542 for lno, comments in per_line_comments.items():
523 543 c.deleted_files_comments[fname]['comments'].extend(
524 544 comments)
525 545
526 546         # this is a hack to display links properly: when creating a PR, the
527 547         # compare view and others use a different notation, and
528 548         # compare_commits.mako renders links based on the target_repo.
529 549         # We need to swap that here to generate them properly on the HTML side
530 550 c.target_repo = c.source_repo
531 551
532 552 c.commit_statuses = ChangesetStatus.STATUSES
533 553
534 554 c.show_version_changes = not pr_closed
535 555 if c.show_version_changes:
536 556 cur_obj = pull_request_at_ver
537 557 prev_obj = prev_pull_request_at_ver
538 558
539 559 old_commit_ids = prev_obj.revisions
540 560 new_commit_ids = cur_obj.revisions
541 561 commit_changes = PullRequestModel()._calculate_commit_id_changes(
542 562 old_commit_ids, new_commit_ids)
543 563 c.commit_changes_summary = commit_changes
544 564
545 565 # calculate the diff for commits between versions
546 566 c.commit_changes = []
547 567 mark = lambda cs, fw: list(
548 568 h.itertools.izip_longest([], cs, fillvalue=fw))
549 569 for c_type, raw_id in mark(commit_changes.added, 'a') \
550 570 + mark(commit_changes.removed, 'r') \
551 571 + mark(commit_changes.common, 'c'):
552 572
553 573 if raw_id in commit_cache:
554 574 commit = commit_cache[raw_id]
555 575 else:
556 576 try:
557 577 commit = commits_source_repo.get_commit(raw_id)
558 578 except CommitDoesNotExistError:
559 579                         # in case extraction fails, still use a "dummy" commit
560 580                         # for display in the commit diff
561 581 commit = h.AttributeDict(
562 582 {'raw_id': raw_id,
563 583 'message': 'EMPTY or MISSING COMMIT'})
564 584 c.commit_changes.append([c_type, commit])
565 585
566 586 # current user review statuses for each version
567 587 c.review_versions = {}
568 588 if self._rhodecode_user.user_id in allowed_reviewers:
569 589 for co in general_comments:
570 590 if co.author.user_id == self._rhodecode_user.user_id:
571 # each comment has a status change
572 591 status = co.status_change
573 592 if status:
574 593 _ver_pr = status[0].comment.pull_request_version_id
575 594 c.review_versions[_ver_pr] = status[0]
576 595
577 596 return self._get_template_context(c)
578 597
598 def get_commits(
599 self, commits_source_repo, pull_request_at_ver, source_commit,
600 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
601 commit_cache = collections.OrderedDict()
602 missing_requirements = False
603 try:
604 pre_load = ["author", "branch", "date", "message"]
605 show_revs = pull_request_at_ver.revisions
606 for rev in show_revs:
607 comm = commits_source_repo.get_commit(
608 commit_id=rev, pre_load=pre_load)
609 commit_cache[comm.raw_id] = comm
610
611             # Order matters here: we first need to get the target, and then
612 # the source
613 target_commit = commits_source_repo.get_commit(
614 commit_id=safe_str(target_ref_id))
615
616 source_commit = commits_source_repo.get_commit(
617 commit_id=safe_str(source_ref_id))
618 except CommitDoesNotExistError:
619 log.warning(
620 'Failed to get commit from `{}` repo'.format(
621 commits_source_repo), exc_info=True)
622 except RepositoryRequirementError:
623 log.warning(
624 'Failed to get all required data from repo', exc_info=True)
625 missing_requirements = True
626 ancestor_commit = None
627 try:
628 ancestor_id = source_scm.get_common_ancestor(
629 source_commit.raw_id, target_commit.raw_id, target_scm)
630 ancestor_commit = source_scm.get_commit(ancestor_id)
631 except Exception:
632 ancestor_commit = None
633 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
634
579 635 def assure_not_empty_repo(self):
580 636 _ = self.request.translate
581 637
582 638 try:
583 639 self.db_repo.scm_instance().get_commit()
584 640 except EmptyRepositoryError:
585 641 h.flash(h.literal(_('There are no commits yet')),
586 642 category='warning')
587 643 raise HTTPFound(
588 644 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
589 645
590 646 @LoginRequired()
591 647 @NotAnonymous()
592 648 @HasRepoPermissionAnyDecorator(
593 649 'repository.read', 'repository.write', 'repository.admin')
594 650 @view_config(
595 651 route_name='pullrequest_new', request_method='GET',
596 652 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
597 653 def pull_request_new(self):
598 654 _ = self.request.translate
599 655 c = self.load_default_context()
600 656
601 657 self.assure_not_empty_repo()
602 658 source_repo = self.db_repo
603 659
604 660 commit_id = self.request.GET.get('commit')
605 661 branch_ref = self.request.GET.get('branch')
606 662 bookmark_ref = self.request.GET.get('bookmark')
607 663
608 664 try:
609 665 source_repo_data = PullRequestModel().generate_repo_data(
610 666 source_repo, commit_id=commit_id,
611 667 branch=branch_ref, bookmark=bookmark_ref,
612 668 translator=self.request.translate)
613 669 except CommitDoesNotExistError as e:
614 670 log.exception(e)
615 671 h.flash(_('Commit does not exist'), 'error')
616 672 raise HTTPFound(
617 673 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
618 674
619 675 default_target_repo = source_repo
620 676
621 677 if source_repo.parent:
622 678 parent_vcs_obj = source_repo.parent.scm_instance()
623 679 if parent_vcs_obj and not parent_vcs_obj.is_empty():
624 680 # change default if we have a parent repo
625 681 default_target_repo = source_repo.parent
626 682
627 683 target_repo_data = PullRequestModel().generate_repo_data(
628 684 default_target_repo, translator=self.request.translate)
629 685
630 686 selected_source_ref = source_repo_data['refs']['selected_ref']
631 687 title_source_ref = ''
632 688 if selected_source_ref:
633 689 title_source_ref = selected_source_ref.split(':', 2)[1]
634 690 c.default_title = PullRequestModel().generate_pullrequest_title(
635 691 source=source_repo.repo_name,
636 692 source_ref=title_source_ref,
637 693 target=default_target_repo.repo_name
638 694 )
639 695
640 696 c.default_repo_data = {
641 697 'source_repo_name': source_repo.repo_name,
642 698 'source_refs_json': json.dumps(source_repo_data),
643 699 'target_repo_name': default_target_repo.repo_name,
644 700 'target_refs_json': json.dumps(target_repo_data),
645 701 }
646 702 c.default_source_ref = selected_source_ref
647 703
648 704 return self._get_template_context(c)
649 705
650 706 @LoginRequired()
651 707 @NotAnonymous()
652 708 @HasRepoPermissionAnyDecorator(
653 709 'repository.read', 'repository.write', 'repository.admin')
654 710 @view_config(
655 711 route_name='pullrequest_repo_refs', request_method='GET',
656 712 renderer='json_ext', xhr=True)
657 713 def pull_request_repo_refs(self):
658 714 self.load_default_context()
659 715 target_repo_name = self.request.matchdict['target_repo_name']
660 716 repo = Repository.get_by_repo_name(target_repo_name)
661 717 if not repo:
662 718 raise HTTPNotFound()
663 719
664 720 target_perm = HasRepoPermissionAny(
665 721 'repository.read', 'repository.write', 'repository.admin')(
666 722 target_repo_name)
667 723 if not target_perm:
668 724 raise HTTPNotFound()
669 725
670 726 return PullRequestModel().generate_repo_data(
671 727 repo, translator=self.request.translate)
672 728
673 729 @LoginRequired()
674 730 @NotAnonymous()
675 731 @HasRepoPermissionAnyDecorator(
676 732 'repository.read', 'repository.write', 'repository.admin')
677 733 @view_config(
678 734 route_name='pullrequest_repo_destinations', request_method='GET',
679 735 renderer='json_ext', xhr=True)
680 736 def pull_request_repo_destinations(self):
681 737 _ = self.request.translate
682 738 filter_query = self.request.GET.get('query')
683 739
684 740 query = Repository.query() \
685 741 .order_by(func.length(Repository.repo_name)) \
686 742 .filter(
687 743 or_(Repository.repo_name == self.db_repo.repo_name,
688 744 Repository.fork_id == self.db_repo.repo_id))
689 745
690 746 if filter_query:
691 747 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
692 748 query = query.filter(
693 749 Repository.repo_name.ilike(ilike_expression))
694 750
695 751 add_parent = False
696 752 if self.db_repo.parent:
697 753 if filter_query in self.db_repo.parent.repo_name:
698 754 parent_vcs_obj = self.db_repo.parent.scm_instance()
699 755 if parent_vcs_obj and not parent_vcs_obj.is_empty():
700 756 add_parent = True
701 757
702 758 limit = 20 - 1 if add_parent else 20
703 759 all_repos = query.limit(limit).all()
704 760 if add_parent:
705 761 all_repos += [self.db_repo.parent]
706 762
707 763 repos = []
708 764 for obj in ScmModel().get_repos(all_repos):
709 765 repos.append({
710 766 'id': obj['name'],
711 767 'text': obj['name'],
712 768 'type': 'repo',
713 769 'obj': obj['dbrepo']
714 770 })
715 771
716 772 data = {
717 773 'more': False,
718 774 'results': [{
719 775 'text': _('Repositories'),
720 776 'children': repos
721 777 }] if repos else []
722 778 }
723 779 return data
724 780
725 781 @LoginRequired()
726 782 @NotAnonymous()
727 783 @HasRepoPermissionAnyDecorator(
728 784 'repository.read', 'repository.write', 'repository.admin')
729 785 @CSRFRequired()
730 786 @view_config(
731 787 route_name='pullrequest_create', request_method='POST',
732 788 renderer=None)
733 789 def pull_request_create(self):
734 790 _ = self.request.translate
735 791 self.assure_not_empty_repo()
736 792 self.load_default_context()
737 793
738 794 controls = peppercorn.parse(self.request.POST.items())
739 795
740 796 try:
741 797 form = PullRequestForm(
742 798 self.request.translate, self.db_repo.repo_id)()
743 799 _form = form.to_python(controls)
744 800 except formencode.Invalid as errors:
745 801 if errors.error_dict.get('revisions'):
746 802 msg = 'Revisions: %s' % errors.error_dict['revisions']
747 803 elif errors.error_dict.get('pullrequest_title'):
748 804 msg = errors.error_dict.get('pullrequest_title')
749 805 else:
750 806 msg = _('Error creating pull request: {}').format(errors)
751 807 log.exception(msg)
752 808 h.flash(msg, 'error')
753 809
754 810 # would rather just go back to form ...
755 811 raise HTTPFound(
756 812 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
757 813
758 814 source_repo = _form['source_repo']
759 815 source_ref = _form['source_ref']
760 816 target_repo = _form['target_repo']
761 817 target_ref = _form['target_ref']
762 818 commit_ids = _form['revisions'][::-1]
763 819
764 820 # find the ancestor for this pr
765 821 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
766 822 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
767 823
768 824         # re-check permissions here:
769 825         # for the source_repo we must have read permissions
770 826
771 827 source_perm = HasRepoPermissionAny(
772 828 'repository.read',
773 829 'repository.write', 'repository.admin')(source_db_repo.repo_name)
774 830 if not source_perm:
775 831 msg = _('Not Enough permissions to source repo `{}`.'.format(
776 832 source_db_repo.repo_name))
777 833 h.flash(msg, category='error')
778 834 # copy the args back to redirect
779 835 org_query = self.request.GET.mixed()
780 836 raise HTTPFound(
781 837 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
782 838 _query=org_query))
783 839
784 840         # for the target repo we must have read permissions; later on
785 841         # we also want to check branch permissions here
786 842 target_perm = HasRepoPermissionAny(
787 843 'repository.read',
788 844 'repository.write', 'repository.admin')(target_db_repo.repo_name)
789 845 if not target_perm:
790 846 msg = _('Not Enough permissions to target repo `{}`.'.format(
791 847 target_db_repo.repo_name))
792 848 h.flash(msg, category='error')
793 849 # copy the args back to redirect
794 850 org_query = self.request.GET.mixed()
795 851 raise HTTPFound(
796 852 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
797 853 _query=org_query))
798 854
799 855 source_scm = source_db_repo.scm_instance()
800 856 target_scm = target_db_repo.scm_instance()
801 857
802 858 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
803 859 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
804 860
805 861 ancestor = source_scm.get_common_ancestor(
806 862 source_commit.raw_id, target_commit.raw_id, target_scm)
807 863
808 864 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
809 865 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
810 866
811 867 pullrequest_title = _form['pullrequest_title']
812 868 title_source_ref = source_ref.split(':', 2)[1]
813 869 if not pullrequest_title:
814 870 pullrequest_title = PullRequestModel().generate_pullrequest_title(
815 871 source=source_repo,
816 872 source_ref=title_source_ref,
817 873 target=target_repo
818 874 )
819 875
820 876 description = _form['pullrequest_desc']
821 877
822 878 get_default_reviewers_data, validate_default_reviewers = \
823 879 PullRequestModel().get_reviewer_functions()
824 880
825 881 # recalculate reviewers logic, to make sure we can validate this
826 882 reviewer_rules = get_default_reviewers_data(
827 883 self._rhodecode_db_user, source_db_repo,
828 884 source_commit, target_db_repo, target_commit)
829 885
830 886 given_reviewers = _form['review_members']
831 887 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
832 888
833 889 try:
834 890 pull_request = PullRequestModel().create(
835 891 self._rhodecode_user.user_id, source_repo, source_ref,
836 892 target_repo, target_ref, commit_ids, reviewers,
837 893 pullrequest_title, description, reviewer_rules
838 894 )
839 895 Session().commit()
840 896
841 897 h.flash(_('Successfully opened new pull request'),
842 898 category='success')
843 899 except Exception:
844 900 msg = _('Error occurred during creation of this pull request.')
845 901 log.exception(msg)
846 902 h.flash(msg, category='error')
847 903
848 904 # copy the args back to redirect
849 905 org_query = self.request.GET.mixed()
850 906 raise HTTPFound(
851 907 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
852 908 _query=org_query))
853 909
854 910 raise HTTPFound(
855 911 h.route_path('pullrequest_show', repo_name=target_repo,
856 912 pull_request_id=pull_request.pull_request_id))
857 913
858 914 @LoginRequired()
859 915 @NotAnonymous()
860 916 @HasRepoPermissionAnyDecorator(
861 917 'repository.read', 'repository.write', 'repository.admin')
862 918 @CSRFRequired()
863 919 @view_config(
864 920 route_name='pullrequest_update', request_method='POST',
865 921 renderer='json_ext')
866 922 def pull_request_update(self):
867 923 pull_request = PullRequest.get_or_404(
868 924 self.request.matchdict['pull_request_id'])
869 925 _ = self.request.translate
870 926
871 927 self.load_default_context()
872 928
873 929 if pull_request.is_closed():
874 930 log.debug('update: forbidden because pull request is closed')
875 931 msg = _(u'Cannot update closed pull requests.')
876 932 h.flash(msg, category='error')
877 933 return True
878 934
879 935 # only owner or admin can update it
880 936 allowed_to_update = PullRequestModel().check_user_update(
881 937 pull_request, self._rhodecode_user)
882 938 if allowed_to_update:
883 939 controls = peppercorn.parse(self.request.POST.items())
884 940
885 941 if 'review_members' in controls:
886 942 self._update_reviewers(
887 943 pull_request, controls['review_members'],
888 944 pull_request.reviewer_data)
889 945 elif str2bool(self.request.POST.get('update_commits', 'false')):
890 946 self._update_commits(pull_request)
891 947 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
892 948 self._edit_pull_request(pull_request)
893 949 else:
894 950 raise HTTPBadRequest()
895 951 return True
896 952 raise HTTPForbidden()
897 953
898 954 def _edit_pull_request(self, pull_request):
899 955 _ = self.request.translate
900 956 try:
901 957 PullRequestModel().edit(
902 958 pull_request, self.request.POST.get('title'),
903 959 self.request.POST.get('description'), self._rhodecode_user)
904 960 except ValueError:
905 961 msg = _(u'Cannot update closed pull requests.')
906 962 h.flash(msg, category='error')
907 963 return
908 964 else:
909 965 Session().commit()
910 966
911 967 msg = _(u'Pull request title & description updated.')
912 968 h.flash(msg, category='success')
913 969 return
914 970
915 971 def _update_commits(self, pull_request):
916 972 _ = self.request.translate
917 973 resp = PullRequestModel().update_commits(pull_request)
918 974
919 975 if resp.executed:
920 976
921 977 if resp.target_changed and resp.source_changed:
922 978 changed = 'target and source repositories'
923 979 elif resp.target_changed and not resp.source_changed:
924 980 changed = 'target repository'
925 981 elif not resp.target_changed and resp.source_changed:
926 982 changed = 'source repository'
927 983 else:
928 984 changed = 'nothing'
929 985
930 986 msg = _(
931 987 u'Pull request updated to "{source_commit_id}" with '
932 988 u'{count_added} added, {count_removed} removed commits. '
933 989 u'Source of changes: {change_source}')
934 990 msg = msg.format(
935 991 source_commit_id=pull_request.source_ref_parts.commit_id,
936 992 count_added=len(resp.changes.added),
937 993 count_removed=len(resp.changes.removed),
938 994 change_source=changed)
939 995 h.flash(msg, category='success')
940 996
941 997 channel = '/repo${}$/pr/{}'.format(
942 998 pull_request.target_repo.repo_name,
943 999 pull_request.pull_request_id)
944 1000 message = msg + (
945 1001 ' - <a onclick="window.location.reload()">'
946 1002 '<strong>{}</strong></a>'.format(_('Reload page')))
947 1003 channelstream.post_message(
948 1004 channel, message, self._rhodecode_user.username,
949 1005 registry=self.request.registry)
950 1006 else:
951 1007 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
952 1008 warning_reasons = [
953 1009 UpdateFailureReason.NO_CHANGE,
954 1010 UpdateFailureReason.WRONG_REF_TYPE,
955 1011 ]
956 1012 category = 'warning' if resp.reason in warning_reasons else 'error'
957 1013 h.flash(msg, category=category)
958 1014
959 1015 @LoginRequired()
960 1016 @NotAnonymous()
961 1017 @HasRepoPermissionAnyDecorator(
962 1018 'repository.read', 'repository.write', 'repository.admin')
963 1019 @CSRFRequired()
964 1020 @view_config(
965 1021 route_name='pullrequest_merge', request_method='POST',
966 1022 renderer='json_ext')
967 1023 def pull_request_merge(self):
968 1024 """
969 1025 Merge will perform a server-side merge of the specified
970 1026 pull request, if the pull request is approved and mergeable.
971 1027 After successful merging, the pull request is automatically
972 1028 closed, with a relevant comment.
973 1029 """
974 1030 pull_request = PullRequest.get_or_404(
975 1031 self.request.matchdict['pull_request_id'])
976 1032
977 1033 self.load_default_context()
978 1034 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
979 1035 translator=self.request.translate)
980 1036 merge_possible = not check.failed
981 1037
982 1038 for err_type, error_msg in check.errors:
983 1039 h.flash(error_msg, category=err_type)
984 1040
985 1041 if merge_possible:
986 1042 log.debug("Pre-conditions checked, trying to merge.")
987 1043 extras = vcs_operation_context(
988 1044 self.request.environ, repo_name=pull_request.target_repo.repo_name,
989 1045 username=self._rhodecode_db_user.username, action='push',
990 1046 scm=pull_request.target_repo.repo_type)
991 1047 self._merge_pull_request(
992 1048 pull_request, self._rhodecode_db_user, extras)
993 1049 else:
994 1050 log.debug("Pre-conditions failed, NOT merging.")
995 1051
996 1052 raise HTTPFound(
997 1053 h.route_path('pullrequest_show',
998 1054 repo_name=pull_request.target_repo.repo_name,
999 1055 pull_request_id=pull_request.pull_request_id))
1000 1056
1001 1057 def _merge_pull_request(self, pull_request, user, extras):
1002 1058 _ = self.request.translate
1003 1059 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1004 1060
1005 1061 if merge_resp.executed:
1006 1062 log.debug("The merge was successful, closing the pull request.")
1007 1063 PullRequestModel().close_pull_request(
1008 1064 pull_request.pull_request_id, user)
1009 1065 Session().commit()
1010 1066 msg = _('Pull request was successfully merged and closed.')
1011 1067 h.flash(msg, category='success')
1012 1068 else:
1013 1069 log.debug(
1014 1070 "The merge was not successful. Merge response: %s",
1015 1071 merge_resp)
1016 1072 msg = PullRequestModel().merge_status_message(
1017 1073 merge_resp.failure_reason)
1018 1074 h.flash(msg, category='error')
1019 1075
1020 1076 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1021 1077 _ = self.request.translate
1022 1078 get_default_reviewers_data, validate_default_reviewers = \
1023 1079 PullRequestModel().get_reviewer_functions()
1024 1080
1025 1081 try:
1026 1082 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1027 1083 except ValueError as e:
1028 1084 log.error('Reviewers Validation: {}'.format(e))
1029 1085 h.flash(e, category='error')
1030 1086 return
1031 1087
1032 1088 PullRequestModel().update_reviewers(
1033 1089 pull_request, reviewers, self._rhodecode_user)
1034 1090 h.flash(_('Pull request reviewers updated.'), category='success')
1035 1091 Session().commit()
1036 1092
1037 1093 @LoginRequired()
1038 1094 @NotAnonymous()
1039 1095 @HasRepoPermissionAnyDecorator(
1040 1096 'repository.read', 'repository.write', 'repository.admin')
1041 1097 @CSRFRequired()
1042 1098 @view_config(
1043 1099 route_name='pullrequest_delete', request_method='POST',
1044 1100 renderer='json_ext')
1045 1101 def pull_request_delete(self):
1046 1102 _ = self.request.translate
1047 1103
1048 1104 pull_request = PullRequest.get_or_404(
1049 1105 self.request.matchdict['pull_request_id'])
1050 1106 self.load_default_context()
1051 1107
1052 1108 pr_closed = pull_request.is_closed()
1053 1109 allowed_to_delete = PullRequestModel().check_user_delete(
1054 1110 pull_request, self._rhodecode_user) and not pr_closed
1055 1111
1056 1112         # only the owner can delete it!
1057 1113 if allowed_to_delete:
1058 1114 PullRequestModel().delete(pull_request, self._rhodecode_user)
1059 1115 Session().commit()
1060 1116 h.flash(_('Successfully deleted pull request'),
1061 1117 category='success')
1062 1118 raise HTTPFound(h.route_path('pullrequest_show_all',
1063 1119 repo_name=self.db_repo_name))
1064 1120
1065 1121 log.warning('user %s tried to delete pull request without access',
1066 1122 self._rhodecode_user)
1067 1123 raise HTTPNotFound()
1068 1124
1069 1125 @LoginRequired()
1070 1126 @NotAnonymous()
1071 1127 @HasRepoPermissionAnyDecorator(
1072 1128 'repository.read', 'repository.write', 'repository.admin')
1073 1129 @CSRFRequired()
1074 1130 @view_config(
1075 1131 route_name='pullrequest_comment_create', request_method='POST',
1076 1132 renderer='json_ext')
1077 1133 def pull_request_comment_create(self):
1078 1134 _ = self.request.translate
1079 1135
1080 1136 pull_request = PullRequest.get_or_404(
1081 1137 self.request.matchdict['pull_request_id'])
1082 1138 pull_request_id = pull_request.pull_request_id
1083 1139
1084 1140 if pull_request.is_closed():
1085 1141 log.debug('comment: forbidden because pull request is closed')
1086 1142 raise HTTPForbidden()
1087 1143
1088 1144 allowed_to_comment = PullRequestModel().check_user_comment(
1089 1145 pull_request, self._rhodecode_user)
1090 1146 if not allowed_to_comment:
1091 1147 log.debug(
1092 1148 'comment: forbidden because pull request is from forbidden repo')
1093 1149 raise HTTPForbidden()
1094 1150
1095 1151 c = self.load_default_context()
1096 1152
1097 1153 status = self.request.POST.get('changeset_status', None)
1098 1154 text = self.request.POST.get('text')
1099 1155 comment_type = self.request.POST.get('comment_type')
1100 1156 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1101 1157 close_pull_request = self.request.POST.get('close_pull_request')
1102 1158
1103 1159         # the logic here works as follows: if we submit a close-PR
1104 1160         # comment, use the `close_pull_request_with_comment` function;
1105 1161         # otherwise handle the regular comment logic
1106 1162
1107 1163 if close_pull_request:
1108 1164 # only owner or admin or person with write permissions
1109 1165 allowed_to_close = PullRequestModel().check_user_update(
1110 1166 pull_request, self._rhodecode_user)
1111 1167 if not allowed_to_close:
1112 1168 log.debug('comment: forbidden because not allowed to close '
1113 1169 'pull request %s', pull_request_id)
1114 1170 raise HTTPForbidden()
1115 1171 comment, status = PullRequestModel().close_pull_request_with_comment(
1116 1172 pull_request, self._rhodecode_user, self.db_repo, message=text)
1117 1173 Session().flush()
1118 1174 events.trigger(
1119 1175 events.PullRequestCommentEvent(pull_request, comment))
1120 1176
1121 1177 else:
1122 1178 # regular comment case, could be inline, or one with status.
1123 1179 # for that one we check also permissions
1124 1180
1125 1181 allowed_to_change_status = PullRequestModel().check_user_change_status(
1126 1182 pull_request, self._rhodecode_user)
1127 1183
1128 1184 if status and allowed_to_change_status:
1129 1185 message = (_('Status change %(transition_icon)s %(status)s')
1130 1186 % {'transition_icon': '>',
1131 1187 'status': ChangesetStatus.get_status_lbl(status)})
1132 1188 text = text or message
1133 1189
1134 1190 comment = CommentsModel().create(
1135 1191 text=text,
1136 1192 repo=self.db_repo.repo_id,
1137 1193 user=self._rhodecode_user.user_id,
1138 1194 pull_request=pull_request,
1139 1195 f_path=self.request.POST.get('f_path'),
1140 1196 line_no=self.request.POST.get('line'),
1141 1197 status_change=(ChangesetStatus.get_status_lbl(status)
1142 1198 if status and allowed_to_change_status else None),
1143 1199 status_change_type=(status
1144 1200 if status and allowed_to_change_status else None),
1145 1201 comment_type=comment_type,
1146 1202 resolves_comment_id=resolves_comment_id
1147 1203 )
1148 1204
1149 1205 if allowed_to_change_status:
1150 1206 # calculate old status before we change it
1151 1207 old_calculated_status = pull_request.calculated_review_status()
1152 1208
1153 1209 # get status if set !
1154 1210 if status:
1155 1211 ChangesetStatusModel().set_status(
1156 1212 self.db_repo.repo_id,
1157 1213 status,
1158 1214 self._rhodecode_user.user_id,
1159 1215 comment,
1160 1216 pull_request=pull_request
1161 1217 )
1162 1218
1163 1219 Session().flush()
1164 1220 # this is somehow required to get access to some relationship
1165 1221 # loaded on comment
1166 1222 Session().refresh(comment)
1167 1223
1168 1224 events.trigger(
1169 1225 events.PullRequestCommentEvent(pull_request, comment))
1170 1226
1171 1227 # we now calculate the status of pull request, and based on that
1172 1228 # calculation we set the commits status
1173 1229 calculated_status = pull_request.calculated_review_status()
1174 1230 if old_calculated_status != calculated_status:
1175 1231 PullRequestModel()._trigger_pull_request_hook(
1176 1232 pull_request, self._rhodecode_user, 'review_status_change')
1177 1233
1178 1234 Session().commit()
1179 1235
1180 1236 data = {
1181 1237 'target_id': h.safeid(h.safe_unicode(
1182 1238 self.request.POST.get('f_path'))),
1183 1239 }
1184 1240 if comment:
1185 1241 c.co = comment
1186 1242 rendered_comment = render(
1187 1243 'rhodecode:templates/changeset/changeset_comment_block.mako',
1188 1244 self._get_template_context(c), self.request)
1189 1245
1190 1246 data.update(comment.get_dict())
1191 1247 data.update({'rendered_text': rendered_comment})
1192 1248
1193 1249 return data
1194 1250
1195 1251 @LoginRequired()
1196 1252 @NotAnonymous()
1197 1253 @HasRepoPermissionAnyDecorator(
1198 1254 'repository.read', 'repository.write', 'repository.admin')
1199 1255 @CSRFRequired()
1200 1256 @view_config(
1201 1257 route_name='pullrequest_comment_delete', request_method='POST',
1202 1258 renderer='json_ext')
1203 1259 def pull_request_comment_delete(self):
1204 1260 pull_request = PullRequest.get_or_404(
1205 1261 self.request.matchdict['pull_request_id'])
1206 1262
1207 1263 comment = ChangesetComment.get_or_404(
1208 1264 self.request.matchdict['comment_id'])
1209 1265 comment_id = comment.comment_id
1210 1266
1211 1267 if pull_request.is_closed():
1212 1268 log.debug('comment: forbidden because pull request is closed')
1213 1269 raise HTTPForbidden()
1214 1270
1215 1271 if not comment:
1216 1272 log.debug('Comment with id:%s not found, skipping', comment_id)
1217 1273 # comment already deleted in another call probably
1218 1274 return True
1219 1275
1220 1276 if comment.pull_request.is_closed():
1221 1277 # don't allow deleting comments on closed pull request
1222 1278 raise HTTPForbidden()
1223 1279
1224 1280 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1225 1281 super_admin = h.HasPermissionAny('hg.admin')()
1226 1282 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1227 1283 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1228 1284 comment_repo_admin = is_repo_admin and is_repo_comment
1229 1285
1230 1286 if super_admin or comment_owner or comment_repo_admin:
1231 1287 old_calculated_status = comment.pull_request.calculated_review_status()
1232 1288 CommentsModel().delete(comment=comment, user=self._rhodecode_user)
1233 1289 Session().commit()
1234 1290 calculated_status = comment.pull_request.calculated_review_status()
1235 1291 if old_calculated_status != calculated_status:
1236 1292 PullRequestModel()._trigger_pull_request_hook(
1237 1293 comment.pull_request, self._rhodecode_user, 'review_status_change')
1238 1294 return True
1239 1295 else:
1240 1296 log.warning('No permissions for user %s to delete comment_id: %s',
1241 1297 self._rhodecode_db_user, comment_id)
1242 1298 raise HTTPNotFound()
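
The pull request view above leans on three helpers newly imported from `rhodecode.lib.diffs` (`diff_cache_exist`, `load_cached_diff`, `cache_diff`) whose implementation is not part of this excerpt. A minimal sketch of how such helpers could behave, assuming a one-file-per-key pickle layout under the shadow cache path; the real module may store things differently:

    # Hypothetical sketch only; the real helpers live in rhodecode.lib.diffs.
    import os
    import pickle

    def diff_cache_exist(cache_storage, *args):
        # despite the name, this builds and returns the cache file path for the
        # given key parts; the file itself may not exist yet
        key = '-'.join(str(a) for a in args)
        return os.path.join(cache_storage, 'diff-cache-{}.bin'.format(key))

    def load_cached_diff(cache_file_path):
        # return the cached {'diff': ..., 'commits': ...} structure,
        # or None when the file is missing or unreadable
        if not os.path.isfile(cache_file_path):
            return None
        try:
            with open(cache_file_path, 'rb') as f:
                return pickle.load(f)
        except Exception:
            return None

    def cache_diff(cache_file_path, diff, commits):
        # persist the rendered diffset together with the commit tuple so both
        # can be reused on the next page load
        data = {'version': 'v1', 'diff': diff, 'commits': commits}
        try:
            with open(cache_file_path, 'wb') as f:
                pickle.dump(data, f)
        except Exception:
            pass

In the view, caching is gated per repository by `_is_diff_cache_enabled`, and a `?force_recache=1` query parameter skips the cached copy while still rewriting it afterwards.
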
@@ -1,748 +1,745 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import difflib
23 23 from itertools import groupby
24 24
25 25 from pygments import lex
26 26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 27 from pygments.lexers.special import TextLexer, Token
28 28
29 29 from rhodecode.lib.helpers import (
30 30 get_lexer_for_filenode, html_escape, get_custom_lexer)
31 31 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict
32 32 from rhodecode.lib.vcs.nodes import FileNode
33 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
33 34 from rhodecode.lib.diff_match_patch import diff_match_patch
34 35 from rhodecode.lib.diffs import LimitedDiffContainer
35 36 from pygments.lexers import get_lexer_by_name
36 37
37 38 plain_text_lexer = get_lexer_by_name(
38 39 'text', stripall=False, stripnl=False, ensurenl=False)
39 40
40 41
41 42 log = logging.getLogger(__name__)
42 43
43 44
44 45 def filenode_as_lines_tokens(filenode, lexer=None):
45 46 org_lexer = lexer
46 47 lexer = lexer or get_lexer_for_filenode(filenode)
47 48 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
48 49 lexer, filenode, org_lexer)
49 50 tokens = tokenize_string(filenode.content, lexer)
50 51 lines = split_token_stream(tokens)
51 52 rv = list(lines)
52 53 return rv
53 54
54 55
55 56 def tokenize_string(content, lexer):
56 57 """
57 58 Use pygments to tokenize some content based on a lexer
58 59 ensuring all original new lines and whitespace are preserved
59 60 """
60 61
61 62 lexer.stripall = False
62 63 lexer.stripnl = False
63 64 lexer.ensurenl = False
64 65
65 66 if isinstance(lexer, TextLexer):
66 67 lexed = [(Token.Text, content)]
67 68 else:
68 69 lexed = lex(content, lexer)
69 70
70 71 for token_type, token_text in lexed:
71 72 yield pygment_token_class(token_type), token_text
72 73
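A minimal usage sketch for tokenize_string, assuming a pygments lexer obtained via get_lexer_by_name (already imported in this module); each yielded pair is (css_class, text) with the original whitespace kept:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('python')
# because stripall/stripnl/ensurenl are forced off above, joining the token
# texts reproduces the input exactly
pairs = list(tokenize_string(u'x = 1\n', lexer))
assert u''.join(text for _cls, text in pairs) == u'x = 1\n'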
73 74
74 75 def split_token_stream(tokens):
75 76 """
76 77 Take a stream of (TokenType, text) tuples and split it into per-line lists, breaking on the newlines inside the text
77 78
78 79 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
79 80 [[(TEXT, 'some')], [(TEXT, 'text'), (TEXT, 'more')],
80 81 [(TEXT, '')]]
81 82 """
82 83
83 84 buffer = []
84 85 for token_class, token_text in tokens:
85 86 parts = token_text.split('\n')
86 87 for part in parts[:-1]:
87 88 buffer.append((token_class, part))
88 89 yield buffer
89 90 buffer = []
90 91
91 92 buffer.append((token_class, parts[-1]))
92 93
93 94 if buffer:
94 95 yield buffer
95 96
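A small illustration of the splitting behaviour, using plain tuples in place of real pygments output; a token whose text spans a newline is broken across the yielded per-line lists:

stream = [('', u'some\ntext'), ('', u'more\n')]
per_line = list(split_token_stream(stream))
# -> [[('', u'some')],
#     [('', u'text'), ('', u'more')],
#     [('', u'')]]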
96 97
97 98 def filenode_as_annotated_lines_tokens(filenode):
98 99 """
99 100 Take a file node and return a list of (annotation => lines) groups; if no
100 101 annotation is found for a group of lines, the annotation is None.
101 102
102 103 eg:
103 104
104 105 [
105 106 (annotation1, [
106 107 (1, line1_tokens_list),
107 108 (2, line2_tokens_list),
108 109 ]),
109 110 (annotation2, [
110 111 (3, line1_tokens_list),
111 112 ]),
112 113 (None, [
113 114 (4, line1_tokens_list),
114 115 ]),
115 116 (annotation1, [
116 117 (5, line1_tokens_list),
117 118 (6, line2_tokens_list),
118 119 ])
119 120 ]
120 121 """
121 122
122 123 commit_cache = {} # cache commit_getter lookups
123 124
124 125 def _get_annotation(commit_id, commit_getter):
125 126 if commit_id not in commit_cache:
126 127 commit_cache[commit_id] = commit_getter()
127 128 return commit_cache[commit_id]
128 129
129 130 annotation_lookup = {
130 131 line_no: _get_annotation(commit_id, commit_getter)
131 132 for line_no, commit_id, commit_getter, line_content
132 133 in filenode.annotate
133 134 }
134 135
135 136 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
136 137 for line_no, tokens
137 138 in enumerate(filenode_as_lines_tokens(filenode), 1))
138 139
139 140 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
140 141
141 142 for annotation, group in grouped_annotations_lines:
142 143 yield (
143 144 annotation, [(line_no, tokens)
144 145 for (_, line_no, tokens) in group]
145 146 )
146 147
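A consumption sketch; the filenode is assumed to come from a commit, e.g. commit.get_node('setup.py'), and each group pairs one annotation (a commit object, or None) with its consecutive lines:

for annotation, lines in filenode_as_annotated_lines_tokens(filenode):
    # annotation is the commit that last touched this block of lines, or None
    for line_no, tokens in lines:
        html = render_tokenstream(tokens)  # HTML for one highlighted line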
147 148
148 149 def render_tokenstream(tokenstream):
149 150 result = []
150 151 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
151 152
152 153 if token_class:
153 154 result.append(u'<span class="%s">' % token_class)
154 155 else:
155 156 result.append(u'<span>')
156 157
157 158 for op_tag, token_text in token_ops_texts:
158 159
159 160 if op_tag:
160 161 result.append(u'<%s>' % op_tag)
161 162
162 163 escaped_text = html_escape(token_text)
163 164
164 165 # TODO: dan: investigate showing hidden characters like space/nl/tab
165 166 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
166 167 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
167 168 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
168 169
169 170 result.append(escaped_text)
170 171
171 172 if op_tag:
172 173 result.append(u'</%s>' % op_tag)
173 174
174 175 result.append(u'</span>')
175 176
176 177 html = ''.join(result)
177 178 return html
178 179
179 180
180 181 def rollup_tokenstream(tokenstream):
181 182 """
182 183 Group a token stream of the format:
183 184
184 185 ('class', 'op', 'text')
185 186 or
186 187 ('class', 'text')
187 188
188 189 into
189 190
190 191 [('class1',
191 192 [('op1', 'text'),
192 193 ('op2', 'text')]),
193 194 ('class2',
194 195 [('op3', 'text')])]
195 196
196 197 This is used to emit the minimal tags necessary when
197 198 rendering to html, e.g. preferring
198 199
199 200 <span class="A"><ins>he</ins>llo</span>
200 201 vs
201 202 <span class="A"><ins>he</ins></span><span class="A">llo</span>
202 203
203 204 If a 2 tuple is passed in, the output op will be an empty string.
204 205
205 206 eg:
206 207
207 208 >>> rollup_tokenstream([('classA', '', 'h'),
208 209 ('classA', 'del', 'ell'),
209 210 ('classA', '', 'o'),
210 211 ('classB', '', ' '),
211 212 ('classA', '', 'the'),
212 213 ('classA', '', 're'),
213 214 ])
214 215
215 216 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')]),
216 217 ('classB', [('', ' ')]),
217 218 ('classA', [('', 'there')])]
218 219
219 220 """
220 221 if tokenstream and len(tokenstream[0]) == 2:
221 222 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
222 223
223 224 result = []
224 225 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
225 226 ops = []
226 227 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
227 228 text_buffer = []
228 229 for t_class, t_op, t_text in token_text_list:
229 230 text_buffer.append(t_text)
230 231 ops.append((token_op, ''.join(text_buffer)))
231 232 result.append((token_class, ops))
232 233 return result
233 234
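Putting render_tokenstream and rollup_tokenstream together, a short sketch of how a (class, op, text) stream ends up as HTML:

html = render_tokenstream([
    ('k', '', u'def'),
    ('', '', u' '),
    ('nf', 'ins', u'added_name'),
])
# -> '<span class="k">def</span><span> </span><span class="nf"><ins>added_name</ins></span>'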
234 235
235 236 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
236 237 """
237 238 Converts a list of (token_class, token_text) tuples to a list of
238 239 (token_class, token_op, token_text) tuples where token_op is one of
239 240 ('ins', 'del', '')
240 241
241 242 :param old_tokens: list of (token_class, token_text) tuples of old line
242 243 :param new_tokens: list of (token_class, token_text) tuples of new line
243 244 :param use_diff_match_patch: boolean, will use google's diff match patch
244 245 library which has options to 'smooth' out the character by character
245 246 differences making nicer ins/del blocks
246 247 """
247 248
248 249 old_tokens_result = []
249 250 new_tokens_result = []
250 251
251 252 similarity = difflib.SequenceMatcher(None,
252 253 ''.join(token_text for token_class, token_text in old_tokens),
253 254 ''.join(token_text for token_class, token_text in new_tokens)
254 255 ).ratio()
255 256
256 257 if similarity < 0.6: # return, the blocks are too different
257 258 for token_class, token_text in old_tokens:
258 259 old_tokens_result.append((token_class, '', token_text))
259 260 for token_class, token_text in new_tokens:
260 261 new_tokens_result.append((token_class, '', token_text))
261 262 return old_tokens_result, new_tokens_result, similarity
262 263
263 264 token_sequence_matcher = difflib.SequenceMatcher(None,
264 265 [x[1] for x in old_tokens],
265 266 [x[1] for x in new_tokens])
266 267
267 268 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
268 269 # check the differences by token block types first to give a
269 270 # nicer "block" level replacement vs character diffs
270 271
271 272 if tag == 'equal':
272 273 for token_class, token_text in old_tokens[o1:o2]:
273 274 old_tokens_result.append((token_class, '', token_text))
274 275 for token_class, token_text in new_tokens[n1:n2]:
275 276 new_tokens_result.append((token_class, '', token_text))
276 277 elif tag == 'delete':
277 278 for token_class, token_text in old_tokens[o1:o2]:
278 279 old_tokens_result.append((token_class, 'del', token_text))
279 280 elif tag == 'insert':
280 281 for token_class, token_text in new_tokens[n1:n2]:
281 282 new_tokens_result.append((token_class, 'ins', token_text))
282 283 elif tag == 'replace':
283 284 # if same type token blocks must be replaced, do a diff on the
284 285 # characters in the token blocks to show individual changes
285 286
286 287 old_char_tokens = []
287 288 new_char_tokens = []
288 289 for token_class, token_text in old_tokens[o1:o2]:
289 290 for char in token_text:
290 291 old_char_tokens.append((token_class, char))
291 292
292 293 for token_class, token_text in new_tokens[n1:n2]:
293 294 for char in token_text:
294 295 new_char_tokens.append((token_class, char))
295 296
296 297 old_string = ''.join([token_text for
297 298 token_class, token_text in old_char_tokens])
298 299 new_string = ''.join([token_text for
299 300 token_class, token_text in new_char_tokens])
300 301
301 302 char_sequence = difflib.SequenceMatcher(
302 303 None, old_string, new_string)
303 304 copcodes = char_sequence.get_opcodes()
304 305 obuffer, nbuffer = [], []
305 306
306 307 if use_diff_match_patch:
307 308 dmp = diff_match_patch()
308 309 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
309 310 reps = dmp.diff_main(old_string, new_string)
310 311 dmp.diff_cleanupEfficiency(reps)
311 312
312 313 a, b = 0, 0
313 314 for op, rep in reps:
314 315 l = len(rep)
315 316 if op == 0:
316 317 for i, c in enumerate(rep):
317 318 obuffer.append((old_char_tokens[a+i][0], '', c))
318 319 nbuffer.append((new_char_tokens[b+i][0], '', c))
319 320 a += l
320 321 b += l
321 322 elif op == -1:
322 323 for i, c in enumerate(rep):
323 324 obuffer.append((old_char_tokens[a+i][0], 'del', c))
324 325 a += l
325 326 elif op == 1:
326 327 for i, c in enumerate(rep):
327 328 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
328 329 b += l
329 330 else:
330 331 for ctag, co1, co2, cn1, cn2 in copcodes:
331 332 if ctag == 'equal':
332 333 for token_class, token_text in old_char_tokens[co1:co2]:
333 334 obuffer.append((token_class, '', token_text))
334 335 for token_class, token_text in new_char_tokens[cn1:cn2]:
335 336 nbuffer.append((token_class, '', token_text))
336 337 elif ctag == 'delete':
337 338 for token_class, token_text in old_char_tokens[co1:co2]:
338 339 obuffer.append((token_class, 'del', token_text))
339 340 elif ctag == 'insert':
340 341 for token_class, token_text in new_char_tokens[cn1:cn2]:
341 342 nbuffer.append((token_class, 'ins', token_text))
342 343 elif ctag == 'replace':
343 344 for token_class, token_text in old_char_tokens[co1:co2]:
344 345 obuffer.append((token_class, 'del', token_text))
345 346 for token_class, token_text in new_char_tokens[cn1:cn2]:
346 347 nbuffer.append((token_class, 'ins', token_text))
347 348
348 349 old_tokens_result.extend(obuffer)
349 350 new_tokens_result.extend(nbuffer)
350 351
351 352 return old_tokens_result, new_tokens_result, similarity
352 353
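A sketch of diffing two tokenized lines; for similar lines the result carries '', 'del' and 'ins' ops that feed straight into render_tokenstream:

old_line = [('', u'value = 1')]
new_line = [('', u'value = 2')]
old_ops, new_ops, similarity = tokens_diff(old_line, new_line)
# old_ops ends with a ('', 'del', u'1') entry, new_ops with ('', 'ins', u'2'),
# and similarity is the difflib ratio of the two raw strings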
353 354
355 def diffset_node_getter(commit):
356 def get_node(fname):
357 try:
358 return commit.get_node(fname)
359 except NodeDoesNotExistError:
360 return None
361
362 return get_node
363
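The helper above is meant to be partially applied per commit; a rough usage sketch, assuming source_commit and target_commit are vcs commit objects:

get_old_node = diffset_node_getter(source_commit)
get_new_node = diffset_node_getter(target_commit)
node = get_new_node('README.rst')  # FileNode, or None when the path is absent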
364
354 365 class DiffSet(object):
355 366 """
356 367 An object for parsing the diff result from diffs.DiffProcessor and
357 368 adding highlighting, side by side/unified renderings and line diffs
358 369 """
359 370
360 371 HL_REAL = 'REAL' # highlights using original file, slow
361 372 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
362 373 # in the case of multiline code
363 374 HL_NONE = 'NONE' # no highlighting, fastest
364 375
365 376 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
366 377 source_repo_name=None,
367 378 source_node_getter=lambda filename: None,
368 379 target_node_getter=lambda filename: None,
369 380 source_nodes=None, target_nodes=None,
370 381 max_file_size_limit=150 * 1024, # files over this size will
371 382 # use fast highlighting
372 383 comments=None,
373 384 ):
374 385
375 386 self.highlight_mode = highlight_mode
376 387 self.highlighted_filenodes = {}
377 388 self.source_node_getter = source_node_getter
378 389 self.target_node_getter = target_node_getter
379 390 self.source_nodes = source_nodes or {}
380 391 self.target_nodes = target_nodes or {}
381 392 self.repo_name = repo_name
382 393 self.source_repo_name = source_repo_name or repo_name
383 394 self.comments = comments or {}
384 395 self.comments_store = self.comments.copy()
385 396 self.max_file_size_limit = max_file_size_limit
386 397
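A hedged construction sketch for DiffSet; the node getters come from diffset_node_getter above, the patchset is the output of diffs.DiffProcessor.prepare(), and the commit objects are assumed to expose raw_id:

diffset = DiffSet(
    highlight_mode=DiffSet.HL_FAST,
    repo_name='some/repo',  # illustrative name
    source_node_getter=diffset_node_getter(source_commit),
    target_node_getter=diffset_node_getter(target_commit),
)
rendered = diffset.render_patchset(
    patchset,
    source_ref=source_commit.raw_id,
    target_ref=target_commit.raw_id)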
387 398 def render_patchset(self, patchset, source_ref=None, target_ref=None):
388 399 diffset = AttributeDict(dict(
389 400 lines_added=0,
390 401 lines_deleted=0,
391 402 changed_files=0,
392 403 files=[],
393 404 file_stats={},
394 405 limited_diff=isinstance(patchset, LimitedDiffContainer),
395 406 repo_name=self.repo_name,
396 407 source_repo_name=self.source_repo_name,
397 408 source_ref=source_ref,
398 409 target_ref=target_ref,
399 410 ))
400 411 for patch in patchset:
401 412 diffset.file_stats[patch['filename']] = patch['stats']
402 413 filediff = self.render_patch(patch)
403 414 filediff.diffset = StrictAttributeDict(dict(
404 415 source_ref=diffset.source_ref,
405 416 target_ref=diffset.target_ref,
406 417 repo_name=diffset.repo_name,
407 418 source_repo_name=diffset.source_repo_name,
408 419 ))
409 420 diffset.files.append(filediff)
410 421 diffset.changed_files += 1
411 422 if not patch['stats']['binary']:
412 423 diffset.lines_added += patch['stats']['added']
413 424 diffset.lines_deleted += patch['stats']['deleted']
414 425
415 426 return diffset
416 427
417 428 _lexer_cache = {}
418 429
419 430 def _get_lexer_for_filename(self, filename, filenode=None):
420 431 # cached because we might need to call it twice for source/target
421 432 if filename not in self._lexer_cache:
422 433 if filenode:
423 434 lexer = filenode.lexer
424 435 extension = filenode.extension
425 436 else:
426 437 lexer = FileNode.get_lexer(filename=filename)
427 438 extension = filename.split('.')[-1]
428 439
429 440 lexer = get_custom_lexer(extension) or lexer
430 441 self._lexer_cache[filename] = lexer
431 442 return self._lexer_cache[filename]
432 443
433 444 def render_patch(self, patch):
434 445 log.debug('rendering diff for %r' % patch['filename'])
435 446
436 447 source_filename = patch['original_filename']
437 448 target_filename = patch['filename']
438 449
439 450 source_lexer = plain_text_lexer
440 451 target_lexer = plain_text_lexer
441 452
442 453 if not patch['stats']['binary']:
443 454 if self.highlight_mode == self.HL_REAL:
444 455 if (source_filename and patch['operation'] in ('D', 'M')
445 456 and source_filename not in self.source_nodes):
446 457 self.source_nodes[source_filename] = (
447 458 self.source_node_getter(source_filename))
448 459
449 460 if (target_filename and patch['operation'] in ('A', 'M')
450 461 and target_filename not in self.target_nodes):
451 462 self.target_nodes[target_filename] = (
452 463 self.target_node_getter(target_filename))
453 464
454 465 elif self.highlight_mode == self.HL_FAST:
455 466 source_lexer = self._get_lexer_for_filename(source_filename)
456 467 target_lexer = self._get_lexer_for_filename(target_filename)
457 468
458 469 source_file = self.source_nodes.get(source_filename, source_filename)
459 470 target_file = self.target_nodes.get(target_filename, target_filename)
460 471
461 472 source_filenode, target_filenode = None, None
462 473
463 474 # TODO: dan: FileNode.lexer works on the content of the file - which
464 475 # can be slow - issue #4289 explains a lexer clean up - which once
465 476 # done can allow caching a lexer for a filenode to avoid the file lookup
466 477 if isinstance(source_file, FileNode):
467 478 source_filenode = source_file
468 479 #source_lexer = source_file.lexer
469 480 source_lexer = self._get_lexer_for_filename(source_filename)
470 481 source_file.lexer = source_lexer
471 482
472 483 if isinstance(target_file, FileNode):
473 484 target_filenode = target_file
474 485 #target_lexer = target_file.lexer
475 486 target_lexer = self._get_lexer_for_filename(target_filename)
476 487 target_file.lexer = target_lexer
477 488
478 489 source_file_path, target_file_path = None, None
479 490
480 491 if source_filename != '/dev/null':
481 492 source_file_path = source_filename
482 493 if target_filename != '/dev/null':
483 494 target_file_path = target_filename
484 495
485 496 source_file_type = source_lexer.name
486 497 target_file_type = target_lexer.name
487 498
488 499 filediff = AttributeDict({
489 500 'source_file_path': source_file_path,
490 501 'target_file_path': target_file_path,
491 502 'source_filenode': source_filenode,
492 503 'target_filenode': target_filenode,
493 504 'source_file_type': target_file_type,
494 505 'target_file_type': source_file_type,
495 506 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
496 507 'operation': patch['operation'],
497 508 'source_mode': patch['stats']['old_mode'],
498 509 'target_mode': patch['stats']['new_mode'],
499 510 'limited_diff': isinstance(patch, LimitedDiffContainer),
500 511 'hunks': [],
501 512 'diffset': self,
502 513 })
503 514
504 515 for hunk in patch['chunks'][1:]:
505 516 hunkbit = self.parse_hunk(hunk, source_file, target_file)
506 517 hunkbit.source_file_path = source_file_path
507 518 hunkbit.target_file_path = target_file_path
508 519 filediff.hunks.append(hunkbit)
509 520
510 521 left_comments = {}
511 522 if source_file_path in self.comments_store:
512 523 for lineno, comments in self.comments_store[source_file_path].items():
513 524 left_comments[lineno] = comments
514 525
515 526 if target_file_path in self.comments_store:
516 527 for lineno, comments in self.comments_store[target_file_path].items():
517 528 left_comments[lineno] = comments
529
518 530 # left comments are ones that we couldn't place in diff lines.
519 531 # could be outdated, or the diff changed and this line is no
520 532 # longer available
521 533 filediff.left_comments = left_comments
522 534
523 535 return filediff
524 536
525 537 def parse_hunk(self, hunk, source_file, target_file):
526 538 result = AttributeDict(dict(
527 539 source_start=hunk['source_start'],
528 540 source_length=hunk['source_length'],
529 541 target_start=hunk['target_start'],
530 542 target_length=hunk['target_length'],
531 543 section_header=hunk['section_header'],
532 544 lines=[],
533 545 ))
534 546 before, after = [], []
535 547
536 548 for line in hunk['lines']:
537 549
538 550 if line['action'] == 'unmod':
539 551 result.lines.extend(
540 552 self.parse_lines(before, after, source_file, target_file))
541 553 after.append(line)
542 554 before.append(line)
543 555 elif line['action'] == 'add':
544 556 after.append(line)
545 557 elif line['action'] == 'del':
546 558 before.append(line)
547 559 elif line['action'] == 'old-no-nl':
548 560 before.append(line)
549 561 elif line['action'] == 'new-no-nl':
550 562 after.append(line)
551 563
552 564 result.lines.extend(
553 565 self.parse_lines(before, after, source_file, target_file))
554 result.unified = self.as_unified(result.lines)
566 result.unified = list(self.as_unified(result.lines))
555 567 result.sideside = result.lines
556 568
557 569 return result
558 570
559 571 def parse_lines(self, before_lines, after_lines, source_file, target_file):
560 572 # TODO: dan: investigate doing the diff comparison and fast highlighting
561 573 # on the entire before and after buffered block lines rather than by
562 574 # line, this means we can get better 'fast' highlighting if the context
563 575 # allows it - eg.
564 576 # line 4: """
565 577 # line 5: this gets highlighted as a string
566 578 # line 6: """
567 579
568 580 lines = []
569 581
570 582 before_newline = AttributeDict()
571 583 after_newline = AttributeDict()
572 584 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
573 585 before_newline_line = before_lines.pop(-1)
574 586 before_newline.content = '\n {}'.format(
575 587 render_tokenstream(
576 588 [(x[0], '', x[1])
577 589 for x in [('nonl', before_newline_line['line'])]]))
578 590
579 591 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
580 592 after_newline_line = after_lines.pop(-1)
581 593 after_newline.content = '\n {}'.format(
582 594 render_tokenstream(
583 595 [(x[0], '', x[1])
584 596 for x in [('nonl', after_newline_line['line'])]]))
585 597
586 598 while before_lines or after_lines:
587 599 before, after = None, None
588 600 before_tokens, after_tokens = None, None
589 601
590 602 if before_lines:
591 603 before = before_lines.pop(0)
592 604 if after_lines:
593 605 after = after_lines.pop(0)
594 606
595 607 original = AttributeDict()
596 608 modified = AttributeDict()
597 609
598 610 if before:
599 611 if before['action'] == 'old-no-nl':
600 612 before_tokens = [('nonl', before['line'])]
601 613 else:
602 614 before_tokens = self.get_line_tokens(
603 615 line_text=before['line'],
604 616 line_number=before['old_lineno'],
605 617 file=source_file)
606 618 original.lineno = before['old_lineno']
607 619 original.content = before['line']
608 620 original.action = self.action_to_op(before['action'])
609 original.comments = self.get_comments_for('old',
610 source_file, before['old_lineno'])
621
622 original.get_comment_args = (
623 source_file, 'o', before['old_lineno'])
611 624
612 625 if after:
613 626 if after['action'] == 'new-no-nl':
614 627 after_tokens = [('nonl', after['line'])]
615 628 else:
616 629 after_tokens = self.get_line_tokens(
617 630 line_text=after['line'], line_number=after['new_lineno'],
618 631 file=target_file)
619 632 modified.lineno = after['new_lineno']
620 633 modified.content = after['line']
621 634 modified.action = self.action_to_op(after['action'])
622 modified.comments = self.get_comments_for('new',
623 target_file, after['new_lineno'])
635
636 modified.get_comment_args = (
637 target_file, 'n', after['new_lineno'])
624 638
625 639 # diff the lines
626 640 if before_tokens and after_tokens:
627 641 o_tokens, m_tokens, similarity = tokens_diff(
628 642 before_tokens, after_tokens)
629 643 original.content = render_tokenstream(o_tokens)
630 644 modified.content = render_tokenstream(m_tokens)
631 645 elif before_tokens:
632 646 original.content = render_tokenstream(
633 647 [(x[0], '', x[1]) for x in before_tokens])
634 648 elif after_tokens:
635 649 modified.content = render_tokenstream(
636 650 [(x[0], '', x[1]) for x in after_tokens])
637 651
638 652 if not before_lines and before_newline:
639 653 original.content += before_newline.content
640 654 before_newline = None
641 655 if not after_lines and after_newline:
642 656 modified.content += after_newline.content
643 657 after_newline = None
644 658
645 659 lines.append(AttributeDict({
646 660 'original': original,
647 661 'modified': modified,
648 662 }))
649 663
650 664 return lines
651 665
652 def get_comments_for(self, version, filename, line_number):
653 if hasattr(filename, 'unicode_path'):
654 filename = filename.unicode_path
655
656 if not isinstance(filename, basestring):
657 return None
658
659 line_key = {
660 'old': 'o',
661 'new': 'n',
662 }[version] + str(line_number)
663
664 if filename in self.comments_store:
665 file_comments = self.comments_store[filename]
666 if line_key in file_comments:
667 return file_comments.pop(line_key)
668
669 666 def get_line_tokens(self, line_text, line_number, file=None):
670 667 filenode = None
671 668 filename = None
672 669
673 670 if isinstance(file, basestring):
674 671 filename = file
675 672 elif isinstance(file, FileNode):
676 673 filenode = file
677 674 filename = file.unicode_path
678 675
679 676 if self.highlight_mode == self.HL_REAL and filenode:
680 677 lexer = self._get_lexer_for_filename(filename)
681 678 file_size_allowed = file.size < self.max_file_size_limit
682 679 if line_number and file_size_allowed:
683 680 return self.get_tokenized_filenode_line(
684 681 file, line_number, lexer)
685 682
686 683 if self.highlight_mode in (self.HL_REAL, self.HL_FAST) and filename:
687 684 lexer = self._get_lexer_for_filename(filename)
688 685 return list(tokenize_string(line_text, lexer))
689 686
690 687 return list(tokenize_string(line_text, plain_text_lexer))
691 688
692 689 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
693 690
694 691 if filenode not in self.highlighted_filenodes:
695 692 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
696 693 self.highlighted_filenodes[filenode] = tokenized_lines
697 694 return self.highlighted_filenodes[filenode][line_number - 1]
698 695
699 696 def action_to_op(self, action):
700 697 return {
701 698 'add': '+',
702 699 'del': '-',
703 700 'unmod': ' ',
704 701 'old-no-nl': ' ',
705 702 'new-no-nl': ' ',
706 703 }.get(action, action)
707 704
708 705 def as_unified(self, lines):
709 706 """
710 707 Return a generator that yields the lines of a diff in unified order
711 708 """
712 709 def generator():
713 710 buf = []
714 711 for line in lines:
715 712
716 713 if buf and not line.original or line.original.action == ' ':
717 714 for b in buf:
718 715 yield b
719 716 buf = []
720 717
721 718 if line.original:
722 719 if line.original.action == ' ':
723 720 yield (line.original.lineno, line.modified.lineno,
724 721 line.original.action, line.original.content,
725 line.original.comments)
722 line.original.get_comment_args)
726 723 continue
727 724
728 725 if line.original.action == '-':
729 726 yield (line.original.lineno, None,
730 727 line.original.action, line.original.content,
731 line.original.comments)
728 line.original.get_comment_args)
732 729
733 730 if line.modified.action == '+':
734 731 buf.append((
735 732 None, line.modified.lineno,
736 733 line.modified.action, line.modified.content,
737 line.modified.comments))
734 line.modified.get_comment_args))
738 735 continue
739 736
740 737 if line.modified:
741 738 yield (None, line.modified.lineno,
742 739 line.modified.action, line.modified.content,
743 line.modified.comments)
740 line.modified.get_comment_args)
744 741
745 742 for b in buf:
746 743 yield b
747 744
748 745 return generator()
@@ -1,1131 +1,1212 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Set of diffing helpers, previously part of vcs
24 24 """
25 25
26 import os
26 27 import re
27 28 import collections
28 29 import difflib
29 30 import logging
31 import cPickle as pickle
30 32
31 33 from itertools import tee, imap
32 34
33 35 from rhodecode.lib.vcs.exceptions import VCSError
34 36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
35 from rhodecode.lib.utils2 import safe_unicode
37 from rhodecode.lib.utils2 import safe_unicode, safe_str
36 38
37 39 log = logging.getLogger(__name__)
38 40
39 41 # define max context, a file with more than this numbers of lines is unusable
40 42 # in browser anyway
41 43 MAX_CONTEXT = 1024 * 1014
42 44
43 45
44 46 class OPS(object):
45 47 ADD = 'A'
46 48 MOD = 'M'
47 49 DEL = 'D'
48 50
49 51
50 52 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
51 53 """
52 54 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
53 55
54 56 :param ignore_whitespace: ignore whitespaces in diff
55 57 """
56 58 # make sure we pass in default context
57 59 context = context or 3
58 60 # protect against IntOverflow when passing HUGE context
59 61 if context > MAX_CONTEXT:
60 62 context = MAX_CONTEXT
61 63
62 64 submodules = filter(lambda o: isinstance(o, SubModuleNode),
63 65 [filenode_new, filenode_old])
64 66 if submodules:
65 67 return ''
66 68
67 69 for filenode in (filenode_old, filenode_new):
68 70 if not isinstance(filenode, FileNode):
69 71 raise VCSError(
70 72 "Given object should be FileNode object, not %s"
71 73 % filenode.__class__)
72 74
73 75 repo = filenode_new.commit.repository
74 76 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
75 77 new_commit = filenode_new.commit
76 78
77 79 vcs_gitdiff = repo.get_diff(
78 80 old_commit, new_commit, filenode_new.path,
79 81 ignore_whitespace, context, path1=filenode_old.path)
80 82 return vcs_gitdiff
81 83
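A usage sketch, assuming two FileNode objects for the same path taken from two commits of one repository (e.g. via commit.get_node(path)); the result is the raw git-style diff produced by the vcs backend:

old_node = old_commit.get_node('setup.py')
new_node = new_commit.get_node('setup.py')
raw_diff = get_gitdiff(old_node, new_node, ignore_whitespace=False, context=3)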
82 84 NEW_FILENODE = 1
83 85 DEL_FILENODE = 2
84 86 MOD_FILENODE = 3
85 87 RENAMED_FILENODE = 4
86 88 COPIED_FILENODE = 5
87 89 CHMOD_FILENODE = 6
88 90 BIN_FILENODE = 7
89 91
90 92
91 93 class LimitedDiffContainer(object):
92 94
93 95 def __init__(self, diff_limit, cur_diff_size, diff):
94 96 self.diff = diff
95 97 self.diff_limit = diff_limit
96 98 self.cur_diff_size = cur_diff_size
97 99
98 100 def __getitem__(self, key):
99 101 return self.diff.__getitem__(key)
100 102
101 103 def __iter__(self):
102 104 for l in self.diff:
103 105 yield l
104 106
105 107
106 108 class Action(object):
107 109 """
108 110 Contains constants for the action value of the lines in a parsed diff.
109 111 """
110 112
111 113 ADD = 'add'
112 114 DELETE = 'del'
113 115 UNMODIFIED = 'unmod'
114 116
115 117 CONTEXT = 'context'
116 118 OLD_NO_NL = 'old-no-nl'
117 119 NEW_NO_NL = 'new-no-nl'
118 120
119 121
120 122 class DiffProcessor(object):
121 123 """
122 124 Give it a unified or git diff and it returns a list of the files that were
123 125 mentioned in the diff together with a dict of meta information that
124 126 can be used to render it in a HTML template.
125 127
126 128 .. note:: Unicode handling
127 129
128 130 The original diffs are a byte sequence and can contain filenames
129 131 in mixed encodings. This class generally returns `unicode` objects
130 132 since the result is intended for presentation to the user.
131 133
132 134 """
133 135 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
134 136 _newline_marker = re.compile(r'^\\ No newline at end of file')
135 137
136 138 # used for inline highlighter word split
137 139 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
138 140
139 141 # collapse ranges of commits over given number
140 142 _collapse_commits_over = 5
141 143
142 144 def __init__(self, diff, format='gitdiff', diff_limit=None,
143 145 file_limit=None, show_full_diff=True):
144 146 """
145 147 :param diff: A `Diff` object representing a diff from a vcs backend
146 148 :param format: format of diff passed, `udiff` or `gitdiff`
147 149 :param diff_limit: defines the diff size that is considered "big";
148 150 beyond that threshold the cut off is triggered, set to None
149 151 to show the full diff
150 152 """
151 153 self._diff = diff
152 154 self._format = format
153 155 self.adds = 0
154 156 self.removes = 0
155 157 # calculate diff size
156 158 self.diff_limit = diff_limit
157 159 self.file_limit = file_limit
158 160 self.show_full_diff = show_full_diff
159 161 self.cur_diff_size = 0
160 162 self.parsed = False
161 163 self.parsed_diff = []
162 164
163 165 log.debug('Initialized DiffProcessor with %s mode', format)
164 166 if format == 'gitdiff':
165 167 self.differ = self._highlight_line_difflib
166 168 self._parser = self._parse_gitdiff
167 169 else:
168 170 self.differ = self._highlight_line_udiff
169 171 self._parser = self._new_parse_gitdiff
170 172
171 173 def _copy_iterator(self):
172 174 """
173 175 make a fresh copy of the generator; we should not iterate through
174 176 the original as it's needed for repeating operations on
175 177 this instance of DiffProcessor
176 178 """
177 179 self.__udiff, iterator_copy = tee(self.__udiff)
178 180 return iterator_copy
179 181
180 182 def _escaper(self, string):
181 183 """
182 184 Escaper for diff; escapes special chars and checks the diff limit
183 185
184 186 :param string:
185 187 """
186 188 self.cur_diff_size += len(string)
187 189
188 190 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
189 191 raise DiffLimitExceeded('Diff Limit Exceeded')
190 192
191 193 return string \
192 194 .replace('&', '&amp;')\
193 195 .replace('<', '&lt;')\
194 196 .replace('>', '&gt;')
195 197
196 198 def _line_counter(self, l):
197 199 """
198 200 Checks each line and bumps total adds/removes for this diff
199 201
200 202 :param l:
201 203 """
202 204 if l.startswith('+') and not l.startswith('+++'):
203 205 self.adds += 1
204 206 elif l.startswith('-') and not l.startswith('---'):
205 207 self.removes += 1
206 208 return safe_unicode(l)
207 209
208 210 def _highlight_line_difflib(self, line, next_):
209 211 """
210 212 Highlight inline changes in both lines.
211 213 """
212 214
213 215 if line['action'] == Action.DELETE:
214 216 old, new = line, next_
215 217 else:
216 218 old, new = next_, line
217 219
218 220 oldwords = self._token_re.split(old['line'])
219 221 newwords = self._token_re.split(new['line'])
220 222 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
221 223
222 224 oldfragments, newfragments = [], []
223 225 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
224 226 oldfrag = ''.join(oldwords[i1:i2])
225 227 newfrag = ''.join(newwords[j1:j2])
226 228 if tag != 'equal':
227 229 if oldfrag:
228 230 oldfrag = '<del>%s</del>' % oldfrag
229 231 if newfrag:
230 232 newfrag = '<ins>%s</ins>' % newfrag
231 233 oldfragments.append(oldfrag)
232 234 newfragments.append(newfrag)
233 235
234 236 old['line'] = "".join(oldfragments)
235 237 new['line'] = "".join(newfragments)
236 238
237 239 def _highlight_line_udiff(self, line, next_):
238 240 """
239 241 Highlight inline changes in both lines.
240 242 """
241 243 start = 0
242 244 limit = min(len(line['line']), len(next_['line']))
243 245 while start < limit and line['line'][start] == next_['line'][start]:
244 246 start += 1
245 247 end = -1
246 248 limit -= start
247 249 while -end <= limit and line['line'][end] == next_['line'][end]:
248 250 end -= 1
249 251 end += 1
250 252 if start or end:
251 253 def do(l):
252 254 last = end + len(l['line'])
253 255 if l['action'] == Action.ADD:
254 256 tag = 'ins'
255 257 else:
256 258 tag = 'del'
257 259 l['line'] = '%s<%s>%s</%s>%s' % (
258 260 l['line'][:start],
259 261 tag,
260 262 l['line'][start:last],
261 263 tag,
262 264 l['line'][last:]
263 265 )
264 266 do(line)
265 267 do(next_)
266 268
267 269 def _clean_line(self, line, command):
268 270 if command in ['+', '-', ' ']:
269 271 # only modify the line if it's actually a diff thing
270 272 line = line[1:]
271 273 return line
272 274
273 275 def _parse_gitdiff(self, inline_diff=True):
274 276 _files = []
275 277 diff_container = lambda arg: arg
276 278
277 279 for chunk in self._diff.chunks():
278 280 head = chunk.header
279 281
280 282 diff = imap(self._escaper, self.diff_splitter(chunk.diff))
281 283 raw_diff = chunk.raw
282 284 limited_diff = False
283 285 exceeds_limit = False
284 286
285 287 op = None
286 288 stats = {
287 289 'added': 0,
288 290 'deleted': 0,
289 291 'binary': False,
290 292 'ops': {},
291 293 }
292 294
293 295 if head['deleted_file_mode']:
294 296 op = OPS.DEL
295 297 stats['binary'] = True
296 298 stats['ops'][DEL_FILENODE] = 'deleted file'
297 299
298 300 elif head['new_file_mode']:
299 301 op = OPS.ADD
300 302 stats['binary'] = True
301 303 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
302 304 else: # modify operation, can be copy, rename or chmod
303 305
304 306 # CHMOD
305 307 if head['new_mode'] and head['old_mode']:
306 308 op = OPS.MOD
307 309 stats['binary'] = True
308 310 stats['ops'][CHMOD_FILENODE] = (
309 311 'modified file chmod %s => %s' % (
310 312 head['old_mode'], head['new_mode']))
311 313 # RENAME
312 314 if head['rename_from'] != head['rename_to']:
313 315 op = OPS.MOD
314 316 stats['binary'] = True
315 317 stats['ops'][RENAMED_FILENODE] = (
316 318 'file renamed from %s to %s' % (
317 319 head['rename_from'], head['rename_to']))
318 320 # COPY
319 321 if head.get('copy_from') and head.get('copy_to'):
320 322 op = OPS.MOD
321 323 stats['binary'] = True
322 324 stats['ops'][COPIED_FILENODE] = (
323 325 'file copied from %s to %s' % (
324 326 head['copy_from'], head['copy_to']))
325 327
326 328 # If our new parsed headers didn't match anything, fall back to
327 329 # old style detection
328 330 if op is None:
329 331 if not head['a_file'] and head['b_file']:
330 332 op = OPS.ADD
331 333 stats['binary'] = True
332 334 stats['ops'][NEW_FILENODE] = 'new file'
333 335
334 336 elif head['a_file'] and not head['b_file']:
335 337 op = OPS.DEL
336 338 stats['binary'] = True
337 339 stats['ops'][DEL_FILENODE] = 'deleted file'
338 340
339 341 # it's not ADD not DELETE
340 342 if op is None:
341 343 op = OPS.MOD
342 344 stats['binary'] = True
343 345 stats['ops'][MOD_FILENODE] = 'modified file'
344 346
345 347 # a real non-binary diff
346 348 if head['a_file'] or head['b_file']:
347 349 try:
348 350 raw_diff, chunks, _stats = self._parse_lines(diff)
349 351 stats['binary'] = False
350 352 stats['added'] = _stats[0]
351 353 stats['deleted'] = _stats[1]
352 354 # explicit mark that it's a modified file
353 355 if op == OPS.MOD:
354 356 stats['ops'][MOD_FILENODE] = 'modified file'
355 357 exceeds_limit = len(raw_diff) > self.file_limit
356 358
357 359 # changed from _escaper function so we validate size of
358 360 # each file instead of the whole diff
359 361 # diff will hide big files but still show small ones
360 362 # from my tests, big files are fairly safe to be parsed
361 363 # but the browser is the bottleneck
362 364 if not self.show_full_diff and exceeds_limit:
363 365 raise DiffLimitExceeded('File Limit Exceeded')
364 366
365 367 except DiffLimitExceeded:
366 368 diff_container = lambda _diff: \
367 369 LimitedDiffContainer(
368 370 self.diff_limit, self.cur_diff_size, _diff)
369 371
370 372 exceeds_limit = len(raw_diff) > self.file_limit
371 373 limited_diff = True
372 374 chunks = []
373 375
374 376 else: # GIT format binary patch, or possibly empty diff
375 377 if head['bin_patch']:
376 378 # the operation is already extracted, but we simply mark that
377 379 # it's a diff we won't show for binary files
378 380 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
379 381 chunks = []
380 382
381 383 if chunks and not self.show_full_diff and op == OPS.DEL:
382 384 # if not full diff mode show deleted file contents
383 385 # TODO: anderson: if the view is not too big, there is no way
384 386 # to see the content of the file
385 387 chunks = []
386 388
387 389 chunks.insert(0, [{
388 390 'old_lineno': '',
389 391 'new_lineno': '',
390 392 'action': Action.CONTEXT,
391 393 'line': msg,
392 394 } for _op, msg in stats['ops'].iteritems()
393 395 if _op not in [MOD_FILENODE]])
394 396
395 397 _files.append({
396 398 'filename': safe_unicode(head['b_path']),
397 399 'old_revision': head['a_blob_id'],
398 400 'new_revision': head['b_blob_id'],
399 401 'chunks': chunks,
400 402 'raw_diff': safe_unicode(raw_diff),
401 403 'operation': op,
402 404 'stats': stats,
403 405 'exceeds_limit': exceeds_limit,
404 406 'is_limited_diff': limited_diff,
405 407 })
406 408
407 409 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
408 410 OPS.DEL: 2}.get(info['operation'])
409 411
410 412 if not inline_diff:
411 413 return diff_container(sorted(_files, key=sorter))
412 414
413 415 # highlight inline changes
414 416 for diff_data in _files:
415 417 for chunk in diff_data['chunks']:
416 418 lineiter = iter(chunk)
417 419 try:
418 420 while 1:
419 421 line = lineiter.next()
420 422 if line['action'] not in (
421 423 Action.UNMODIFIED, Action.CONTEXT):
422 424 nextline = lineiter.next()
423 425 if nextline['action'] in ['unmod', 'context'] or \
424 426 nextline['action'] == line['action']:
425 427 continue
426 428 self.differ(line, nextline)
427 429 except StopIteration:
428 430 pass
429 431
430 432 return diff_container(sorted(_files, key=sorter))
431 433
432 434 def _check_large_diff(self):
433 435 log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
434 436 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
435 437 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
436 438
437 439 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
438 440 def _new_parse_gitdiff(self, inline_diff=True):
439 441 _files = []
440 442
441 443 # this can be overridden later to a LimitedDiffContainer type
442 444 diff_container = lambda arg: arg
443 445
444 446 for chunk in self._diff.chunks():
445 447 head = chunk.header
446 448 log.debug('parsing diff %r' % head)
447 449
448 450 raw_diff = chunk.raw
449 451 limited_diff = False
450 452 exceeds_limit = False
451 453
452 454 op = None
453 455 stats = {
454 456 'added': 0,
455 457 'deleted': 0,
456 458 'binary': False,
457 459 'old_mode': None,
458 460 'new_mode': None,
459 461 'ops': {},
460 462 }
461 463 if head['old_mode']:
462 464 stats['old_mode'] = head['old_mode']
463 465 if head['new_mode']:
464 466 stats['new_mode'] = head['new_mode']
465 467 if head['b_mode']:
466 468 stats['new_mode'] = head['b_mode']
467 469
468 470 # delete file
469 471 if head['deleted_file_mode']:
470 472 op = OPS.DEL
471 473 stats['binary'] = True
472 474 stats['ops'][DEL_FILENODE] = 'deleted file'
473 475
474 476 # new file
475 477 elif head['new_file_mode']:
476 478 op = OPS.ADD
477 479 stats['binary'] = True
478 480 stats['old_mode'] = None
479 481 stats['new_mode'] = head['new_file_mode']
480 482 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
481 483
482 484 # modify operation, can be copy, rename or chmod
483 485 else:
484 486 # CHMOD
485 487 if head['new_mode'] and head['old_mode']:
486 488 op = OPS.MOD
487 489 stats['binary'] = True
488 490 stats['ops'][CHMOD_FILENODE] = (
489 491 'modified file chmod %s => %s' % (
490 492 head['old_mode'], head['new_mode']))
491 493
492 494 # RENAME
493 495 if head['rename_from'] != head['rename_to']:
494 496 op = OPS.MOD
495 497 stats['binary'] = True
496 498 stats['renamed'] = (head['rename_from'], head['rename_to'])
497 499 stats['ops'][RENAMED_FILENODE] = (
498 500 'file renamed from %s to %s' % (
499 501 head['rename_from'], head['rename_to']))
500 502 # COPY
501 503 if head.get('copy_from') and head.get('copy_to'):
502 504 op = OPS.MOD
503 505 stats['binary'] = True
504 506 stats['copied'] = (head['copy_from'], head['copy_to'])
505 507 stats['ops'][COPIED_FILENODE] = (
506 508 'file copied from %s to %s' % (
507 509 head['copy_from'], head['copy_to']))
508 510
509 511 # If our new parsed headers didn't match anything, fall back to
510 512 # old style detection
511 513 if op is None:
512 514 if not head['a_file'] and head['b_file']:
513 515 op = OPS.ADD
514 516 stats['binary'] = True
515 517 stats['new_file'] = True
516 518 stats['ops'][NEW_FILENODE] = 'new file'
517 519
518 520 elif head['a_file'] and not head['b_file']:
519 521 op = OPS.DEL
520 522 stats['binary'] = True
521 523 stats['ops'][DEL_FILENODE] = 'deleted file'
522 524
523 525 # it's not ADD not DELETE
524 526 if op is None:
525 527 op = OPS.MOD
526 528 stats['binary'] = True
527 529 stats['ops'][MOD_FILENODE] = 'modified file'
528 530
529 531 # a real non-binary diff
530 532 if head['a_file'] or head['b_file']:
531 533 # simulate splitlines, so we keep the line end part
532 534 diff = self.diff_splitter(chunk.diff)
533 535
534 536 # append each file to the diff size
535 537 raw_chunk_size = len(raw_diff)
536 538
537 539 exceeds_limit = raw_chunk_size > self.file_limit
538 540 self.cur_diff_size += raw_chunk_size
539 541
540 542 try:
541 543 # Check each file instead of the whole diff.
542 544 # Diff will hide big files but still show small ones.
543 545 # From the tests big files are fairly safe to be parsed
544 546 # but the browser is the bottleneck.
545 547 if not self.show_full_diff and exceeds_limit:
546 548 log.debug('File `%s` exceeds current file_limit of %s',
547 549 safe_unicode(head['b_path']), self.file_limit)
548 550 raise DiffLimitExceeded(
549 551 'File Limit %s Exceeded', self.file_limit)
550 552
551 553 self._check_large_diff()
552 554
553 555 raw_diff, chunks, _stats = self._new_parse_lines(diff)
554 556 stats['binary'] = False
555 557 stats['added'] = _stats[0]
556 558 stats['deleted'] = _stats[1]
557 559 # explicit mark that it's a modified file
558 560 if op == OPS.MOD:
559 561 stats['ops'][MOD_FILENODE] = 'modified file'
560 562
561 563 except DiffLimitExceeded:
562 564 diff_container = lambda _diff: \
563 565 LimitedDiffContainer(
564 566 self.diff_limit, self.cur_diff_size, _diff)
565 567
566 568 limited_diff = True
567 569 chunks = []
568 570
569 571 else: # GIT format binary patch, or possibly empty diff
570 572 if head['bin_patch']:
571 573 # the operation is already extracted, but we simply mark that
572 574 # it's a diff we won't show for binary files
573 575 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
574 576 chunks = []
575 577
576 578 # Hide content of deleted node by setting empty chunks
577 579 if chunks and not self.show_full_diff and op == OPS.DEL:
578 580 # if not full diff mode show deleted file contents
579 581 # TODO: anderson: if the view is not too big, there is no way
580 582 # to see the content of the file
581 583 chunks = []
582 584
583 585 chunks.insert(
584 586 0, [{'old_lineno': '',
585 587 'new_lineno': '',
586 588 'action': Action.CONTEXT,
587 589 'line': msg,
588 590 } for _op, msg in stats['ops'].iteritems()
589 591 if _op not in [MOD_FILENODE]])
590 592
591 593 original_filename = safe_unicode(head['a_path'])
592 594 _files.append({
593 595 'original_filename': original_filename,
594 596 'filename': safe_unicode(head['b_path']),
595 597 'old_revision': head['a_blob_id'],
596 598 'new_revision': head['b_blob_id'],
597 599 'chunks': chunks,
598 600 'raw_diff': safe_unicode(raw_diff),
599 601 'operation': op,
600 602 'stats': stats,
601 603 'exceeds_limit': exceeds_limit,
602 604 'is_limited_diff': limited_diff,
603 605 })
604 606
605 607 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
606 608 OPS.DEL: 2}.get(info['operation'])
607 609
608 610 return diff_container(sorted(_files, key=sorter))
609 611
610 612 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
611 613 def _parse_lines(self, diff_iter):
612 614 """
613 615 Parse the diff and return data for the template.
614 616 """
615 617
616 618 stats = [0, 0]
617 619 chunks = []
618 620 raw_diff = []
619 621
620 622 try:
621 623 line = diff_iter.next()
622 624
623 625 while line:
624 626 raw_diff.append(line)
625 627 lines = []
626 628 chunks.append(lines)
627 629
628 630 match = self._chunk_re.match(line)
629 631
630 632 if not match:
631 633 break
632 634
633 635 gr = match.groups()
634 636 (old_line, old_end,
635 637 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
636 638 old_line -= 1
637 639 new_line -= 1
638 640
639 641 context = len(gr) == 5
640 642 old_end += old_line
641 643 new_end += new_line
642 644
643 645 if context:
644 646 # skip context only if it's first line
645 647 if int(gr[0]) > 1:
646 648 lines.append({
647 649 'old_lineno': '...',
648 650 'new_lineno': '...',
649 651 'action': Action.CONTEXT,
650 652 'line': line,
651 653 })
652 654
653 655 line = diff_iter.next()
654 656
655 657 while old_line < old_end or new_line < new_end:
656 658 command = ' '
657 659 if line:
658 660 command = line[0]
659 661
660 662 affects_old = affects_new = False
661 663
662 664 # ignore those if we don't expect them
663 665 if command in '#@':
664 666 continue
665 667 elif command == '+':
666 668 affects_new = True
667 669 action = Action.ADD
668 670 stats[0] += 1
669 671 elif command == '-':
670 672 affects_old = True
671 673 action = Action.DELETE
672 674 stats[1] += 1
673 675 else:
674 676 affects_old = affects_new = True
675 677 action = Action.UNMODIFIED
676 678
677 679 if not self._newline_marker.match(line):
678 680 old_line += affects_old
679 681 new_line += affects_new
680 682 lines.append({
681 683 'old_lineno': affects_old and old_line or '',
682 684 'new_lineno': affects_new and new_line or '',
683 685 'action': action,
684 686 'line': self._clean_line(line, command)
685 687 })
686 688 raw_diff.append(line)
687 689
688 690 line = diff_iter.next()
689 691
690 692 if self._newline_marker.match(line):
691 693 # we need to append to lines, since this is not
692 694 # counted in the line specs of diff
693 695 lines.append({
694 696 'old_lineno': '...',
695 697 'new_lineno': '...',
696 698 'action': Action.CONTEXT,
697 699 'line': self._clean_line(line, command)
698 700 })
699 701
700 702 except StopIteration:
701 703 pass
702 704 return ''.join(raw_diff), chunks, stats
703 705
704 706 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
705 707 def _new_parse_lines(self, diff_iter):
706 708 """
707 709 Parse the diff and return data for the template.
708 710 """
709 711
710 712 stats = [0, 0]
711 713 chunks = []
712 714 raw_diff = []
713 715
714 716 try:
715 717 line = diff_iter.next()
716 718
717 719 while line:
718 720 raw_diff.append(line)
719 721 # match a hunk header, e.g. '@@ -0,0 +1 @@\n'
720 722 match = self._chunk_re.match(line)
721 723
722 724 if not match:
723 725 break
724 726
725 727 gr = match.groups()
726 728 (old_line, old_end,
727 729 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
728 730
729 731 lines = []
730 732 hunk = {
731 733 'section_header': gr[-1],
732 734 'source_start': old_line,
733 735 'source_length': old_end,
734 736 'target_start': new_line,
735 737 'target_length': new_end,
736 738 'lines': lines,
737 739 }
738 740 chunks.append(hunk)
739 741
740 742 old_line -= 1
741 743 new_line -= 1
742 744
743 745 context = len(gr) == 5
744 746 old_end += old_line
745 747 new_end += new_line
746 748
747 749 line = diff_iter.next()
748 750
749 751 while old_line < old_end or new_line < new_end:
750 752 command = ' '
751 753 if line:
752 754 command = line[0]
753 755
754 756 affects_old = affects_new = False
755 757
756 758 # ignore those if we don't expect them
757 759 if command in '#@':
758 760 continue
759 761 elif command == '+':
760 762 affects_new = True
761 763 action = Action.ADD
762 764 stats[0] += 1
763 765 elif command == '-':
764 766 affects_old = True
765 767 action = Action.DELETE
766 768 stats[1] += 1
767 769 else:
768 770 affects_old = affects_new = True
769 771 action = Action.UNMODIFIED
770 772
771 773 if not self._newline_marker.match(line):
772 774 old_line += affects_old
773 775 new_line += affects_new
774 776 lines.append({
775 777 'old_lineno': affects_old and old_line or '',
776 778 'new_lineno': affects_new and new_line or '',
777 779 'action': action,
778 780 'line': self._clean_line(line, command)
779 781 })
780 782 raw_diff.append(line)
781 783
782 784 line = diff_iter.next()
783 785
784 786 if self._newline_marker.match(line):
785 787 # we need to append to lines, since this is not
786 788 # counted in the line specs of diff
787 789 if affects_old:
788 790 action = Action.OLD_NO_NL
789 791 elif affects_new:
790 792 action = Action.NEW_NO_NL
791 793 else:
792 794 raise Exception('invalid context for no newline')
793 795
794 796 lines.append({
795 797 'old_lineno': None,
796 798 'new_lineno': None,
797 799 'action': action,
798 800 'line': self._clean_line(line, command)
799 801 })
800 802
801 803 except StopIteration:
802 804 pass
803 805
804 806 return ''.join(raw_diff), chunks, stats
805 807
806 808 def _safe_id(self, idstring):
807 809 """Make a string safe for including in an id attribute.
808 810
809 811 The HTML spec says that id attributes 'must begin with
810 812 a letter ([A-Za-z]) and may be followed by any number
811 813 of letters, digits ([0-9]), hyphens ("-"), underscores
812 814 ("_"), colons (":"), and periods (".")'. These regexps
813 815 are slightly over-zealous, in that they remove colons
814 816 and periods unnecessarily.
815 817
816 818 Whitespace is transformed into underscores, and then
817 819 anything which is not a hyphen or a character that
818 820 matches \w (alphanumerics and underscore) is removed.
819 821
820 822 """
821 823 # Transform all whitespace to underscore
822 824 idstring = re.sub(r'\s', "_", '%s' % idstring)
823 825 # Remove everything that is not a hyphen or a member of \w
824 826 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
825 827 return idstring
826 828
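For illustration, the sanitising roughly behaves like this (whitespace becomes '_', other characters outside [-\w] are dropped, and the result is lowercased):

# given an existing processor instance (see prepare() below)
processor._safe_id(u'docs/My File.rst')  # -> u'docsmy_filerst'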
827 829 @classmethod
828 830 def diff_splitter(cls, string):
829 831 """
830 832 Diff split that emulates .splitlines() but works only on \n
831 833 """
832 834 if not string:
833 835 return
834 836 elif string == '\n':
835 837 yield u'\n'
836 838 else:
837 839
838 840 has_newline = string.endswith('\n')
839 841 elements = string.split('\n')
840 842 if has_newline:
841 843 # skip last element as it's empty string from newlines
842 844 elements = elements[:-1]
843 845
844 846 len_elements = len(elements)
845 847
846 848 for cnt, line in enumerate(elements, start=1):
847 849 last_line = cnt == len_elements
848 850 if last_line and not has_newline:
849 851 yield safe_unicode(line)
850 852 else:
851 853 yield safe_unicode(line) + '\n'
852 854
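The splitter keeps the trailing newline of every line except a final line that lacks one:

list(DiffProcessor.diff_splitter(u'line one\nline two'))
# -> [u'line one\n', u'line two']
list(DiffProcessor.diff_splitter(u'line one\nline two\n'))
# -> [u'line one\n', u'line two\n']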
853 855 def prepare(self, inline_diff=True):
854 856 """
855 857 Prepare the passed udiff for HTML rendering.
856 858
857 859 :return: A list of dicts with diff information.
858 860 """
859 861 parsed = self._parser(inline_diff=inline_diff)
860 862 self.parsed = True
861 863 self.parsed_diff = parsed
862 864 return parsed
863 865
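An end-to-end sketch of the typical flow; vcs_repo, old_commit and new_commit are assumed backend objects, and the get_diff call mirrors the one in get_gitdiff() above:

vcs_diff = vcs_repo.get_diff(
    old_commit, new_commit, 'setup.py', False, 3, path1='setup.py')
processor = DiffProcessor(
    vcs_diff, diff_limit=1024 * 1024, file_limit=256 * 1024,
    show_full_diff=False)
parsed = processor.prepare()
# each entry carries 'filename', 'operation', 'stats' and the parsed 'chunks'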
864 866 def as_raw(self, diff_lines=None):
865 867 """
866 868 Returns raw diff as a byte string
867 869 """
868 870 return self._diff.raw
869 871
870 872 def as_html(self, table_class='code-difftable', line_class='line',
871 873 old_lineno_class='lineno old', new_lineno_class='lineno new',
872 874 code_class='code', enable_comments=False, parsed_lines=None):
873 875 """
874 876 Return given diff as html table with customized css classes
875 877 """
876 878 # TODO(marcink): not sure how to pass in translator
877 879 # here in an efficient way, leave the _ for proper gettext extraction
878 880 _ = lambda s: s
879 881
880 882 def _link_to_if(condition, label, url):
881 883 """
882 884 Generates a link if the condition is met, or just the label if not.
883 885 """
884 886
885 887 if condition:
886 888 return '''<a href="%(url)s" class="tooltip"
887 889 title="%(title)s">%(label)s</a>''' % {
888 890 'title': _('Click to select line'),
889 891 'url': url,
890 892 'label': label
891 893 }
892 894 else:
893 895 return label
894 896 if not self.parsed:
895 897 self.prepare()
896 898
897 899 diff_lines = self.parsed_diff
898 900 if parsed_lines:
899 901 diff_lines = parsed_lines
900 902
901 903 _html_empty = True
902 904 _html = []
903 905 _html.append('''<table class="%(table_class)s">\n''' % {
904 906 'table_class': table_class
905 907 })
906 908
907 909 for diff in diff_lines:
908 910 for line in diff['chunks']:
909 911 _html_empty = False
910 912 for change in line:
911 913 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
912 914 'lc': line_class,
913 915 'action': change['action']
914 916 })
915 917 anchor_old_id = ''
916 918 anchor_new_id = ''
917 919 anchor_old = "%(filename)s_o%(oldline_no)s" % {
918 920 'filename': self._safe_id(diff['filename']),
919 921 'oldline_no': change['old_lineno']
920 922 }
921 923 anchor_new = "%(filename)s_n%(oldline_no)s" % {
922 924 'filename': self._safe_id(diff['filename']),
923 925 'oldline_no': change['new_lineno']
924 926 }
925 927 cond_old = (change['old_lineno'] != '...' and
926 928 change['old_lineno'])
927 929 cond_new = (change['new_lineno'] != '...' and
928 930 change['new_lineno'])
929 931 if cond_old:
930 932 anchor_old_id = 'id="%s"' % anchor_old
931 933 if cond_new:
932 934 anchor_new_id = 'id="%s"' % anchor_new
933 935
934 936 if change['action'] != Action.CONTEXT:
935 937 anchor_link = True
936 938 else:
937 939 anchor_link = False
938 940
939 941 ###########################################################
940 942 # COMMENT ICONS
941 943 ###########################################################
942 944 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
943 945
944 946 if enable_comments and change['action'] != Action.CONTEXT:
945 947 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
946 948
947 949 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
948 950
949 951 ###########################################################
950 952 # OLD LINE NUMBER
951 953 ###########################################################
952 954 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
953 955 'a_id': anchor_old_id,
954 956 'olc': old_lineno_class
955 957 })
956 958
957 959 _html.append('''%(link)s''' % {
958 960 'link': _link_to_if(anchor_link, change['old_lineno'],
959 961 '#%s' % anchor_old)
960 962 })
961 963 _html.append('''</td>\n''')
962 964 ###########################################################
963 965 # NEW LINE NUMBER
964 966 ###########################################################
965 967
966 968 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
967 969 'a_id': anchor_new_id,
968 970 'nlc': new_lineno_class
969 971 })
970 972
971 973 _html.append('''%(link)s''' % {
972 974 'link': _link_to_if(anchor_link, change['new_lineno'],
973 975 '#%s' % anchor_new)
974 976 })
975 977 _html.append('''</td>\n''')
976 978 ###########################################################
977 979 # CODE
978 980 ###########################################################
979 981 code_classes = [code_class]
980 982 if (not enable_comments or
981 983 change['action'] == Action.CONTEXT):
982 984 code_classes.append('no-comment')
983 985 _html.append('\t<td class="%s">' % ' '.join(code_classes))
984 986 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
985 987 'code': change['line']
986 988 })
987 989
988 990 _html.append('''\t</td>''')
989 991 _html.append('''\n</tr>\n''')
990 992 _html.append('''</table>''')
991 993 if _html_empty:
992 994 return None
993 995 return ''.join(_html)
994 996
995 997 def stat(self):
996 998 """
997 999 Returns a tuple of added and removed lines for this instance
998 1000 """
999 1001 return self.adds, self.removes
1000 1002
1001 1003 def get_context_of_line(
1002 1004 self, path, diff_line=None, context_before=3, context_after=3):
1003 1005 """
1004 1006 Returns the context lines for the specified diff line.
1005 1007
1006 1008 :type diff_line: :class:`DiffLineNumber`
1007 1009 """
1008 1010 assert self.parsed, "DiffProcessor is not initialized."
1009 1011
1010 1012 if None not in diff_line:
1011 1013 raise ValueError(
1012 1014 "Cannot specify both line numbers: {}".format(diff_line))
1013 1015
1014 1016 file_diff = self._get_file_diff(path)
1015 1017 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
1016 1018
1017 1019 first_line_to_include = max(idx - context_before, 0)
1018 1020 first_line_after_context = idx + context_after + 1
1019 1021 context_lines = chunk[first_line_to_include:first_line_after_context]
1020 1022
1021 1023 line_contents = [
1022 1024 _context_line(line) for line in context_lines
1023 1025 if _is_diff_content(line)]
1024 1026 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
1025 1027 # Once they are fixed, we can drop this line here.
1026 1028 if line_contents:
1027 1029 line_contents[-1] = (
1028 1030 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
1029 1031 return line_contents
1030 1032
1031 1033 def find_context(self, path, context, offset=0):
1032 1034 """
1033 1035 Finds the given `context` inside of the diff.
1034 1036
1035 1037 Use the parameter `offset` to specify which offset the target line has
1036 1038 inside of the given `context`. This way the correct diff line will be
1037 1039 returned.
1038 1040
1039 1041 :param offset: Shall be used to specify the offset of the main line
1040 1042 within the given `context`.
1041 1043 """
1042 1044 if offset < 0 or offset >= len(context):
1043 1045 raise ValueError(
1044 1046 "Only non-negative values up to the length of the context "
1045 1047 "minus one are allowed.")
1046 1048
1047 1049 matches = []
1048 1050 file_diff = self._get_file_diff(path)
1049 1051
1050 1052 for chunk in file_diff['chunks']:
1051 1053 context_iter = iter(context)
1052 1054 for line_idx, line in enumerate(chunk):
1053 1055 try:
1054 1056 if _context_line(line) == context_iter.next():
1055 1057 continue
1056 1058 except StopIteration:
1057 1059 matches.append((line_idx, chunk))
1058 1060 context_iter = iter(context)
1059 1061
1060 1062 # Increment position and trigger StopIteration
1061 1063 # if we had a match at the end
1062 1064 line_idx += 1
1063 1065 try:
1064 1066 context_iter.next()
1065 1067 except StopIteration:
1066 1068 matches.append((line_idx, chunk))
1067 1069
1068 1070 effective_offset = len(context) - offset
1069 1071 found_at_diff_lines = [
1070 1072 _line_to_diff_line_number(chunk[idx - effective_offset])
1071 1073 for idx, chunk in matches]
1072 1074
1073 1075 return found_at_diff_lines
1074 1076
1075 1077 def _get_file_diff(self, path):
1076 1078 for file_diff in self.parsed_diff:
1077 1079 if file_diff['filename'] == path:
1078 1080 break
1079 1081 else:
1080 1082 raise FileNotInDiffException("File {} not in diff".format(path))
1081 1083 return file_diff
1082 1084
1083 1085 def _find_chunk_line_index(self, file_diff, diff_line):
1084 1086 for chunk in file_diff['chunks']:
1085 1087 for idx, line in enumerate(chunk):
1086 1088 if line['old_lineno'] == diff_line.old:
1087 1089 return chunk, idx
1088 1090 if line['new_lineno'] == diff_line.new:
1089 1091 return chunk, idx
1090 1092 raise LineNotInDiffException(
1091 1093 "The line {} is not part of the diff.".format(diff_line))
1092 1094
1093 1095
1094 1096 def _is_diff_content(line):
1095 1097 return line['action'] in (
1096 1098 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1097 1099
1098 1100
1099 1101 def _context_line(line):
1100 1102 return (line['action'], line['line'])
1101 1103
1102 1104
1103 1105 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1104 1106
1105 1107
1106 1108 def _line_to_diff_line_number(line):
1107 1109 new_line_no = line['new_lineno'] or None
1108 1110 old_line_no = line['old_lineno'] or None
1109 1111 return DiffLineNumber(old=old_line_no, new=new_line_no)
1110 1112
1111 1113
1112 1114 class FileNotInDiffException(Exception):
1113 1115 """
1114 1116 Raised when the context for a missing file is requested.
1115 1117
1116 1118 If you request the context for a line in a file which is not part of the
1117 1119 given diff, then this exception is raised.
1118 1120 """
1119 1121
1120 1122
1121 1123 class LineNotInDiffException(Exception):
1122 1124 """
1123 1125 Raised when the context for a missing line is requested.
1124 1126
1125 1127 If you request the context for a line in a file and this line is not
1126 1128 part of the given diff, then this exception is raised.
1127 1129 """
1128 1130
1129 1131
1130 1132 class DiffLimitExceeded(Exception):
1131 1133 pass
1134
1135
1136 def cache_diff(cached_diff_file, diff, commits):
1137
1138 struct = {
1139 'version': 'v1',
1140 'diff': diff,
1141 'commits': commits
1142 }
1143
1144 try:
1145 with open(cached_diff_file, 'wb') as f:
1146 pickle.dump(struct, f)
1147 log.debug('Saved diff cache under %s', cached_diff_file)
1148 except Exception:
1149 log.warn('Failed to save cache', exc_info=True)
1150 # cleanup file to not store it "damaged"
1151 try:
1152 os.remove(cached_diff_file)
1153 except Exception:
1154 log.exception('Failed to cleanup path %s', cached_diff_file)
1155
1156
1157 def load_cached_diff(cached_diff_file):
1158
1159 default_struct = {
1160 'version': 'v1',
1161 'diff': None,
1162 'commits': None
1163 }
1164
1165 has_cache = os.path.isfile(cached_diff_file)
1166 if not has_cache:
1167 return default_struct
1168
1169 data = None
1170 try:
1171 with open(cached_diff_file, 'rb') as f:
1172 data = pickle.load(f)
1173 log.debug('Loaded diff cache from %s', cached_diff_file)
1174 except Exception:
1175 log.warn('Failed to read diff cache file', exc_info=True)
1176
1177 if not data:
1178 data = default_struct
1179
1180 if not isinstance(data, dict):
1181 # old version of data ?
1182 data = default_struct
1183
1184 return data
1185
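A minimal usage sketch of the save/load pair above; ``cache_dir``, ``diff_data`` and ``commit_list`` are illustrative names, and the cache directory is assumed to exist and be writable::

    cache_file = os.path.join(cache_dir, 'some_cache_key')
    cached = load_cached_diff(cache_file)
    if cached['diff'] is None:
        # nothing cached yet (or the cache was unreadable): compute, then store
        cache_diff(cache_file, diff_data, commit_list)
    else:
        diff_data, commit_list = cached['diff'], cached['commits']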
1186
1187 def generate_diff_cache_key(*args):
1188 """
1189 Helper to generate a cache key using arguments
1190 """
1191 def arg_mapper(input_param):
1192 input_param = safe_str(input_param)
1193 # we cannot allow '/' in arguments since it would allow
1194 # subdirectory usage
1195 input_param = input_param.replace('/', '_')
1196 return input_param or None # prevent empty string arguments
1197
1198 return '_'.join([
1199 '{}' for i in range(len(args))]).format(*map(arg_mapper, args))
1200
1201
1202 def diff_cache_exist(cache_storage, *args):
1203 """
1204 Based on all generated arguments check and return a cache path
1205 """
1206 cache_key = generate_diff_cache_key(*args)
1207 cache_file_path = os.path.join(cache_storage, cache_key)
1208 # prevent path traversal attacks from params that contain e.g. '../../'
1209 if not os.path.abspath(cache_file_path).startswith(cache_storage):
1210 raise ValueError('Final path must be within {}'.format(cache_storage))
1211
1212 return cache_file_path
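The two helpers above are meant to be used together: a key is derived from request parameters and the resulting path is handed to ``load_cached_diff``/``cache_diff``. A rough sketch with illustrative arguments and an illustrative storage path::

    cache_path = diff_cache_exist('/tmp/diff_cache', 'pull_request', 42, 'v1')
    # -> '/tmp/diff_cache/pull_request_42_v1'
    cached = load_cached_diff(cache_path)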
@@ -1,984 +1,993 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26 import collections
27 27 import datetime
28 28 import dateutil.relativedelta
29 29 import hashlib
30 30 import logging
31 31 import re
32 32 import sys
33 33 import time
34 34 import urllib
35 35 import urlobject
36 36 import uuid
37 37 import getpass
38 38
39 39 import pygments.lexers
40 40 import sqlalchemy
41 41 import sqlalchemy.engine.url
42 42 import sqlalchemy.exc
43 43 import sqlalchemy.sql
44 44 import webob
45 45 import pyramid.threadlocal
46 46
47 47 import rhodecode
48 48 from rhodecode.translation import _, _pluralize
49 49
50 50
51 51 def md5(s):
52 52 return hashlib.md5(s).hexdigest()
53 53
54 54
55 55 def md5_safe(s):
56 56 return md5(safe_str(s))
57 57
58 58
59 59 def __get_lem(extra_mapping=None):
60 60 """
61 61 Get language extension map based on what's inside pygments lexers
62 62 """
63 63 d = collections.defaultdict(lambda: [])
64 64
65 65 def __clean(s):
66 66 s = s.lstrip('*')
67 67 s = s.lstrip('.')
68 68
69 69 if s.find('[') != -1:
70 70 exts = []
71 71 start, stop = s.find('['), s.find(']')
72 72
73 73 for suffix in s[start + 1:stop]:
74 74 exts.append(s[:s.find('[')] + suffix)
75 75 return [e.lower() for e in exts]
76 76 else:
77 77 return [s.lower()]
78 78
79 79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
80 80 m = map(__clean, t[-2])
81 81 if m:
82 82 m = reduce(lambda x, y: x + y, m)
83 83 for ext in m:
84 84 desc = lx.replace('Lexer', '')
85 85 d[ext].append(desc)
86 86
87 87 data = dict(d)
88 88
89 89 extra_mapping = extra_mapping or {}
90 90 if extra_mapping:
91 91 for k, v in extra_mapping.items():
92 92 if k not in data:
93 93 # register new mapping2lexer
94 94 data[k] = [v]
95 95
96 96 return data
97 97
98 98
99 99 def str2bool(_str):
100 100 """
101 101 returns a True/False value from the given string; it tries to translate
102 102 the string into a boolean
103 103
104 104 :param _str: string value to translate into boolean
105 105 :rtype: boolean
106 106 :returns: boolean from given string
107 107 """
108 108 if _str is None:
109 109 return False
110 110 if _str in (True, False):
111 111 return _str
112 112 _str = str(_str).strip().lower()
113 113 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
114 114
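A few representative inputs and what ``str2bool`` returns, for illustration::

    str2bool('Yes')   # True
    str2bool('0')     # False
    str2bool(None)    # False
    str2bool(True)    # True (booleans pass through unchanged)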
115 115
116 116 def aslist(obj, sep=None, strip=True):
117 117 """
118 118 Returns given string separated by sep as list
119 119
120 120 :param obj:
121 121 :param sep:
122 122 :param strip:
123 123 """
124 124 if isinstance(obj, (basestring,)):
125 125 lst = obj.split(sep)
126 126 if strip:
127 127 lst = [v.strip() for v in lst]
128 128 return lst
129 129 elif isinstance(obj, (list, tuple)):
130 130 return obj
131 131 elif obj is None:
132 132 return []
133 133 else:
134 134 return [obj]
135 135
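Hedged examples of the coercion rules implemented above::

    aslist('a, b ,c', sep=',')   # ['a', 'b', 'c']
    aslist(None)                 # []
    aslist(('x', 'y'))           # ('x', 'y') - lists/tuples pass through
    aslist(5)                    # [5]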
136 136
137 137 def convert_line_endings(line, mode):
138 138 """
139 139 Converts the line endings of a given line according to the given mode
140 140
141 141 Available modes are::
142 142 0 - Unix
143 143 1 - Mac
144 144 2 - DOS
145 145
146 146 :param line: given line to convert
147 147 :param mode: mode to convert to
148 148 :rtype: str
149 149 :return: converted line according to mode
150 150 """
151 151 if mode == 0:
152 152 line = line.replace('\r\n', '\n')
153 153 line = line.replace('\r', '\n')
154 154 elif mode == 1:
155 155 line = line.replace('\r\n', '\r')
156 156 line = line.replace('\n', '\r')
157 157 elif mode == 2:
158 158 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
159 159 return line
160 160
161 161
162 162 def detect_mode(line, default):
163 163 """
164 164 Detects the line break for the given line; if the line break couldn't be
165 165 detected, the given default value is returned
166 166
167 167 :param line: str line
168 168 :param default: default
169 169 :rtype: int
170 170 :return: line-end value, one of 0 - Unix, 1 - Mac, 2 - DOS
171 171 """
172 172 if line.endswith('\r\n'):
173 173 return 2
174 174 elif line.endswith('\n'):
175 175 return 0
176 176 elif line.endswith('\r'):
177 177 return 1
178 178 else:
179 179 return default
180 180
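The two helpers are typically combined: detect the line ending of existing content, then normalise new text to the same mode. A small sketch::

    mode = detect_mode('first line\r\n', default=0)    # 2 (DOS)
    convert_line_endings('next line\n', mode)          # 'next line\r\n'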
181 181
182 182 def safe_int(val, default=None):
183 183 """
184 184 Returns int() of val; if val is not convertible to int, the default is
185 185 used instead
186 186
187 187 :param val:
188 188 :param default:
189 189 """
190 190
191 191 try:
192 192 val = int(val)
193 193 except (ValueError, TypeError):
194 194 val = default
195 195
196 196 return val
197 197
198 198
199 199 def safe_unicode(str_, from_encoding=None):
200 200 """
201 201 safe unicode function. Does a few tricks to turn str_ into unicode
202 202
203 203 In case of a UnicodeDecodeError, we try to return it with the encoding
204 204 detected by the chardet library; if that fails, fall back to unicode with errors replaced
205 205
206 206 :param str_: string to decode
207 207 :rtype: unicode
208 208 :returns: unicode object
209 209 """
210 210 if isinstance(str_, unicode):
211 211 return str_
212 212
213 213 if not from_encoding:
214 214 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
215 215 'utf8'), sep=',')
216 216 from_encoding = DEFAULT_ENCODINGS
217 217
218 218 if not isinstance(from_encoding, (list, tuple)):
219 219 from_encoding = [from_encoding]
220 220
221 221 try:
222 222 return unicode(str_)
223 223 except UnicodeDecodeError:
224 224 pass
225 225
226 226 for enc in from_encoding:
227 227 try:
228 228 return unicode(str_, enc)
229 229 except UnicodeDecodeError:
230 230 pass
231 231
232 232 try:
233 233 import chardet
234 234 encoding = chardet.detect(str_)['encoding']
235 235 if encoding is None:
236 236 raise Exception()
237 237 return str_.decode(encoding)
238 238 except (ImportError, UnicodeDecodeError, Exception):
239 239 return unicode(str_, from_encoding[0], 'replace')
240 240
241 241
242 242 def safe_str(unicode_, to_encoding=None):
243 243 """
244 244 safe str function. Does a few tricks to turn unicode_ into a string
245 245
246 246 In case of a UnicodeEncodeError, we try to return it with the encoding
247 247 detected by the chardet library; if that fails, fall back to a string with errors replaced
248 248
249 249 :param unicode_: unicode to encode
250 250 :rtype: str
251 251 :returns: str object
252 252 """
253 253
254 254 # if it's not basestr cast to str
255 255 if not isinstance(unicode_, basestring):
256 256 return str(unicode_)
257 257
258 258 if isinstance(unicode_, str):
259 259 return unicode_
260 260
261 261 if not to_encoding:
262 262 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
263 263 'utf8'), sep=',')
264 264 to_encoding = DEFAULT_ENCODINGS
265 265
266 266 if not isinstance(to_encoding, (list, tuple)):
267 267 to_encoding = [to_encoding]
268 268
269 269 for enc in to_encoding:
270 270 try:
271 271 return unicode_.encode(enc)
272 272 except UnicodeEncodeError:
273 273 pass
274 274
275 275 try:
276 276 import chardet
277 277 encoding = chardet.detect(unicode_)['encoding']
278 278 if encoding is None:
279 279 raise UnicodeEncodeError()
280 280
281 281 return unicode_.encode(encoding)
282 282 except (ImportError, UnicodeEncodeError):
283 283 return unicode_.encode(to_encoding[0], 'replace')
284 284
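``safe_unicode`` and ``safe_str`` are rough inverses: one decodes, the other encodes, both falling back to chardet and finally to ``'replace'``. A sketch assuming the configured default_encoding is utf8::

    safe_unicode('gr\xc3\xbcezi')   # -> u'gr\xfcezi', decoded with the default encoding
    safe_str(u'gr\xfcezi')          # -> 'gr\xc3\xbcezi', utf8-encoded byte string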
285 285
286 286 def remove_suffix(s, suffix):
287 287 if s.endswith(suffix):
288 288 s = s[:-1 * len(suffix)]
289 289 return s
290 290
291 291
292 292 def remove_prefix(s, prefix):
293 293 if s.startswith(prefix):
294 294 s = s[len(prefix):]
295 295 return s
296 296
297 297
298 298 def find_calling_context(ignore_modules=None):
299 299 """
300 300 Look through the calling stack and return the frame which called
301 301 this function and is part of the core module (i.e. rhodecode.*)
302 302
303 303 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
304 304 """
305 305
306 306 ignore_modules = ignore_modules or []
307 307
308 308 f = sys._getframe(2)
309 309 while f.f_back is not None:
310 310 name = f.f_globals.get('__name__')
311 311 if name and name.startswith(__name__.split('.')[0]):
312 312 if name not in ignore_modules:
313 313 return f
314 314 f = f.f_back
315 315 return None
316 316
317 317
318 318 def ping_connection(connection, branch):
319 319 if branch:
320 320 # "branch" refers to a sub-connection of a connection,
321 321 # we don't want to bother pinging on these.
322 322 return
323 323
324 324 # turn off "close with result". This flag is only used with
325 325 # "connectionless" execution, otherwise will be False in any case
326 326 save_should_close_with_result = connection.should_close_with_result
327 327 connection.should_close_with_result = False
328 328
329 329 try:
330 330 # run a SELECT 1. use a core select() so that
331 331 # the SELECT of a scalar value without a table is
332 332 # appropriately formatted for the backend
333 333 connection.scalar(sqlalchemy.sql.select([1]))
334 334 except sqlalchemy.exc.DBAPIError as err:
335 335 # catch SQLAlchemy's DBAPIError, which is a wrapper
336 336 # for the DBAPI's exception. It includes a .connection_invalidated
337 337 # attribute which specifies if this connection is a "disconnect"
338 338 # condition, which is based on inspection of the original exception
339 339 # by the dialect in use.
340 340 if err.connection_invalidated:
341 341 # run the same SELECT again - the connection will re-validate
342 342 # itself and establish a new connection. The disconnect detection
343 343 # here also causes the whole connection pool to be invalidated
344 344 # so that all stale connections are discarded.
345 345 connection.scalar(sqlalchemy.sql.select([1]))
346 346 else:
347 347 raise
348 348 finally:
349 349 # restore "close with result"
350 350 connection.should_close_with_result = save_should_close_with_result
351 351
352 352
353 353 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
354 354 """Custom engine_from_config functions."""
355 355 log = logging.getLogger('sqlalchemy.engine')
356 356 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
357 357
358 358 def color_sql(sql):
359 359 color_seq = '\033[1;33m' # This is yellow: code 33
360 360 normal = '\x1b[0m'
361 361 return ''.join([color_seq, sql, normal])
362 362
363 363 if configuration['debug']:
364 364 # attach events only for debug configuration
365 365
366 366 def before_cursor_execute(conn, cursor, statement,
367 367 parameters, context, executemany):
368 368 setattr(conn, 'query_start_time', time.time())
369 369 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
370 370 calling_context = find_calling_context(ignore_modules=[
371 371 'rhodecode.lib.caching_query',
372 372 'rhodecode.model.settings',
373 373 ])
374 374 if calling_context:
375 375 log.info(color_sql('call context %s:%s' % (
376 376 calling_context.f_code.co_filename,
377 377 calling_context.f_lineno,
378 378 )))
379 379
380 380 def after_cursor_execute(conn, cursor, statement,
381 381 parameters, context, executemany):
382 382 delattr(conn, 'query_start_time')
383 383
384 384 sqlalchemy.event.listen(engine, "engine_connect",
385 385 ping_connection)
386 386 sqlalchemy.event.listen(engine, "before_cursor_execute",
387 387 before_cursor_execute)
388 388 sqlalchemy.event.listen(engine, "after_cursor_execute",
389 389 after_cursor_execute)
390 390
391 391 return engine
392 392
393 393
394 394 def get_encryption_key(config):
395 395 secret = config.get('rhodecode.encrypted_values.secret')
396 396 default = config['beaker.session.secret']
397 397 return secret or default
398 398
399 399
400 400 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
401 401 short_format=False):
402 402 """
403 403 Turns a datetime into an age string.
404 404 If show_short_version is True, this generates a shorter string with
405 405 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
406 406
407 407 *IMPORTANT*
408 408 Code of this function is written in a special way so it's easier to
409 409 backport it to javascript. If you mean to update it, please also update
410 410 `jquery.timeago-extension.js` file
411 411
412 412 :param prevdate: datetime object
413 413 :param now: current time; if not defined we use
414 414 `datetime.datetime.now()`
415 415 :param show_short_version: if it should approximate the date and
416 416 return a shorter string
417 417 :param show_suffix:
418 418 :param short_format: show short format, e.g. 2d instead of 2 days
419 419 :rtype: unicode
420 420 :returns: unicode words describing age
421 421 """
422 422
423 423 def _get_relative_delta(now, prevdate):
424 424 base = dateutil.relativedelta.relativedelta(now, prevdate)
425 425 return {
426 426 'year': base.years,
427 427 'month': base.months,
428 428 'day': base.days,
429 429 'hour': base.hours,
430 430 'minute': base.minutes,
431 431 'second': base.seconds,
432 432 }
433 433
434 434 def _is_leap_year(year):
435 435 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
436 436
437 437 def get_month(prevdate):
438 438 return prevdate.month
439 439
440 440 def get_year(prevdate):
441 441 return prevdate.year
442 442
443 443 now = now or datetime.datetime.now()
444 444 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
445 445 deltas = {}
446 446 future = False
447 447
448 448 if prevdate > now:
449 449 now_old = now
450 450 now = prevdate
451 451 prevdate = now_old
452 452 future = True
453 453 if future:
454 454 prevdate = prevdate.replace(microsecond=0)
455 455 # Get date parts deltas
456 456 for part in order:
457 457 rel_delta = _get_relative_delta(now, prevdate)
458 458 deltas[part] = rel_delta[part]
459 459
460 460 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
461 461 # not 1 hour, -59 minutes and -59 seconds)
462 462 offsets = [[5, 60], [4, 60], [3, 24]]
463 463 for element in offsets: # seconds, minutes, hours
464 464 num = element[0]
465 465 length = element[1]
466 466
467 467 part = order[num]
468 468 carry_part = order[num - 1]
469 469
470 470 if deltas[part] < 0:
471 471 deltas[part] += length
472 472 deltas[carry_part] -= 1
473 473
474 474 # Same thing for days except that the increment depends on the (variable)
475 475 # number of days in the month
476 476 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
477 477 if deltas['day'] < 0:
478 478 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
479 479 deltas['day'] += 29
480 480 else:
481 481 deltas['day'] += month_lengths[get_month(prevdate) - 1]
482 482
483 483 deltas['month'] -= 1
484 484
485 485 if deltas['month'] < 0:
486 486 deltas['month'] += 12
487 487 deltas['year'] -= 1
488 488
489 489 # Format the result
490 490 if short_format:
491 491 fmt_funcs = {
492 492 'year': lambda d: u'%dy' % d,
493 493 'month': lambda d: u'%dm' % d,
494 494 'day': lambda d: u'%dd' % d,
495 495 'hour': lambda d: u'%dh' % d,
496 496 'minute': lambda d: u'%dmin' % d,
497 497 'second': lambda d: u'%dsec' % d,
498 498 }
499 499 else:
500 500 fmt_funcs = {
501 501 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
502 502 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
503 503 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
504 504 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
505 505 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
506 506 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
507 507 }
508 508
509 509 i = 0
510 510 for part in order:
511 511 value = deltas[part]
512 512 if value != 0:
513 513
514 514 if i < 5:
515 515 sub_part = order[i + 1]
516 516 sub_value = deltas[sub_part]
517 517 else:
518 518 sub_value = 0
519 519
520 520 if sub_value == 0 or show_short_version:
521 521 _val = fmt_funcs[part](value)
522 522 if future:
523 523 if show_suffix:
524 524 return _(u'in ${ago}', mapping={'ago': _val})
525 525 else:
526 526 return _(_val)
527 527
528 528 else:
529 529 if show_suffix:
530 530 return _(u'${ago} ago', mapping={'ago': _val})
531 531 else:
532 532 return _(_val)
533 533
534 534 val = fmt_funcs[part](value)
535 535 val_detail = fmt_funcs[sub_part](sub_value)
536 536 mapping = {'val': val, 'detail': val_detail}
537 537
538 538 if short_format:
539 539 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
540 540 if show_suffix:
541 541 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
542 542 if future:
543 543 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
544 544 else:
545 545 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
546 546 if show_suffix:
547 547 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
548 548 if future:
549 549 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
550 550
551 551 return datetime_tmpl
552 552 i += 1
553 553 return _(u'just now')
554 554
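A hedged example of the formatter; the exact wording depends on the active translations::

    now = datetime.datetime(2018, 1, 10, 12, 0, 0)
    prev = datetime.datetime(2018, 1, 9, 10, 0, 0)
    age(prev, now=now)                           # u'1 day and 2 hours ago'
    age(prev, now=now, show_short_version=True)  # u'1 day ago'
    age(prev, now=now, short_format=True)        # u'1d, 2h ago'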
555 555
556 556 def cleaned_uri(uri):
557 557 """
558 558 Quotes '[' and ']' from the uri if there is only one of them;
559 559 according to RFC 3986 we cannot use such chars in a uri.
560 560 :param uri:
561 561 :return: uri without these chars
562 562 """
563 563 return urllib.quote(uri, safe='@$:/')
564 564
565 565
566 566 def uri_filter(uri):
567 567 """
568 568 Removes user:password from given url string
569 569
570 570 :param uri:
571 571 :rtype: unicode
572 572 :returns: filtered list of strings
573 573 """
574 574 if not uri:
575 575 return ''
576 576
577 577 proto = ''
578 578
579 579 for pat in ('https://', 'http://'):
580 580 if uri.startswith(pat):
581 581 uri = uri[len(pat):]
582 582 proto = pat
583 583 break
584 584
585 585 # remove passwords and username
586 586 uri = uri[uri.find('@') + 1:]
587 587
588 588 # get the port
589 589 cred_pos = uri.find(':')
590 590 if cred_pos == -1:
591 591 host, port = uri, None
592 592 else:
593 593 host, port = uri[:cred_pos], uri[cred_pos + 1:]
594 594
595 595 return filter(None, [proto, host, port])
596 596
597 597
598 598 def credentials_filter(uri):
599 599 """
600 600 Returns a url with removed credentials
601 601
602 602 :param uri:
603 603 """
604 604
605 605 uri = uri_filter(uri)
606 606 # check if we have port
607 607 if len(uri) > 2 and uri[2]:
608 608 uri[2] = ':' + uri[2]
609 609
610 610 return ''.join(uri)
611 611
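For illustration, stripping credentials from a clone URL (host and port are made up)::

    credentials_filter('https://joe:secret@code.example.com:8080/repo')
    # -> 'https://code.example.com:8080/repo'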
612 612
613 613 def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
614 614 qualifed_home_url = request.route_url('home')
615 615 parsed_url = urlobject.URLObject(qualifed_home_url)
616 616 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
617 617
618 618 args = {
619 619 'scheme': parsed_url.scheme,
620 620 'user': '',
621 621 'sys_user': getpass.getuser(),
622 622 # path if we use proxy-prefix
623 623 'netloc': parsed_url.netloc+decoded_path,
624 624 'hostname': parsed_url.hostname,
625 625 'prefix': decoded_path,
626 626 'repo': repo_name,
627 627 'repoid': str(repo_id)
628 628 }
629 629 args.update(override)
630 630 args['user'] = urllib.quote(safe_str(args['user']))
631 631
632 632 for k, v in args.items():
633 633 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
634 634
635 635 # remove leading @ sign if it's present. Case of empty user
636 636 url_obj = urlobject.URLObject(uri_tmpl)
637 637 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
638 638
639 639 return safe_unicode(url)
640 640
641 641
642 642 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
643 643 """
644 644 Safe version of get_commit; if the commit doesn't exist for a
645 645 repository it returns a Dummy one instead
646 646
647 647 :param repo: repository instance
648 648 :param commit_id: commit id as str
649 649 :param pre_load: optional list of commit attributes to load
650 650 """
651 651 # TODO(skreft): remove these circular imports
652 652 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
653 653 from rhodecode.lib.vcs.exceptions import RepositoryError
654 654 if not isinstance(repo, BaseRepository):
655 655 raise Exception('You must pass a Repository '
656 656 'object as first argument got %s', type(repo))
657 657
658 658 try:
659 659 commit = repo.get_commit(
660 660 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
661 661 except (RepositoryError, LookupError):
662 662 commit = EmptyCommit()
663 663 return commit
664 664
665 665
666 666 def datetime_to_time(dt):
667 667 if dt:
668 668 return time.mktime(dt.timetuple())
669 669
670 670
671 671 def time_to_datetime(tm):
672 672 if tm:
673 673 if isinstance(tm, basestring):
674 674 try:
675 675 tm = float(tm)
676 676 except ValueError:
677 677 return
678 678 return datetime.datetime.fromtimestamp(tm)
679 679
680 680
681 681 def time_to_utcdatetime(tm):
682 682 if tm:
683 683 if isinstance(tm, basestring):
684 684 try:
685 685 tm = float(tm)
686 686 except ValueError:
687 687 return
688 688 return datetime.datetime.utcfromtimestamp(tm)
689 689
690 690
691 691 MENTIONS_REGEX = re.compile(
692 692 # ^@ or @ without any special chars in front
693 693 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
694 694 # main body starts with letter, then can be . - _
695 695 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
696 696 re.VERBOSE | re.MULTILINE)
697 697
698 698
699 699 def extract_mentioned_users(s):
700 700 """
701 701 Returns unique usernames from given string s that have @mention
702 702
703 703 :param s: string to get mentions
704 704 """
705 705 usrs = set()
706 706 for username in MENTIONS_REGEX.findall(s):
707 707 usrs.add(username)
708 708
709 709 return sorted(list(usrs), key=lambda k: k.lower())
710 710
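A short example of the mention extraction, using made-up usernames; e-mail addresses are not picked up because their ``@`` is preceded by an alphanumeric character::

    extract_mentioned_users('ping @marcin and @john-doe, but not mail@example.com')
    # -> ['john-doe', 'marcin']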
711 711
712 class StrictAttributeDict(dict):
712 class AttributeDictBase(dict):
713 def __getstate__(self):
714 odict = self.__dict__ # get attribute dictionary
715 return odict
716
717 def __setstate__(self, dict):
718 self.__dict__ = dict
719
720 __setattr__ = dict.__setitem__
721 __delattr__ = dict.__delitem__
722
723
724 class StrictAttributeDict(AttributeDictBase):
713 725 """
714 726 Strict version of AttributeDict which raises an AttributeError when
715 727 the requested attribute is not set
716 728 """
717 729 def __getattr__(self, attr):
718 730 try:
719 731 return self[attr]
720 732 except KeyError:
721 733 raise AttributeError('%s object has no attribute %s' % (
722 734 self.__class__, attr))
723 __setattr__ = dict.__setitem__
724 __delattr__ = dict.__delitem__
725 735
726 736
727 class AttributeDict(dict):
737 class AttributeDict(AttributeDictBase):
728 738 def __getattr__(self, attr):
729 739 return self.get(attr, None)
730 __setattr__ = dict.__setitem__
731 __delattr__ = dict.__delitem__
740
732 741
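A brief sketch of the difference between the two flavours, and of the pickle support the new ``AttributeDictBase`` adds (presumably needed now that diff data is pickled for the new cache)::

    import pickle
    d = AttributeDict(name='docs')
    d.name                              # 'docs'
    d.missing                           # None instead of raising
    # StrictAttributeDict(name='docs').missing would raise AttributeError
    pickle.loads(pickle.dumps(d)).name  # 'docs' - round-trips via __getstate__/__setstate__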
733 742
734 743 def fix_PATH(os_=None):
735 744 """
736 745 Get current active python path, and append it to PATH variable to fix
737 746 issues of subprocess calls and different python versions
738 747 """
739 748 if os_ is None:
740 749 import os
741 750 else:
742 751 os = os_
743 752
744 753 cur_path = os.path.split(sys.executable)[0]
745 754 if not os.environ['PATH'].startswith(cur_path):
746 755 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
747 756
748 757
749 758 def obfuscate_url_pw(engine):
750 759 _url = engine or ''
751 760 try:
752 761 _url = sqlalchemy.engine.url.make_url(engine)
753 762 if _url.password:
754 763 _url.password = 'XXXXX'
755 764 except Exception:
756 765 pass
757 766 return unicode(_url)
758 767
759 768
760 769 def get_server_url(environ):
761 770 req = webob.Request(environ)
762 771 return req.host_url + req.script_name
763 772
764 773
765 774 def unique_id(hexlen=32):
766 775 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
767 776 return suuid(truncate_to=hexlen, alphabet=alphabet)
768 777
769 778
770 779 def suuid(url=None, truncate_to=22, alphabet=None):
771 780 """
772 781 Generate and return a short URL safe UUID.
773 782
774 783 If the url parameter is provided, set the namespace to the provided
775 784 URL and generate a UUID.
776 785
777 786 :param url: url to get the uuid for
778 787 :param truncate_to: truncate the basic 22-character UUID to a shorter version
779 788
780 789 The IDs won't be universally unique any longer, but the probability of
781 790 a collision will still be very low.
782 791 """
783 792 # Define our alphabet.
784 793 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
785 794
786 795 # If no URL is given, generate a random UUID.
787 796 if url is None:
788 797 unique_id = uuid.uuid4().int
789 798 else:
790 799 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
791 800
792 801 alphabet_length = len(_ALPHABET)
793 802 output = []
794 803 while unique_id > 0:
795 804 digit = unique_id % alphabet_length
796 805 output.append(_ALPHABET[digit])
797 806 unique_id = int(unique_id / alphabet_length)
798 807 return "".join(output)[:truncate_to]
799 808
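Hedged examples; the first call is random per invocation, the second is deterministic for a given URL::

    unique_id(hexlen=16)                    # 16-char id drawn from the unambiguous alphabet
    suuid(url='https://example.com/repo')   # uuid3-based, stable for this URL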
800 809
801 810 def get_current_rhodecode_user(request=None):
802 811 """
803 812 Gets rhodecode user from request
804 813 """
805 814 pyramid_request = request or pyramid.threadlocal.get_current_request()
806 815
807 816 # web case
808 817 if pyramid_request and hasattr(pyramid_request, 'user'):
809 818 return pyramid_request.user
810 819
811 820 # api case
812 821 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
813 822 return pyramid_request.rpc_user
814 823
815 824 return None
816 825
817 826
818 827 def action_logger_generic(action, namespace=''):
819 828 """
820 829 A generic logger for actions useful to the system overview; tries to find
821 830 an acting user for the context of the call, otherwise reports an unknown user
822 831
823 832 :param action: logging message eg 'comment 5 deleted'
824 833 :param type: string
825 834
826 835 :param namespace: namespace of the logging message eg. 'repo.comments'
827 836 :param type: string
828 837
829 838 """
830 839
831 840 logger_name = 'rhodecode.actions'
832 841
833 842 if namespace:
834 843 logger_name += '.' + namespace
835 844
836 845 log = logging.getLogger(logger_name)
837 846
838 847 # get a user if we can
839 848 user = get_current_rhodecode_user()
840 849
841 850 logfunc = log.info
842 851
843 852 if not user:
844 853 user = '<unknown user>'
845 854 logfunc = log.warning
846 855
847 856 logfunc('Logging action by {}: {}'.format(user, action))
848 857
849 858
850 859 def escape_split(text, sep=',', maxsplit=-1):
851 860 r"""
852 861 Allows for escaping of the separator: e.g. arg='foo\, bar'
853 862
854 863 It should be noted that, given the way bash et al. do command line parsing, those
855 864 single quotes are required.
856 865 """
857 866 escaped_sep = r'\%s' % sep
858 867
859 868 if escaped_sep not in text:
860 869 return text.split(sep, maxsplit)
861 870
862 871 before, _mid, after = text.partition(escaped_sep)
863 872 startlist = before.split(sep, maxsplit) # a regular split is fine here
864 873 unfinished = startlist[-1]
865 874 startlist = startlist[:-1]
866 875
867 876 # recurse because there may be more escaped separators
868 877 endlist = escape_split(after, sep, maxsplit)
869 878
871 880 # finish building the escaped value. we use endlist[0] because the first
871 880 # part of the string sent in recursion is the rest of the escaped value.
872 881 unfinished += sep + endlist[0]
873 882
874 883 return startlist + [unfinished] + endlist[1:] # put together all the parts
875 884
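An example of the escaping behaviour described in the docstring::

    escape_split('foo\\, bar,baz')   # ['foo, bar', 'baz'] - the escaped comma stays in the value
    escape_split('a,b,c')            # ['a', 'b', 'c']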
876 885
877 886 class OptionalAttr(object):
878 887 """
879 888 Special Optional argument that refers to another attribute. Example::
880 889
881 890 def test(apiuser, userid=Optional(OAttr('apiuser'))):
882 891 user = Optional.extract(userid)
883 892 # calls
884 893
885 894 """
886 895
887 896 def __init__(self, attr_name):
888 897 self.attr_name = attr_name
889 898
890 899 def __repr__(self):
891 900 return '<OptionalAttr:%s>' % self.attr_name
892 901
893 902 def __call__(self):
894 903 return self
895 904
896 905
897 906 # alias
898 907 OAttr = OptionalAttr
899 908
900 909
901 910 class Optional(object):
902 911 """
903 912 Defines an optional parameter::
904 913
905 914 param = param.getval() if isinstance(param, Optional) else param
906 915 param = param() if isinstance(param, Optional) else param
907 916
908 917 is equivalent of::
909 918
910 919 param = Optional.extract(param)
911 920
912 921 """
913 922
914 923 def __init__(self, type_):
915 924 self.type_ = type_
916 925
917 926 def __repr__(self):
918 927 return '<Optional:%s>' % self.type_.__repr__()
919 928
920 929 def __call__(self):
921 930 return self.getval()
922 931
923 932 def getval(self):
924 933 """
925 934 returns value from this Optional instance
926 935 """
927 936 if isinstance(self.type_, OAttr):
928 937 # use params name
929 938 return self.type_.attr_name
930 939 return self.type_
931 940
932 941 @classmethod
933 942 def extract(cls, val):
934 943 """
935 944 Extracts value from Optional() instance
936 945
937 946 :param val:
938 947 :return: original value if it's not Optional instance else
939 948 value of instance
940 949 """
941 950 if isinstance(val, cls):
942 951 return val.getval()
943 952 return val
944 953
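A short sketch of how API code unwraps these markers, mirroring the docstrings above::

    userid = Optional(OAttr('apiuser'))
    Optional.extract(userid)        # 'apiuser' - falls back to the attribute name
    Optional.extract('admin')       # 'admin' - plain values pass through
    Optional.extract(Optional(5))   # 5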
945 954
946 955 def glob2re(pat):
947 956 """
948 957 Translate a shell PATTERN to a regular expression.
949 958
950 959 There is no way to quote meta-characters.
951 960 """
952 961
953 962 i, n = 0, len(pat)
954 963 res = ''
955 964 while i < n:
956 965 c = pat[i]
957 966 i = i+1
958 967 if c == '*':
959 968 #res = res + '.*'
960 969 res = res + '[^/]*'
961 970 elif c == '?':
962 971 #res = res + '.'
963 972 res = res + '[^/]'
964 973 elif c == '[':
965 974 j = i
966 975 if j < n and pat[j] == '!':
967 976 j = j+1
968 977 if j < n and pat[j] == ']':
969 978 j = j+1
970 979 while j < n and pat[j] != ']':
971 980 j = j+1
972 981 if j >= n:
973 982 res = res + '\\['
974 983 else:
975 984 stuff = pat[i:j].replace('\\','\\\\')
976 985 i = j+1
977 986 if stuff[0] == '!':
978 987 stuff = '^' + stuff[1:]
979 988 elif stuff[0] == '^':
980 989 stuff = '\\' + stuff
981 990 res = '%s[%s]' % (res, stuff)
982 991 else:
983 992 res = res + re.escape(c)
984 993 return res + '\Z(?ms)'
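A hedged example of the translation; the resulting pattern is intended for ``re.match`` and keeps ``*``/``?`` from crossing path separators::

    pattern = glob2re('docs/*.rst')
    bool(re.match(pattern, 'docs/index.rst'))       # True
    bool(re.match(pattern, 'docs/api/index.rst'))   # False - '*' does not match '/'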
@@ -1,1698 +1,1712 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.utils import author_name, author_email
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 46 RepositoryError)
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 FILEMODE_DEFAULT = 0100644
53 53 FILEMODE_EXECUTABLE = 0100755
54 54
55 55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 56 MergeResponse = collections.namedtuple(
57 57 'MergeResponse',
58 58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 59
60 60
61 61 class MergeFailureReason(object):
62 62 """
63 63 Enumeration with all the reasons why the server side merge could fail.
64 64
65 65 DO NOT change the number of the reasons, as they may be stored in the
66 66 database.
67 67
68 68 Changing the name of a reason is acceptable and encouraged to deprecate old
69 69 reasons.
70 70 """
71 71
72 72 # Everything went well.
73 73 NONE = 0
74 74
75 75 # An unexpected exception was raised. Check the logs for more details.
76 76 UNKNOWN = 1
77 77
78 78 # The merge was not successful, there are conflicts.
79 79 MERGE_FAILED = 2
80 80
81 81 # The merge succeeded but we could not push it to the target repository.
82 82 PUSH_FAILED = 3
83 83
84 84 # The specified target is not a head in the target repository.
85 85 TARGET_IS_NOT_HEAD = 4
86 86
87 87 # The source repository contains more branches than the target. Pushing
88 88 # the merge will create additional branches in the target.
89 89 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 90
91 91 # The target reference has multiple heads. That does not allow to correctly
92 92 # identify the target location. This could only happen for mercurial
93 93 # branches.
94 94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 95
96 96 # The target repository is locked
97 97 TARGET_IS_LOCKED = 7
98 98
99 99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 100 # A involved commit could not be found.
101 101 _DEPRECATED_MISSING_COMMIT = 8
102 102
103 103 # The target repo reference is missing.
104 104 MISSING_TARGET_REF = 9
105 105
106 106 # The source repo reference is missing.
107 107 MISSING_SOURCE_REF = 10
108 108
109 109 # The merge was not successful, there are conflicts related to sub
110 110 # repositories.
111 111 SUBREPO_MERGE_FAILED = 11
112 112
113 113
114 114 class UpdateFailureReason(object):
115 115 """
116 116 Enumeration with all the reasons why the pull request update could fail.
117 117
118 118 DO NOT change the number of the reasons, as they may be stored in the
119 119 database.
120 120
121 121 Changing the name of a reason is acceptable and encouraged to deprecate old
122 122 reasons.
123 123 """
124 124
125 125 # Everything went well.
126 126 NONE = 0
127 127
128 128 # An unexpected exception was raised. Check the logs for more details.
129 129 UNKNOWN = 1
130 130
131 131 # The pull request is up to date.
132 132 NO_CHANGE = 2
133 133
134 134 # The pull request has a reference type that is not supported for update.
135 135 WRONG_REF_TYPE = 3
136 136
137 137 # Update failed because the target reference is missing.
138 138 MISSING_TARGET_REF = 4
139 139
140 140 # Update failed because the source reference is missing.
141 141 MISSING_SOURCE_REF = 5
142 142
143 143
144 144 class BaseRepository(object):
145 145 """
146 146 Base Repository for final backends
147 147
148 148 .. attribute:: DEFAULT_BRANCH_NAME
149 149
150 150 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151 151
152 152 .. attribute:: commit_ids
153 153
154 154 list of all available commit ids, in ascending order
155 155
156 156 .. attribute:: path
157 157
158 158 absolute path to the repository
159 159
160 160 .. attribute:: bookmarks
161 161
162 162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 163 there are no bookmarks or the backend implementation does not support
164 164 bookmarks.
165 165
166 166 .. attribute:: tags
167 167
168 168 Mapping from name to :term:`Commit ID` of the tag.
169 169
170 170 """
171 171
172 172 DEFAULT_BRANCH_NAME = None
173 173 DEFAULT_CONTACT = u"Unknown"
174 174 DEFAULT_DESCRIPTION = u"unknown"
175 175 EMPTY_COMMIT_ID = '0' * 40
176 176
177 177 path = None
178 178 _remote = None
179 179
180 180 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 181 """
182 182 Initializes the repository. Raises RepositoryError if the repository could
183 183 not be found at the given ``repo_path``, or if a directory at ``repo_path``
184 184 already exists and ``create`` is set to True.
185 185
186 186 :param repo_path: local path of the repository
187 187 :param config: repository configuration
188 188 :param create=False: if set to True, would try to create repository.
189 189 :param src_url=None: if set, should be proper url from which repository
190 190 would be cloned; requires ``create`` parameter to be set to True -
191 191 raises RepositoryError if src_url is set and create evaluates to
192 192 False
193 193 """
194 194 raise NotImplementedError
195 195
196 196 def __repr__(self):
197 197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198 198
199 199 def __len__(self):
200 200 return self.count()
201 201
202 202 def __eq__(self, other):
203 203 same_instance = isinstance(other, self.__class__)
204 204 return same_instance and other.path == self.path
205 205
206 206 def __ne__(self, other):
207 207 return not self.__eq__(other)
208 208
209 def get_create_shadow_cache_pr_path(self, repo):
210 path = os.path.join(
211 os.path.dirname(self.path),
212 '.__shadow_diff_cache_repo_{}/'.format(repo.repo_id))
213 if not os.path.exists(path):
214 os.makedirs(path, 0755)
215 return path
216
209 217 @classmethod
210 218 def get_default_config(cls, default=None):
211 219 config = Config()
212 220 if default and isinstance(default, list):
213 221 for section, key, val in default:
214 222 config.set(section, key, val)
215 223 return config
216 224
217 225 @LazyProperty
218 226 def EMPTY_COMMIT(self):
219 227 return EmptyCommit(self.EMPTY_COMMIT_ID)
220 228
221 229 @LazyProperty
222 230 def alias(self):
223 231 for k, v in settings.BACKENDS.items():
224 232 if v.split('.')[-1] == str(self.__class__.__name__):
225 233 return k
226 234
227 235 @LazyProperty
228 236 def name(self):
229 237 return safe_unicode(os.path.basename(self.path))
230 238
231 239 @LazyProperty
232 240 def description(self):
233 241 raise NotImplementedError
234 242
235 243 def refs(self):
236 244 """
237 245 returns a `dict` with branches, bookmarks, tags, and closed_branches
238 246 for this repository
239 247 """
240 248 return dict(
241 249 branches=self.branches,
242 250 branches_closed=self.branches_closed,
243 251 tags=self.tags,
244 252 bookmarks=self.bookmarks
245 253 )
246 254
247 255 @LazyProperty
248 256 def branches(self):
249 257 """
250 258 A `dict` which maps branch names to commit ids.
251 259 """
252 260 raise NotImplementedError
253 261
254 262 @LazyProperty
255 263 def branches_closed(self):
256 264 """
257 265 A `dict` which maps closed branch names to commit ids.
258 266 """
259 267 raise NotImplementedError
260 268
261 269 @LazyProperty
262 270 def bookmarks(self):
263 271 """
264 272 A `dict` which maps bookmark names to commit ids.
265 273 """
266 274 raise NotImplementedError
267 275
268 276 @LazyProperty
269 277 def tags(self):
270 278 """
271 279 A `dict` which maps tag names to commit ids.
272 280 """
273 281 raise NotImplementedError
274 282
275 283 @LazyProperty
276 284 def size(self):
277 285 """
278 286 Returns combined size in bytes for all repository files
279 287 """
280 288 tip = self.get_commit()
281 289 return tip.size
282 290
283 291 def size_at_commit(self, commit_id):
284 292 commit = self.get_commit(commit_id)
285 293 return commit.size
286 294
287 295 def is_empty(self):
288 296 return not bool(self.commit_ids)
289 297
290 298 @staticmethod
291 299 def check_url(url, config):
292 300 """
293 301 Function will check given url and try to verify if it's a valid
294 302 link.
295 303 """
296 304 raise NotImplementedError
297 305
298 306 @staticmethod
299 307 def is_valid_repository(path):
300 308 """
301 309 Check if given `path` contains a valid repository of this backend
302 310 """
303 311 raise NotImplementedError
304 312
305 313 # ==========================================================================
306 314 # COMMITS
307 315 # ==========================================================================
308 316
309 317 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
310 318 """
311 319 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
312 320 are both None, most recent commit is returned.
313 321
314 322 :param pre_load: Optional. List of commit attributes to load.
315 323
316 324 :raises ``EmptyRepositoryError``: if there are no commits
317 325 """
318 326 raise NotImplementedError
319 327
320 328 def __iter__(self):
321 329 for commit_id in self.commit_ids:
322 330 yield self.get_commit(commit_id=commit_id)
323 331
324 332 def get_commits(
325 333 self, start_id=None, end_id=None, start_date=None, end_date=None,
326 334 branch_name=None, show_hidden=False, pre_load=None):
327 335 """
328 336 Returns iterator of `BaseCommit` objects from start to end
329 337 not inclusive. This should behave just like a list, ie. end is not
330 338 inclusive.
331 339
332 340 :param start_id: None or str, must be a valid commit id
333 341 :param end_id: None or str, must be a valid commit id
334 342 :param start_date:
335 343 :param end_date:
336 344 :param branch_name:
337 345 :param show_hidden:
338 346 :param pre_load:
339 347 """
340 348 raise NotImplementedError
341 349
342 350 def __getitem__(self, key):
343 351 """
344 352 Allows index based access to the commit objects of this repository.
345 353 """
346 354 pre_load = ["author", "branch", "date", "message", "parents"]
347 355 if isinstance(key, slice):
348 356 return self._get_range(key, pre_load)
349 357 return self.get_commit(commit_idx=key, pre_load=pre_load)
350 358
351 359 def _get_range(self, slice_obj, pre_load):
352 360 for commit_id in self.commit_ids.__getitem__(slice_obj):
353 361 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
354 362
355 363 def count(self):
356 364 return len(self.commit_ids)
357 365
358 366 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
359 367 """
360 368 Creates and returns a tag for the given ``commit_id``.
361 369
362 370 :param name: name for new tag
363 371 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 372 :param commit_id: commit id for which new tag would be created
365 373 :param message: message of the tag's commit
366 374 :param date: date of tag's commit
367 375
368 376 :raises TagAlreadyExistError: if tag with same name already exists
369 377 """
370 378 raise NotImplementedError
371 379
372 380 def remove_tag(self, name, user, message=None, date=None):
373 381 """
374 382 Removes tag with the given ``name``.
375 383
376 384 :param name: name of the tag to be removed
377 385 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
378 386 :param message: message of the tag's removal commit
379 387 :param date: date of tag's removal commit
380 388
381 389 :raises TagDoesNotExistError: if tag with given name does not exist
382 390 """
383 391 raise NotImplementedError
384 392
385 393 def get_diff(
386 394 self, commit1, commit2, path=None, ignore_whitespace=False,
387 395 context=3, path1=None):
388 396 """
389 397 Returns (git like) *diff*, as plain text. Shows changes introduced by
390 398 `commit2` since `commit1`.
391 399
392 400 :param commit1: Entry point from which diff is shown. Can be
393 401 ``self.EMPTY_COMMIT`` - in this case, patch showing all
394 402 the changes since empty state of the repository until `commit2`
395 403 :param commit2: Until which commit changes should be shown.
396 404 :param path: Can be set to a path of a file to create a diff of that
397 405 file. If `path1` is also set, this value is only associated to
398 406 `commit2`.
399 407 :param ignore_whitespace: If set to ``True``, would not show whitespace
400 408 changes. Defaults to ``False``.
401 409 :param context: How many lines before/after changed lines should be
402 410 shown. Defaults to ``3``.
403 411 :param path1: Can be set to a path to associate with `commit1`. This
404 412 parameter works only for backends which support diff generation for
405 413 different paths. Other backends will raise a `ValueError` if `path1`
406 414 is set and has a different value than `path`.
407 415 :param file_path: filter this diff by given path pattern
408 416 """
409 417 raise NotImplementedError
410 418
411 419 def strip(self, commit_id, branch=None):
412 420 """
413 421 Strip given commit_id from the repository
414 422 """
415 423 raise NotImplementedError
416 424
417 425 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
418 426 """
419 427 Return a latest common ancestor commit if one exists for this repo
420 428 `commit_id1` vs `commit_id2` from `repo2`.
421 429
422 430 :param commit_id1: Commit id from this repository to use as a
423 431 target for the comparison.
424 432 :param commit_id2: Source commit id to use for comparison.
425 433 :param repo2: Source repository to use for comparison.
426 434 """
427 435 raise NotImplementedError
428 436
429 437 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
430 438 """
431 439 Compare this repository's revision `commit_id1` with `commit_id2`.
432 440
433 441 Returns a tuple(commits, ancestor) that would be merged from
434 442 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
435 443 will be returned as ancestor.
436 444
437 445 :param commit_id1: Commit id from this repository to use as a
438 446 target for the comparison.
439 447 :param commit_id2: Source commit id to use for comparison.
440 448 :param repo2: Source repository to use for comparison.
441 449 :param merge: If set to ``True`` will do a merge compare which also
442 450 returns the common ancestor.
443 451 :param pre_load: Optional. List of commit attributes to load.
444 452 """
445 453 raise NotImplementedError
446 454
447 455 def merge(self, target_ref, source_repo, source_ref, workspace_id,
448 456 user_name='', user_email='', message='', dry_run=False,
449 457 use_rebase=False, close_branch=False):
450 458 """
451 459 Merge the revisions specified in `source_ref` from `source_repo`
452 460 onto the `target_ref` of this repository.
453 461
454 462 `source_ref` and `target_ref` are named tuples with the following
455 463 fields `type`, `name` and `commit_id`.
456 464
457 465 Returns a MergeResponse named tuple with the following fields
458 466 'possible', 'executed', 'source_commit', 'target_commit',
459 467 'merge_commit'.
460 468
461 469 :param target_ref: `target_ref` points to the commit on top of which
462 470 the `source_ref` should be merged.
463 471 :param source_repo: The repository that contains the commits to be
464 472 merged.
465 473 :param source_ref: `source_ref` points to the topmost commit from
466 474 the `source_repo` which should be merged.
467 475 :param workspace_id: `workspace_id` unique identifier.
468 476 :param user_name: Merge commit `user_name`.
469 477 :param user_email: Merge commit `user_email`.
470 478 :param message: Merge commit `message`.
471 479 :param dry_run: If `True` the merge will not take place.
472 480 :param use_rebase: If `True` commits from the source will be rebased
473 481 on top of the target instead of being merged.
474 482 :param close_branch: If `True` branch will be closed before merging it
475 483 """
476 484 if dry_run:
477 485 message = message or 'dry_run_merge_message'
478 486 user_email = user_email or 'dry-run-merge@rhodecode.com'
479 487 user_name = user_name or 'Dry-Run User'
480 488 else:
481 489 if not user_name:
482 490 raise ValueError('user_name cannot be empty')
483 491 if not user_email:
484 492 raise ValueError('user_email cannot be empty')
485 493 if not message:
486 494 raise ValueError('message cannot be empty')
487 495
488 496 shadow_repository_path = self._maybe_prepare_merge_workspace(
489 497 workspace_id, target_ref, source_ref)
490 498
491 499 try:
492 500 return self._merge_repo(
493 501 shadow_repository_path, target_ref, source_repo,
494 502 source_ref, message, user_name, user_email, dry_run=dry_run,
495 503 use_rebase=use_rebase, close_branch=close_branch)
496 504 except RepositoryError:
497 505 log.exception(
498 506 'Unexpected failure when running merge, dry-run=%s',
499 507 dry_run)
500 508 return MergeResponse(
501 509 False, False, None, MergeFailureReason.UNKNOWN)
502 510
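A hedged sketch of driving the merge API above. The reference tuples mirror the `(type, name, commit_id)` shape the docstring mentions; `target_repo`, `source_repo` and the branch names are illustrative assumptions:

    from __future__ import print_function
    from collections import namedtuple

    # stand-in for the reference objects callers normally pass in
    Ref = namedtuple('Ref', ['type', 'name', 'commit_id'])

    target = Ref('branch', 'default', target_repo.commit_ids[-1])
    source = Ref('branch', 'feature-x', source_repo.commit_ids[-1])

    # dry_run=True validates the merge without creating a merge commit
    response = target_repo.merge(
        target, source_repo, source, workspace_id='pr-42-workspace',
        user_name='Jane Doe', user_email='jane@example.com',
        message='merge feature-x into default', dry_run=True)
    print(response.possible, response.executed)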
503 511 def _merge_repo(self, shadow_repository_path, target_ref,
504 512 source_repo, source_ref, merge_message,
505 513 merger_name, merger_email, dry_run=False,
506 514 use_rebase=False, close_branch=False):
507 515 """Internal implementation of merge."""
508 516 raise NotImplementedError
509 517
510 518 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
511 519 """
512 520 Create the merge workspace.
513 521
514 522 :param workspace_id: `workspace_id` unique identifier.
515 523 """
516 524 raise NotImplementedError
517 525
518 526 def cleanup_merge_workspace(self, workspace_id):
519 527 """
520 528 Remove merge workspace.
521 529
522 530 This function MUST not fail in case there is no workspace associated
523 531 with the given `workspace_id`.
524 532
525 533 :param workspace_id: `workspace_id` unique identifier.
526 534 """
527 535 raise NotImplementedError
528 536
529 537 # ========== #
530 538 # COMMIT API #
531 539 # ========== #
532 540
533 541 @LazyProperty
534 542 def in_memory_commit(self):
535 543 """
536 544 Returns :class:`InMemoryCommit` object for this repository.
537 545 """
538 546 raise NotImplementedError
539 547
540 548 # ======================== #
541 549 # UTILITIES FOR SUBCLASSES #
542 550 # ======================== #
543 551
544 552 def _validate_diff_commits(self, commit1, commit2):
545 553 """
546 554 Validates that the given commits are related to this repository.
547 555
548 556 Intended as a utility for subclasses to have consistent validation
549 557 of input parameters in methods like :meth:`get_diff`.
550 558 """
551 559 self._validate_commit(commit1)
552 560 self._validate_commit(commit2)
553 561 if (isinstance(commit1, EmptyCommit) and
554 562 isinstance(commit2, EmptyCommit)):
555 563 raise ValueError("Cannot compare two empty commits")
556 564
557 565 def _validate_commit(self, commit):
558 566 if not isinstance(commit, BaseCommit):
559 567 raise TypeError(
560 568 "%s is not of type BaseCommit" % repr(commit))
561 569 if commit.repository != self and not isinstance(commit, EmptyCommit):
562 570 raise ValueError(
563 571 "Commit %s must be a valid commit from this repository %s, "
564 572 "related to this repository instead %s." %
565 573 (commit, self, commit.repository))
566 574
567 575 def _validate_commit_id(self, commit_id):
568 576 if not isinstance(commit_id, basestring):
569 577 raise TypeError("commit_id must be a string value")
570 578
571 579 def _validate_commit_idx(self, commit_idx):
572 580 if not isinstance(commit_idx, (int, long)):
573 581 raise TypeError("commit_idx must be a numeric value")
574 582
575 583 def _validate_branch_name(self, branch_name):
576 584 if branch_name and branch_name not in self.branches_all:
577 585 msg = ("Branch %s not found in %s" % (branch_name, self))
578 586 raise BranchDoesNotExistError(msg)
579 587
580 588 #
581 589 # Supporting deprecated API parts
582 590 # TODO: johbo: consider moving this into a mixin
583 591 #
584 592
585 593 @property
586 594 def EMPTY_CHANGESET(self):
587 595 warnings.warn(
588 596 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
589 597 return self.EMPTY_COMMIT_ID
590 598
591 599 @property
592 600 def revisions(self):
593 601 warnings.warn("Use commits attribute instead", DeprecationWarning)
594 602 return self.commit_ids
595 603
596 604 @revisions.setter
597 605 def revisions(self, value):
598 606 warnings.warn("Use commits attribute instead", DeprecationWarning)
599 607 self.commit_ids = value
600 608
601 609 def get_changeset(self, revision=None, pre_load=None):
602 610 warnings.warn("Use get_commit instead", DeprecationWarning)
603 611 commit_id = None
604 612 commit_idx = None
605 613 if isinstance(revision, basestring):
606 614 commit_id = revision
607 615 else:
608 616 commit_idx = revision
609 617 return self.get_commit(
610 618 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
611 619
612 620 def get_changesets(
613 621 self, start=None, end=None, start_date=None, end_date=None,
614 622 branch_name=None, pre_load=None):
615 623 warnings.warn("Use get_commits instead", DeprecationWarning)
616 624 start_id = self._revision_to_commit(start)
617 625 end_id = self._revision_to_commit(end)
618 626 return self.get_commits(
619 627 start_id=start_id, end_id=end_id, start_date=start_date,
620 628 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
621 629
622 630 def _revision_to_commit(self, revision):
623 631 """
624 632 Translates a revision to a commit_id
625 633
626 634 Helps to support the old changeset-based API which allows using
627 635 commit ids and commit indices interchangeably.
628 636 """
629 637 if revision is None:
630 638 return revision
631 639
632 640 if isinstance(revision, basestring):
633 641 commit_id = revision
634 642 else:
635 643 commit_id = self.commit_ids[revision]
636 644 return commit_id
637 645
638 646 @property
639 647 def in_memory_changeset(self):
640 648 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
641 649 return self.in_memory_commit
642 650
643 651 def get_path_permissions(self, username):
644 652 """
645 653 Returns a path permission checker or None if not supported
646 654
647 655 :param username: session user name
648 656 :return: an instance of BasePathPermissionChecker or None
649 657 """
650 658 return None
651 659
652 660 def install_hooks(self, force=False):
653 661 return self._remote.install_hooks(force)
654 662
655 663
656 664 class BaseCommit(object):
657 665 """
658 666 Each backend should implement its commit representation.
659 667
660 668 **Attributes**
661 669
662 670 ``repository``
663 671 repository object within which commit exists
664 672
665 673 ``id``
666 674 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
667 675 just ``tip``.
668 676
669 677 ``raw_id``
670 678 raw commit representation (i.e. full 40 length sha for git
671 679 backend)
672 680
673 681 ``short_id``
674 682 shortened (if applicable) version of ``raw_id``; it would be a simple
675 683 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
676 684 as ``raw_id`` for subversion
677 685
678 686 ``idx``
679 687 commit index
680 688
681 689 ``files``
682 690 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
683 691
684 692 ``dirs``
685 693 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
686 694
687 695 ``nodes``
688 696 combined list of ``Node`` objects
689 697
690 698 ``author``
691 699 author of the commit, as unicode
692 700
693 701 ``message``
694 702 message of the commit, as unicode
695 703
696 704 ``parents``
697 705 list of parent commits
698 706
699 707 """
700 708
701 709 branch = None
702 710 """
703 711 Depending on the backend this should be set to the branch name of the
704 712 commit. Backends not supporting branches on commits should leave this
705 713 value as ``None``.
706 714 """
707 715
708 716 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
709 717 """
710 718 This template is used to generate a default prefix for repository archives
711 719 if no prefix has been specified.
712 720 """
713 721
714 722 def __str__(self):
715 723 return '<%s at %s:%s>' % (
716 724 self.__class__.__name__, self.idx, self.short_id)
717 725
718 726 def __repr__(self):
719 727 return self.__str__()
720 728
721 729 def __unicode__(self):
722 730 return u'%s:%s' % (self.idx, self.short_id)
723 731
724 732 def __eq__(self, other):
725 733 same_instance = isinstance(other, self.__class__)
726 734 return same_instance and self.raw_id == other.raw_id
727 735
728 736 def __json__(self):
729 737 parents = []
730 738 try:
731 739 for parent in self.parents:
732 740 parents.append({'raw_id': parent.raw_id})
733 741 except NotImplementedError:
734 742 # empty commit doesn't have parents implemented
735 743 pass
736 744
737 745 return {
738 746 'short_id': self.short_id,
739 747 'raw_id': self.raw_id,
740 748 'revision': self.idx,
741 749 'message': self.message,
742 750 'date': self.date,
743 751 'author': self.author,
744 752 'parents': parents,
745 753 'branch': self.branch
746 754 }
747 755
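Since ``__json__`` above returns a plain dict, a commit serializes without extra glue; a minimal sketch (`commit` is any concrete ``BaseCommit``, and ``default=str`` is needed because ``date`` is a datetime):

    import json

    payload = commit.__json__()
    print(json.dumps(payload, default=str, indent=2))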
756 def __getstate__(self):
757 d = self.__dict__.copy()
758 d.pop('_remote', None)
759 d.pop('repository', None)
760 return d
761
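The new ``__getstate__`` hook drops the unpicklable remote/repository handles, which is what lets commit objects be stored by the diff cache this changeset introduces. A rough sketch of the intended effect; exact attribute availability after unpickling depends on the backend and on which lazy properties were already computed:

    import pickle

    commit.raw_id                    # touch the lazy property so it is cached on the instance
    blob = pickle.dumps(commit)      # works because _remote/repository are stripped
    restored = pickle.loads(blob)
    print(restored.raw_id)                     # survives the round trip
    print(hasattr(restored, 'repository'))     # typically False: re-attached by the caller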
748 762 def _get_refs(self):
749 763 return {
750 764 'branches': [self.branch] if self.branch else [],
751 765 'bookmarks': getattr(self, 'bookmarks', []),
752 766 'tags': self.tags
753 767 }
754 768
755 769 @LazyProperty
756 770 def last(self):
757 771 """
758 772 ``True`` if this is the last commit in the repository, ``False``
759 773 otherwise; trying to access this attribute when there are no
760 774 commits raises `EmptyRepositoryError`
761 775 """
762 776 if self.repository is None:
763 777 raise CommitError("Cannot check if it's most recent commit")
764 778 return self.raw_id == self.repository.commit_ids[-1]
765 779
766 780 @LazyProperty
767 781 def parents(self):
768 782 """
769 783 Returns list of parent commits.
770 784 """
771 785 raise NotImplementedError
772 786
773 787 @property
774 788 def merge(self):
775 789 """
776 790 Returns boolean if commit is a merge.
777 791 """
778 792 return len(self.parents) > 1
779 793
780 794 @LazyProperty
781 795 def children(self):
782 796 """
783 797 Returns list of child commits.
784 798 """
785 799 raise NotImplementedError
786 800
787 801 @LazyProperty
788 802 def id(self):
789 803 """
790 804 Returns string identifying this commit.
791 805 """
792 806 raise NotImplementedError
793 807
794 808 @LazyProperty
795 809 def raw_id(self):
796 810 """
797 811 Returns raw string identifying this commit.
798 812 """
799 813 raise NotImplementedError
800 814
801 815 @LazyProperty
802 816 def short_id(self):
803 817 """
804 818 Returns shortened version of ``raw_id`` attribute, as string,
805 819 identifying this commit, useful for presentation to users.
806 820 """
807 821 raise NotImplementedError
808 822
809 823 @LazyProperty
810 824 def idx(self):
811 825 """
812 826 Returns integer identifying this commit.
813 827 """
814 828 raise NotImplementedError
815 829
816 830 @LazyProperty
817 831 def committer(self):
818 832 """
819 833 Returns committer for this commit
820 834 """
821 835 raise NotImplementedError
822 836
823 837 @LazyProperty
824 838 def committer_name(self):
825 839 """
826 840 Returns committer name for this commit
827 841 """
828 842
829 843 return author_name(self.committer)
830 844
831 845 @LazyProperty
832 846 def committer_email(self):
833 847 """
834 848 Returns committer email address for this commit
835 849 """
836 850
837 851 return author_email(self.committer)
838 852
839 853 @LazyProperty
840 854 def author(self):
841 855 """
842 856 Returns author for this commit
843 857 """
844 858
845 859 raise NotImplementedError
846 860
847 861 @LazyProperty
848 862 def author_name(self):
849 863 """
850 864 Returns author name for this commit
851 865 """
852 866
853 867 return author_name(self.author)
854 868
855 869 @LazyProperty
856 870 def author_email(self):
857 871 """
858 872 Returns author email address for this commit
859 873 """
860 874
861 875 return author_email(self.author)
862 876
863 877 def get_file_mode(self, path):
864 878 """
865 879 Returns stat mode of the file at `path`.
866 880 """
867 881 raise NotImplementedError
868 882
869 883 def is_link(self, path):
870 884 """
871 885 Returns ``True`` if given `path` is a symlink
872 886 """
873 887 raise NotImplementedError
874 888
875 889 def get_file_content(self, path):
876 890 """
877 891 Returns content of the file at the given `path`.
878 892 """
879 893 raise NotImplementedError
880 894
881 895 def get_file_size(self, path):
882 896 """
883 897 Returns size of the file at the given `path`.
884 898 """
885 899 raise NotImplementedError
886 900
887 901 def get_file_commit(self, path, pre_load=None):
888 902 """
889 903 Returns last commit of the file at the given `path`.
890 904
891 905 :param pre_load: Optional. List of commit attributes to load.
892 906 """
893 907 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
894 908 if not commits:
895 909 raise RepositoryError(
896 910 'Failed to fetch history for path {}. '
897 911 'Please check if such path exists in your repository'.format(
898 912 path))
899 913 return commits[0]
900 914
901 915 def get_file_history(self, path, limit=None, pre_load=None):
902 916 """
903 917 Returns history of file as reversed list of :class:`BaseCommit`
904 918 objects for which file at given `path` has been modified.
905 919
906 920 :param limit: Optional. Allows to limit the size of the returned
907 921 history. This is intended as a hint to the underlying backend, so
908 922 that it can apply optimizations depending on the limit.
909 923 :param pre_load: Optional. List of commit attributes to load.
910 924 """
911 925 raise NotImplementedError
912 926
913 927 def get_file_annotate(self, path, pre_load=None):
914 928 """
915 929 Returns a generator of four-element tuples with
916 930 lineno, sha, commit lazy loader and line
917 931
918 932 :param pre_load: Optional. List of commit attributes to load.
919 933 """
920 934 raise NotImplementedError
921 935
922 936 def get_nodes(self, path):
923 937 """
924 938 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
925 939 the state of the commit at the given ``path``.
926 940
927 941 :raises ``CommitError``: if node at the given ``path`` is not
928 942 instance of ``DirNode``
929 943 """
930 944 raise NotImplementedError
931 945
932 946 def get_node(self, path):
933 947 """
934 948 Returns ``Node`` object from the given ``path``.
935 949
936 950 :raises ``NodeDoesNotExistError``: if there is no node at the given
937 951 ``path``
938 952 """
939 953 raise NotImplementedError
940 954
941 955 def get_largefile_node(self, path):
942 956 """
943 957 Returns the path to a largefile from Mercurial/Git-lfs storage,
944 958 or None if it's not a largefile node
945 959 """
946 960 return None
947 961
948 962 def archive_repo(self, file_path, kind='tgz', subrepos=None,
949 963 prefix=None, write_metadata=False, mtime=None):
950 964 """
951 965 Creates an archive containing the contents of the repository.
952 966
953 967 :param file_path: path of the file in which to create the archive.
954 968 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
955 969 :param prefix: name of root directory in archive.
956 970 Default is repository name and commit's short_id joined with dash:
957 971 ``"{repo_name}-{short_id}"``.
958 972 :param write_metadata: write a metadata file into archive.
959 973 :param mtime: custom modification time for archive creation, defaults
960 974 to time.time() if not given.
961 975
962 976 :raise VCSError: If prefix has a problem.
963 977 """
964 978 allowed_kinds = settings.ARCHIVE_SPECS.keys()
965 979 if kind not in allowed_kinds:
966 980 raise ImproperArchiveTypeError(
967 981 'Archive kind (%s) not supported use one of %s' %
968 982 (kind, allowed_kinds))
969 983
970 984 prefix = self._validate_archive_prefix(prefix)
971 985
972 986 mtime = mtime or time.mktime(self.date.timetuple())
973 987
974 988 file_info = []
975 989 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
976 990 for _r, _d, files in cur_rev.walk('/'):
977 991 for f in files:
978 992 f_path = os.path.join(prefix, f.path)
979 993 file_info.append(
980 994 (f_path, f.mode, f.is_link(), f.raw_bytes))
981 995
982 996 if write_metadata:
983 997 metadata = [
984 998 ('repo_name', self.repository.name),
985 999 ('rev', self.raw_id),
986 1000 ('create_time', mtime),
987 1001 ('branch', self.branch),
988 1002 ('tags', ','.join(self.tags)),
989 1003 ]
990 1004 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
991 1005 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
992 1006
993 1007 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
994 1008
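A usage sketch for the archiving helper above; the output path and the `commit` object are illustrative assumptions:

    # `commit` is a BaseCommit from a git or hg backend
    commit.archive_repo(
        '/tmp/example-archive.tgz', kind='tgz',
        prefix=None,             # falls back to "{repo_name}-{short_id}"
        write_metadata=True)     # adds a .archival.txt entry to the archive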
995 1009 def _validate_archive_prefix(self, prefix):
996 1010 if prefix is None:
997 1011 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
998 1012 repo_name=safe_str(self.repository.name),
999 1013 short_id=self.short_id)
1000 1014 elif not isinstance(prefix, str):
1001 1015 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1002 1016 elif prefix.startswith('/'):
1003 1017 raise VCSError("Prefix cannot start with leading slash")
1004 1018 elif prefix.strip() == '':
1005 1019 raise VCSError("Prefix cannot be empty")
1006 1020 return prefix
1007 1021
1008 1022 @LazyProperty
1009 1023 def root(self):
1010 1024 """
1011 1025 Returns ``RootNode`` object for this commit.
1012 1026 """
1013 1027 return self.get_node('')
1014 1028
1015 1029 def next(self, branch=None):
1016 1030 """
1017 1031 Returns the next commit from the current one; if branch is given it will
1018 1032 return the next commit belonging to that branch
1019 1033
1020 1034 :param branch: show commits within the given named branch
1021 1035 """
1022 1036 indexes = xrange(self.idx + 1, self.repository.count())
1023 1037 return self._find_next(indexes, branch)
1024 1038
1025 1039 def prev(self, branch=None):
1026 1040 """
1027 1041 Returns the previous commit from the current one; if branch is given it
1028 1042 will return the previous commit belonging to that branch
1029 1043
1030 1044 :param branch: show commit within the given named branch
1031 1045 """
1032 1046 indexes = xrange(self.idx - 1, -1, -1)
1033 1047 return self._find_next(indexes, branch)
1034 1048
1035 1049 def _find_next(self, indexes, branch=None):
1036 1050 if branch and self.branch != branch:
1037 1051 raise VCSError('Branch option used on commit not belonging '
1038 1052 'to that branch')
1039 1053
1040 1054 for next_idx in indexes:
1041 1055 commit = self.repository.get_commit(commit_idx=next_idx)
1042 1056 if branch and branch != commit.branch:
1043 1057 continue
1044 1058 return commit
1045 1059 raise CommitDoesNotExistError
1046 1060
1047 1061 def diff(self, ignore_whitespace=True, context=3):
1048 1062 """
1049 1063 Returns a `Diff` object representing the change made by this commit.
1050 1064 """
1051 1065 parent = (
1052 1066 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1053 1067 diff = self.repository.get_diff(
1054 1068 parent, self,
1055 1069 ignore_whitespace=ignore_whitespace,
1056 1070 context=context)
1057 1071 return diff
1058 1072
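Combining the navigation helpers and ``diff()`` above, a small illustrative walk backwards through history (`repo` is an assumed, already-initialized backend repository):

    from __future__ import print_function
    from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError

    commit = repo.get_commit()           # most recent commit
    while True:
        print(commit.short_id, len(commit.diff().raw), 'bytes of raw diff')
        try:
            commit = commit.prev()
        except CommitDoesNotExistError:
            break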
1059 1073 @LazyProperty
1060 1074 def added(self):
1061 1075 """
1062 1076 Returns list of added ``FileNode`` objects.
1063 1077 """
1064 1078 raise NotImplementedError
1065 1079
1066 1080 @LazyProperty
1067 1081 def changed(self):
1068 1082 """
1069 1083 Returns list of modified ``FileNode`` objects.
1070 1084 """
1071 1085 raise NotImplementedError
1072 1086
1073 1087 @LazyProperty
1074 1088 def removed(self):
1075 1089 """
1076 1090 Returns list of removed ``FileNode`` objects.
1077 1091 """
1078 1092 raise NotImplementedError
1079 1093
1080 1094 @LazyProperty
1081 1095 def size(self):
1082 1096 """
1083 1097 Returns total number of bytes from contents of all filenodes.
1084 1098 """
1085 1099 return sum((node.size for node in self.get_filenodes_generator()))
1086 1100
1087 1101 def walk(self, topurl=''):
1088 1102 """
1089 1103 Similar to the os.walk method. Instead of a filesystem it walks through
1090 1104 commit starting at given ``topurl``. Returns generator of tuples
1091 1105 (topnode, dirnodes, filenodes).
1092 1106 """
1093 1107 topnode = self.get_node(topurl)
1094 1108 if not topnode.is_dir():
1095 1109 return
1096 1110 yield (topnode, topnode.dirs, topnode.files)
1097 1111 for dirnode in topnode.dirs:
1098 1112 for tup in self.walk(dirnode.path):
1099 1113 yield tup
1100 1114
1101 1115 def get_filenodes_generator(self):
1102 1116 """
1103 1117 Returns generator that yields *all* file nodes.
1104 1118 """
1105 1119 for topnode, dirs, files in self.walk():
1106 1120 for node in files:
1107 1121 yield node
1108 1122
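A short sketch of walking a commit tree with the helpers above (`commit` again stands for any concrete ``BaseCommit``):

    from __future__ import print_function

    # list every file path and size reachable from the root at this commit
    for topnode, dirs, files in commit.walk('/'):
        for filenode in files:
            print(filenode.path, filenode.size)

    # or sum everything at once via the size property
    print(commit.size, 'bytes in total')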
1109 1123 #
1110 1124 # Utilities for sub classes to support consistent behavior
1111 1125 #
1112 1126
1113 1127 def no_node_at_path(self, path):
1114 1128 return NodeDoesNotExistError(
1115 1129 u"There is no file nor directory at the given path: "
1116 1130 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1117 1131
1118 1132 def _fix_path(self, path):
1119 1133 """
1120 1134 Paths are stored without trailing slash so we need to get rid of it if
1121 1135 needed.
1122 1136 """
1123 1137 return path.rstrip('/')
1124 1138
1125 1139 #
1126 1140 # Deprecated API based on changesets
1127 1141 #
1128 1142
1129 1143 @property
1130 1144 def revision(self):
1131 1145 warnings.warn("Use idx instead", DeprecationWarning)
1132 1146 return self.idx
1133 1147
1134 1148 @revision.setter
1135 1149 def revision(self, value):
1136 1150 warnings.warn("Use idx instead", DeprecationWarning)
1137 1151 self.idx = value
1138 1152
1139 1153 def get_file_changeset(self, path):
1140 1154 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1141 1155 return self.get_file_commit(path)
1142 1156
1143 1157
1144 1158 class BaseChangesetClass(type):
1145 1159
1146 1160 def __instancecheck__(self, instance):
1147 1161 return isinstance(instance, BaseCommit)
1148 1162
1149 1163
1150 1164 class BaseChangeset(BaseCommit):
1151 1165
1152 1166 __metaclass__ = BaseChangesetClass
1153 1167
1154 1168 def __new__(cls, *args, **kwargs):
1155 1169 warnings.warn(
1156 1170 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1157 1171 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1158 1172
1159 1173
1160 1174 class BaseInMemoryCommit(object):
1161 1175 """
1162 1176 Represents differences between repository's state (most recent head) and
1163 1177 changes made *in place*.
1164 1178
1165 1179 **Attributes**
1166 1180
1167 1181 ``repository``
1168 1182 repository object for this in-memory-commit
1169 1183
1170 1184 ``added``
1171 1185 list of ``FileNode`` objects marked as *added*
1172 1186
1173 1187 ``changed``
1174 1188 list of ``FileNode`` objects marked as *changed*
1175 1189
1176 1190 ``removed``
1177 1191 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1178 1192 *removed*
1179 1193
1180 1194 ``parents``
1181 1195 list of :class:`BaseCommit` instances representing parents of
1182 1196 in-memory commit. Should always be 2-element sequence.
1183 1197
1184 1198 """
1185 1199
1186 1200 def __init__(self, repository):
1187 1201 self.repository = repository
1188 1202 self.added = []
1189 1203 self.changed = []
1190 1204 self.removed = []
1191 1205 self.parents = []
1192 1206
1193 1207 def add(self, *filenodes):
1194 1208 """
1195 1209 Marks given ``FileNode`` objects as *to be committed*.
1196 1210
1197 1211 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1198 1212 latest commit
1199 1213 :raises ``NodeAlreadyAddedError``: if node with same path is already
1200 1214 marked as *added*
1201 1215 """
1202 1216 # Check if not already marked as *added* first
1203 1217 for node in filenodes:
1204 1218 if node.path in (n.path for n in self.added):
1205 1219 raise NodeAlreadyAddedError(
1206 1220 "Such FileNode %s is already marked for addition"
1207 1221 % node.path)
1208 1222 for node in filenodes:
1209 1223 self.added.append(node)
1210 1224
1211 1225 def change(self, *filenodes):
1212 1226 """
1213 1227 Marks given ``FileNode`` objects to be *changed* in next commit.
1214 1228
1215 1229 :raises ``EmptyRepositoryError``: if there are no commits yet
1216 1230 :raises ``NodeAlreadyExistsError``: if node with same path is already
1217 1231 marked to be *changed*
1218 1232 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1219 1233 marked to be *removed*
1220 1234 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1221 1235 commit
1222 1236 :raises ``NodeNotChangedError``: if node hasn't really been changed
1223 1237 """
1224 1238 for node in filenodes:
1225 1239 if node.path in (n.path for n in self.removed):
1226 1240 raise NodeAlreadyRemovedError(
1227 1241 "Node at %s is already marked as removed" % node.path)
1228 1242 try:
1229 1243 self.repository.get_commit()
1230 1244 except EmptyRepositoryError:
1231 1245 raise EmptyRepositoryError(
1232 1246 "Nothing to change - try to *add* new nodes rather than "
1233 1247 "changing them")
1234 1248 for node in filenodes:
1235 1249 if node.path in (n.path for n in self.changed):
1236 1250 raise NodeAlreadyChangedError(
1237 1251 "Node at '%s' is already marked as changed" % node.path)
1238 1252 self.changed.append(node)
1239 1253
1240 1254 def remove(self, *filenodes):
1241 1255 """
1242 1256 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1243 1257 *removed* in next commit.
1244 1258
1245 1259 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1246 1260 be *removed*
1247 1261 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1248 1262 be *changed*
1249 1263 """
1250 1264 for node in filenodes:
1251 1265 if node.path in (n.path for n in self.removed):
1252 1266 raise NodeAlreadyRemovedError(
1253 1267 "Node is already marked to for removal at %s" % node.path)
1254 1268 if node.path in (n.path for n in self.changed):
1255 1269 raise NodeAlreadyChangedError(
1256 1270 "Node is already marked to be changed at %s" % node.path)
1257 1271 # We only mark node as *removed* - real removal is done by
1258 1272 # commit method
1259 1273 self.removed.append(node)
1260 1274
1261 1275 def reset(self):
1262 1276 """
1263 1277 Resets this instance to initial state (cleans ``added``, ``changed``
1264 1278 and ``removed`` lists).
1265 1279 """
1266 1280 self.added = []
1267 1281 self.changed = []
1268 1282 self.removed = []
1269 1283 self.parents = []
1270 1284
1271 1285 def get_ipaths(self):
1272 1286 """
1273 1287 Returns generator of paths from nodes marked as added, changed or
1274 1288 removed.
1275 1289 """
1276 1290 for node in itertools.chain(self.added, self.changed, self.removed):
1277 1291 yield node.path
1278 1292
1279 1293 def get_paths(self):
1280 1294 """
1281 1295 Returns list of paths from nodes marked as added, changed or removed.
1282 1296 """
1283 1297 return list(self.get_ipaths())
1284 1298
1285 1299 def check_integrity(self, parents=None):
1286 1300 """
1287 1301 Checks in-memory commit's integrity. Also, sets parents if not
1288 1302 already set.
1289 1303
1290 1304 :raises CommitError: if any error occurs (e.g.
1291 1305 ``NodeDoesNotExistError``).
1292 1306 """
1293 1307 if not self.parents:
1294 1308 parents = parents or []
1295 1309 if len(parents) == 0:
1296 1310 try:
1297 1311 parents = [self.repository.get_commit(), None]
1298 1312 except EmptyRepositoryError:
1299 1313 parents = [None, None]
1300 1314 elif len(parents) == 1:
1301 1315 parents += [None]
1302 1316 self.parents = parents
1303 1317
1304 1318 # Local parents, only if not None
1305 1319 parents = [p for p in self.parents if p]
1306 1320
1307 1321 # Check nodes marked as added
1308 1322 for p in parents:
1309 1323 for node in self.added:
1310 1324 try:
1311 1325 p.get_node(node.path)
1312 1326 except NodeDoesNotExistError:
1313 1327 pass
1314 1328 else:
1315 1329 raise NodeAlreadyExistsError(
1316 1330 "Node `%s` already exists at %s" % (node.path, p))
1317 1331
1318 1332 # Check nodes marked as changed
1319 1333 missing = set(self.changed)
1320 1334 not_changed = set(self.changed)
1321 1335 if self.changed and not parents:
1322 1336 raise NodeDoesNotExistError(str(self.changed[0].path))
1323 1337 for p in parents:
1324 1338 for node in self.changed:
1325 1339 try:
1326 1340 old = p.get_node(node.path)
1327 1341 missing.remove(node)
1328 1342 # if content actually changed, remove node from not_changed
1329 1343 if old.content != node.content:
1330 1344 not_changed.remove(node)
1331 1345 except NodeDoesNotExistError:
1332 1346 pass
1333 1347 if self.changed and missing:
1334 1348 raise NodeDoesNotExistError(
1335 1349 "Node `%s` marked as modified but missing in parents: %s"
1336 1350 % (node.path, parents))
1337 1351
1338 1352 if self.changed and not_changed:
1339 1353 raise NodeNotChangedError(
1340 1354 "Node `%s` wasn't actually changed (parents: %s)"
1341 1355 % (not_changed.pop().path, parents))
1342 1356
1343 1357 # Check nodes marked as removed
1344 1358 if self.removed and not parents:
1345 1359 raise NodeDoesNotExistError(
1346 1360 "Cannot remove node at %s as there "
1347 1361 "were no parents specified" % self.removed[0].path)
1348 1362 really_removed = set()
1349 1363 for p in parents:
1350 1364 for node in self.removed:
1351 1365 try:
1352 1366 p.get_node(node.path)
1353 1367 really_removed.add(node)
1354 1368 except CommitError:
1355 1369 pass
1356 1370 not_removed = set(self.removed) - really_removed
1357 1371 if not_removed:
1358 1372 # TODO: johbo: This code branch does not seem to be covered
1359 1373 raise NodeDoesNotExistError(
1360 1374 "Cannot remove node at %s from "
1361 1375 "following parents: %s" % (not_removed, parents))
1362 1376
1363 1377 def commit(
1364 1378 self, message, author, parents=None, branch=None, date=None,
1365 1379 **kwargs):
1366 1380 """
1367 1381 Performs in-memory commit (doesn't check workdir in any way) and
1368 1382 returns newly created :class:`BaseCommit`. Updates repository's
1369 1383 attribute `commits`.
1370 1384
1371 1385 .. note::
1372 1386
1373 1387 While overriding this method each backend should call
1374 1388 ``self.check_integrity(parents)`` in the first place.
1375 1389
1376 1390 :param message: message of the commit
1377 1391 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1378 1392 :param parents: single parent or sequence of parents from which commit
1379 1393 would be derived
1380 1394 :param date: ``datetime.datetime`` instance. Defaults to
1381 1395 ``datetime.datetime.now()``.
1382 1396 :param branch: branch name, as string. If none given, the backend's
1383 1397 default branch will be used.
1384 1398
1385 1399 :raises ``CommitError``: if any error occurs while committing
1386 1400 """
1387 1401 raise NotImplementedError
1388 1402
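Tying the in-memory commit API together, a hedged end-to-end sketch; ``FileNode`` comes from the nodes module and `repo` is an assumed backend repository with write access:

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/readme.rst', content='hello'))
    imc.change(FileNode('setup.py', content='# updated content'))
    new_commit = imc.commit(
        message=u'Add readme, tweak setup.py',
        author=u'Jane Doe <jane@example.com>')
    print(new_commit.raw_id)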
1389 1403
1390 1404 class BaseInMemoryChangesetClass(type):
1391 1405
1392 1406 def __instancecheck__(self, instance):
1393 1407 return isinstance(instance, BaseInMemoryCommit)
1394 1408
1395 1409
1396 1410 class BaseInMemoryChangeset(BaseInMemoryCommit):
1397 1411
1398 1412 __metaclass__ = BaseInMemoryChangesetClass
1399 1413
1400 1414 def __new__(cls, *args, **kwargs):
1401 1415 warnings.warn(
1402 1416 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1403 1417 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1404 1418
1405 1419
1406 1420 class EmptyCommit(BaseCommit):
1407 1421 """
1408 1422 A dummy empty commit. It's possible to pass a hash when creating
1409 1423 an EmptyCommit
1410 1424 """
1411 1425
1412 1426 def __init__(
1413 1427 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1414 1428 message='', author='', date=None):
1415 1429 self._empty_commit_id = commit_id
1416 1430 # TODO: johbo: Solve idx parameter, default value does not make
1417 1431 # too much sense
1418 1432 self.idx = idx
1419 1433 self.message = message
1420 1434 self.author = author
1421 1435 self.date = date or datetime.datetime.fromtimestamp(0)
1422 1436 self.repository = repo
1423 1437 self.alias = alias
1424 1438
1425 1439 @LazyProperty
1426 1440 def raw_id(self):
1427 1441 """
1428 1442 Returns raw string identifying this commit, useful for web
1429 1443 representation.
1430 1444 """
1431 1445
1432 1446 return self._empty_commit_id
1433 1447
1434 1448 @LazyProperty
1435 1449 def branch(self):
1436 1450 if self.alias:
1437 1451 from rhodecode.lib.vcs.backends import get_backend
1438 1452 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1439 1453
1440 1454 @LazyProperty
1441 1455 def short_id(self):
1442 1456 return self.raw_id[:12]
1443 1457
1444 1458 @LazyProperty
1445 1459 def id(self):
1446 1460 return self.raw_id
1447 1461
1448 1462 def get_file_commit(self, path):
1449 1463 return self
1450 1464
1451 1465 def get_file_content(self, path):
1452 1466 return u''
1453 1467
1454 1468 def get_file_size(self, path):
1455 1469 return 0
1456 1470
1457 1471
1458 1472 class EmptyChangesetClass(type):
1459 1473
1460 1474 def __instancecheck__(self, instance):
1461 1475 return isinstance(instance, EmptyCommit)
1462 1476
1463 1477
1464 1478 class EmptyChangeset(EmptyCommit):
1465 1479
1466 1480 __metaclass__ = EmptyChangesetClass
1467 1481
1468 1482 def __new__(cls, *args, **kwargs):
1469 1483 warnings.warn(
1470 1484 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1471 1485 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1472 1486
1473 1487 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1474 1488 alias=None, revision=-1, message='', author='', date=None):
1475 1489 if requested_revision is not None:
1476 1490 warnings.warn(
1477 1491 "Parameter requested_revision not supported anymore",
1478 1492 DeprecationWarning)
1479 1493 super(EmptyChangeset, self).__init__(
1480 1494 commit_id=cs, repo=repo, alias=alias, idx=revision,
1481 1495 message=message, author=author, date=date)
1482 1496
1483 1497 @property
1484 1498 def revision(self):
1485 1499 warnings.warn("Use idx instead", DeprecationWarning)
1486 1500 return self.idx
1487 1501
1488 1502 @revision.setter
1489 1503 def revision(self, value):
1490 1504 warnings.warn("Use idx instead", DeprecationWarning)
1491 1505 self.idx = value
1492 1506
1493 1507
1494 1508 class EmptyRepository(BaseRepository):
1495 1509 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1496 1510 pass
1497 1511
1498 1512 def get_diff(self, *args, **kwargs):
1499 1513 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1500 1514 return GitDiff('')
1501 1515
1502 1516
1503 1517 class CollectionGenerator(object):
1504 1518
1505 1519 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1506 1520 self.repo = repo
1507 1521 self.commit_ids = commit_ids
1508 1522 # TODO: (oliver) this isn't currently hooked up
1509 1523 self.collection_size = None
1510 1524 self.pre_load = pre_load
1511 1525
1512 1526 def __len__(self):
1513 1527 if self.collection_size is not None:
1514 1528 return self.collection_size
1515 1529 return self.commit_ids.__len__()
1516 1530
1517 1531 def __iter__(self):
1518 1532 for commit_id in self.commit_ids:
1519 1533 # TODO: johbo: Mercurial passes in commit indices or commit ids
1520 1534 yield self._commit_factory(commit_id)
1521 1535
1522 1536 def _commit_factory(self, commit_id):
1523 1537 """
1524 1538 Allows backends to override the way commits are generated.
1525 1539 """
1526 1540 return self.repo.get_commit(commit_id=commit_id,
1527 1541 pre_load=self.pre_load)
1528 1542
1529 1543 def __getslice__(self, i, j):
1530 1544 """
1531 1545 Returns an iterator over the sliced repository commits
1532 1546 """
1533 1547 commit_ids = self.commit_ids[i:j]
1534 1548 return self.__class__(
1535 1549 self.repo, commit_ids, pre_load=self.pre_load)
1536 1550
1537 1551 def __repr__(self):
1538 1552 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1539 1553
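The generator above keeps commit loading lazy: ``len()`` only counts ids and slicing yields another generator instead of a list. A brief sketch, assuming `repo.get_commits()` returns a ``CollectionGenerator``:

    from __future__ import print_function

    commits = repo.get_commits(pre_load=['author', 'message'])
    print(len(commits))                 # cheap, no commit objects created yet
    for commit in commits[:10]:         # slicing wraps the sliced ids in a new generator
        print(commit.short_id, commit.author)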
1540 1554
1541 1555 class Config(object):
1542 1556 """
1543 1557 Represents the configuration for a repository.
1544 1558
1545 1559 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1546 1560 standard library. It implements only the needed subset.
1547 1561 """
1548 1562
1549 1563 def __init__(self):
1550 1564 self._values = {}
1551 1565
1552 1566 def copy(self):
1553 1567 clone = Config()
1554 1568 for section, values in self._values.items():
1555 1569 clone._values[section] = values.copy()
1556 1570 return clone
1557 1571
1558 1572 def __repr__(self):
1559 1573 return '<Config(%s sections) at %s>' % (
1560 1574 len(self._values), hex(id(self)))
1561 1575
1562 1576 def items(self, section):
1563 1577 return self._values.get(section, {}).iteritems()
1564 1578
1565 1579 def get(self, section, option):
1566 1580 return self._values.get(section, {}).get(option)
1567 1581
1568 1582 def set(self, section, option, value):
1569 1583 section_values = self._values.setdefault(section, {})
1570 1584 section_values[option] = value
1571 1585
1572 1586 def clear_section(self, section):
1573 1587 self._values[section] = {}
1574 1588
1575 1589 def serialize(self):
1576 1590 """
1577 1591 Creates a list of three tuples (section, key, value) representing
1578 1592 this config object.
1579 1593 """
1580 1594 items = []
1581 1595 for section in self._values:
1582 1596 for option, value in self._values[section].items():
1583 1597 items.append(
1584 1598 (safe_str(section), safe_str(option), safe_str(value)))
1585 1599 return items
1586 1600
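A quick sketch of the ``Config`` API above; the section and option names are made up for illustration:

    from __future__ import print_function

    config = Config()
    config.set('extensions', 'largefiles', '')
    config.set('phases', 'publish', 'False')

    print(config.get('phases', 'publish'))
    for section, option, value in config.serialize():
        print(section, option, value)

    clone = config.copy()               # copies each section dict independently
    clone.clear_section('extensions')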
1587 1601
1588 1602 class Diff(object):
1589 1603 """
1590 1604 Represents a diff result from a repository backend.
1591 1605
1592 1606 Subclasses have to provide a backend specific value for
1593 1607 :attr:`_header_re` and :attr:`_meta_re`.
1594 1608 """
1595 1609 _meta_re = None
1596 1610 _header_re = None
1597 1611
1598 1612 def __init__(self, raw_diff):
1599 1613 self.raw = raw_diff
1600 1614
1601 1615 def chunks(self):
1602 1616 """
1603 1617 Split the diff into chunks of separate --git a/file b/file chunks.
1604 1618 To make diffs consistent we must prepend them with \n, and make sure
1605 1619 we can detect the last chunk, as it also has a special rule
1606 1620 """
1607 1621
1608 1622 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1609 1623 header = diff_parts[0]
1610 1624
1611 1625 if self._meta_re:
1612 1626 match = self._meta_re.match(header)
1613 1627
1614 1628 chunks = diff_parts[1:]
1615 1629 total_chunks = len(chunks)
1616 1630
1617 1631 return (
1618 1632 DiffChunk(chunk, self, cur_chunk == total_chunks)
1619 1633 for cur_chunk, chunk in enumerate(chunks, start=1))
1620 1634
1621 1635
1622 1636 class DiffChunk(object):
1623 1637
1624 1638 def __init__(self, chunk, diff, last_chunk):
1625 1639 self._diff = diff
1626 1640
1627 1641 # since we split by \ndiff --git that part is lost from original diff
1628 1642 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1629 1643 if not last_chunk:
1630 1644 chunk += '\n'
1631 1645
1632 1646 match = self._diff._header_re.match(chunk)
1633 1647 self.header = match.groupdict()
1634 1648 self.diff = chunk[match.end():]
1635 1649 self.raw = chunk
1636 1650
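A sketch of consuming the chunking above. The concrete ``GitDiff`` subclass is used because the base class leaves ``_header_re``/``_meta_re`` unset; the diff file path is an assumption:

    from __future__ import print_function
    from rhodecode.lib.vcs.backends.git.diff import GitDiff

    raw = open('/tmp/example.diff', 'rb').read()     # any git-style unified diff text
    for chunk in GitDiff(raw).chunks():
        # header is the groupdict parsed from the per-file "diff --git" header
        print(sorted(chunk.header), len(chunk.diff.splitlines()), 'diff lines')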
1637 1651
1638 1652 class BasePathPermissionChecker(object):
1639 1653
1640 1654 @staticmethod
1641 1655 def create_from_patterns(includes, excludes):
1642 1656 if includes and '*' in includes and not excludes:
1643 1657 return AllPathPermissionChecker()
1644 1658 elif excludes and '*' in excludes:
1645 1659 return NonePathPermissionChecker()
1646 1660 else:
1647 1661 return PatternPathPermissionChecker(includes, excludes)
1648 1662
1649 1663 @property
1650 1664 def has_full_access(self):
1651 1665 raise NotImplementedError()
1652 1666
1653 1667 def has_access(self, path):
1654 1668 raise NotImplementedError()
1655 1669
1656 1670
1657 1671 class AllPathPermissionChecker(BasePathPermissionChecker):
1658 1672
1659 1673 @property
1660 1674 def has_full_access(self):
1661 1675 return True
1662 1676
1663 1677 def has_access(self, path):
1664 1678 return True
1665 1679
1666 1680
1667 1681 class NonePathPermissionChecker(BasePathPermissionChecker):
1668 1682
1669 1683 @property
1670 1684 def has_full_access(self):
1671 1685 return False
1672 1686
1673 1687 def has_access(self, path):
1674 1688 return False
1675 1689
1676 1690
1677 1691 class PatternPathPermissionChecker(BasePathPermissionChecker):
1678 1692
1679 1693 def __init__(self, includes, excludes):
1680 1694 self.includes = includes
1681 1695 self.excludes = excludes
1682 1696 self.includes_re = [] if not includes else [
1683 1697 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1684 1698 self.excludes_re = [] if not excludes else [
1685 1699 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1686 1700
1687 1701 @property
1688 1702 def has_full_access(self):
1689 1703 return '*' in self.includes and not self.excludes
1690 1704
1691 1705 def has_access(self, path):
1692 1706 for regex in self.excludes_re:
1693 1707 if regex.match(path):
1694 1708 return False
1695 1709 for regex in self.includes_re:
1696 1710 if regex.match(path):
1697 1711 return True
1698 1712 return False
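A brief sketch of the permission-checker factory above; the glob patterns are invented for illustration:

    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'src/*'], excludes=['src/secret/*'])

    print(checker.has_full_access)                 # False, includes are restricted
    print(checker.has_access('docs/index.rst'))    # True, matches an include pattern
    print(checker.has_access('src/secret/key'))    # False, excludes are checked first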
@@ -1,617 +1,620 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 this is forms validation classes
23 23 http://formencode.org/module-formencode.validators.html
25 25 for a list of all available validators
25 25
26 26 we can create our own validators
27 27
28 28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 29 pre_validators [] These validators will be applied before the schema
30 30 chained_validators [] These validators will be applied after the schema
31 31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
34 34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35 35
36 36
37 37 <name> = formencode.validators.<name of validator>
38 38 <name> must equal form name
39 39 list=[1,2,3,4,5]
40 40 for SELECT use formencode.All(OneOf(list), Int())
41 41
42 42 """
43 43
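To make the schema options described in the docstring above concrete, a minimal hedged example of a standalone formencode schema; every field name here is invented:

    import formencode
    from formencode import validators


    class _ExampleForm(formencode.Schema):
        allow_extra_fields = True       # unknown keys are not an error...
        filter_extra_fields = True      # ...and are dropped from the result
        name = validators.UnicodeString(strip=True, not_empty=True)
        active = validators.StringBoolean(if_missing=False)


    result = _ExampleForm().to_python({'name': ' demo ', 'unknown': 'x'})
    # -> {'name': u'demo', 'active': False}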
44 44 import deform
45 45 import logging
46 46 import formencode
47 47
48 48 from pkg_resources import resource_filename
49 49 from formencode import All, Pipe
50 50
51 51 from pyramid.threadlocal import get_current_request
52 52
53 53 from rhodecode import BACKENDS
54 54 from rhodecode.lib import helpers
55 55 from rhodecode.model import validators as v
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 deform_templates = resource_filename('deform', 'templates')
61 61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 62 search_path = (rhodecode_templates, deform_templates)
63 63
64 64
65 65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 67 def __call__(self, template_name, **kw):
68 68 kw['h'] = helpers
69 69 kw['request'] = get_current_request()
70 70 return self.load(template_name)(**kw)
71 71
72 72
73 73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 74 deform.Form.set_default_renderer(form_renderer)
75 75
76 76
77 77 def LoginForm(localizer):
78 78 _ = localizer
79 79
80 80 class _LoginForm(formencode.Schema):
81 81 allow_extra_fields = True
82 82 filter_extra_fields = True
83 83 username = v.UnicodeString(
84 84 strip=True,
85 85 min=1,
86 86 not_empty=True,
87 87 messages={
88 88 'empty': _(u'Please enter a login'),
89 89 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 90 }
91 91 )
92 92
93 93 password = v.UnicodeString(
94 94 strip=False,
95 95 min=3,
96 96 max=72,
97 97 not_empty=True,
98 98 messages={
99 99 'empty': _(u'Please enter a password'),
100 100 'tooShort': _(u'Enter %(min)i characters or more')}
101 101 )
102 102
103 103 remember = v.StringBoolean(if_missing=False)
104 104
105 105 chained_validators = [v.ValidAuth(localizer)]
106 106 return _LoginForm
107 107
108 108
109 109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 110 old_data = old_data or {}
111 111 available_languages = available_languages or []
112 112 _ = localizer
113 113
114 114 class _UserForm(formencode.Schema):
115 115 allow_extra_fields = True
116 116 filter_extra_fields = True
117 117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 118 v.ValidUsername(localizer, edit, old_data))
119 119 if edit:
120 120 new_password = All(
121 121 v.ValidPassword(localizer),
122 122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 123 )
124 124 password_confirmation = All(
125 125 v.ValidPassword(localizer),
126 126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 127 )
128 128 admin = v.StringBoolean(if_missing=False)
129 129 else:
130 130 password = All(
131 131 v.ValidPassword(localizer),
132 132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 133 )
134 134 password_confirmation = All(
135 135 v.ValidPassword(localizer),
136 136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 137 )
138 138
139 139 password_change = v.StringBoolean(if_missing=False)
140 140 create_repo_group = v.StringBoolean(if_missing=False)
141 141
142 142 active = v.StringBoolean(if_missing=False)
143 143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 146 extern_name = v.UnicodeString(strip=True)
147 147 extern_type = v.UnicodeString(strip=True)
148 148 language = v.OneOf(available_languages, hideList=False,
149 149 testValueList=True, if_missing=None)
150 150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 151 return _UserForm
152 152
153 153
154 154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 155 old_data = old_data or {}
156 156 _ = localizer
157 157
158 158 class _UserGroupForm(formencode.Schema):
159 159 allow_extra_fields = True
160 160 filter_extra_fields = True
161 161
162 162 users_group_name = All(
163 163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 164 v.ValidUserGroup(localizer, edit, old_data)
165 165 )
166 166 user_group_description = v.UnicodeString(strip=True, min=1,
167 167 not_empty=False)
168 168
169 169 users_group_active = v.StringBoolean(if_missing=False)
170 170
171 171 if edit:
172 172 # this is user group owner
173 173 user = All(
174 174 v.UnicodeString(not_empty=True),
175 175 v.ValidRepoUser(localizer, allow_disabled))
176 176 return _UserGroupForm
177 177
178 178
179 179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 180 can_create_in_root=False, allow_disabled=False):
181 181 _ = localizer
182 182 old_data = old_data or {}
183 183 available_groups = available_groups or []
184 184
185 185 class _RepoGroupForm(formencode.Schema):
186 186 allow_extra_fields = True
187 187 filter_extra_fields = False
188 188
189 189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 190 v.SlugifyName(localizer),)
191 191 group_description = v.UnicodeString(strip=True, min=1,
192 192 not_empty=False)
193 193 group_copy_permissions = v.StringBoolean(if_missing=False)
194 194
195 195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 196 testValueList=True, not_empty=True)
197 197 enable_locking = v.StringBoolean(if_missing=False)
198 198 chained_validators = [
199 199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 200
201 201 if edit:
202 202 # this is repo group owner
203 203 user = All(
204 204 v.UnicodeString(not_empty=True),
205 205 v.ValidRepoUser(localizer, allow_disabled))
206 206 return _RepoGroupForm
207 207
208 208
209 209 def RegisterForm(localizer, edit=False, old_data=None):
210 210 _ = localizer
211 211 old_data = old_data or {}
212 212
213 213 class _RegisterForm(formencode.Schema):
214 214 allow_extra_fields = True
215 215 filter_extra_fields = True
216 216 username = All(
217 217 v.ValidUsername(localizer, edit, old_data),
218 218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 219 )
220 220 password = All(
221 221 v.ValidPassword(localizer),
222 222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 223 )
224 224 password_confirmation = All(
225 225 v.ValidPassword(localizer),
226 226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 227 )
228 228 active = v.StringBoolean(if_missing=False)
229 229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 232
233 233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 234 return _RegisterForm
235 235
236 236
237 237 def PasswordResetForm(localizer):
238 238 _ = localizer
239 239
240 240 class _PasswordResetForm(formencode.Schema):
241 241 allow_extra_fields = True
242 242 filter_extra_fields = True
243 243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 244 return _PasswordResetForm
245 245
246 246
247 247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
248 248 landing_revs=None, allow_disabled=False):
249 249 _ = localizer
250 250 old_data = old_data or {}
251 251 repo_groups = repo_groups or []
252 252 landing_revs = landing_revs or []
253 253 supported_backends = BACKENDS.keys()
254 254
255 255 class _RepoForm(formencode.Schema):
256 256 allow_extra_fields = True
257 257 filter_extra_fields = False
258 258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 260 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 261 v.OneOf(repo_groups, hideList=True))
262 262 repo_type = v.OneOf(supported_backends, required=False,
263 263 if_missing=old_data.get('repo_type'))
264 264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 265 repo_private = v.StringBoolean(if_missing=False)
266 266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
267 267 repo_copy_permissions = v.StringBoolean(if_missing=False)
268 268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
269 269
270 270 repo_enable_statistics = v.StringBoolean(if_missing=False)
271 271 repo_enable_downloads = v.StringBoolean(if_missing=False)
272 272 repo_enable_locking = v.StringBoolean(if_missing=False)
273 273
274 274 if edit:
275 275 # this is repo owner
276 276 user = All(
277 277 v.UnicodeString(not_empty=True),
278 278 v.ValidRepoUser(localizer, allow_disabled))
279 279 clone_uri_change = v.UnicodeString(
280 280 not_empty=False, if_missing=v.Missing)
281 281
282 282 chained_validators = [v.ValidCloneUri(localizer),
283 283 v.ValidRepoName(localizer, edit, old_data)]
284 284 return _RepoForm
285 285
286 286
287 287 def RepoPermsForm(localizer):
288 288 _ = localizer
289 289
290 290 class _RepoPermsForm(formencode.Schema):
291 291 allow_extra_fields = True
292 292 filter_extra_fields = False
293 293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
294 294 return _RepoPermsForm
295 295
296 296
297 297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
298 298 _ = localizer
299 299
300 300 class _RepoGroupPermsForm(formencode.Schema):
301 301 allow_extra_fields = True
302 302 filter_extra_fields = False
303 303 recursive = v.OneOf(valid_recursive_choices)
304 304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
305 305 return _RepoGroupPermsForm
306 306
307 307
308 308 def UserGroupPermsForm(localizer):
309 309 _ = localizer
310 310
311 311 class _UserPermsForm(formencode.Schema):
312 312 allow_extra_fields = True
313 313 filter_extra_fields = False
314 314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
315 315 return _UserPermsForm
316 316
317 317
318 318 def RepoFieldForm(localizer):
319 319 _ = localizer
320 320
321 321 class _RepoFieldForm(formencode.Schema):
322 322 filter_extra_fields = True
323 323 allow_extra_fields = True
324 324
325 325 new_field_key = All(v.FieldKey(localizer),
326 326 v.UnicodeString(strip=True, min=3, not_empty=True))
327 327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
328 328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
329 329 if_missing='str')
330 330 new_field_label = v.UnicodeString(not_empty=False)
331 331 new_field_desc = v.UnicodeString(not_empty=False)
332 332 return _RepoFieldForm
333 333
334 334
335 335 def RepoForkForm(localizer, edit=False, old_data=None,
336 336 supported_backends=BACKENDS.keys(), repo_groups=None,
337 337 landing_revs=None):
338 338 _ = localizer
339 339 old_data = old_data or {}
340 340 repo_groups = repo_groups or []
341 341 landing_revs = landing_revs or []
342 342
343 343 class _RepoForkForm(formencode.Schema):
344 344 allow_extra_fields = True
345 345 filter_extra_fields = False
346 346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
347 347 v.SlugifyName(localizer))
348 348 repo_group = All(v.CanWriteGroup(localizer, ),
349 349 v.OneOf(repo_groups, hideList=True))
350 350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
351 351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
352 352 private = v.StringBoolean(if_missing=False)
353 353 copy_permissions = v.StringBoolean(if_missing=False)
354 354 fork_parent_id = v.UnicodeString()
355 355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
356 356 landing_rev = v.OneOf(landing_revs, hideList=True)
357 357 return _RepoForkForm
358 358
359 359
360 360 def ApplicationSettingsForm(localizer):
361 361 _ = localizer
362 362
363 363 class _ApplicationSettingsForm(formencode.Schema):
364 364 allow_extra_fields = True
365 365 filter_extra_fields = False
366 366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
367 367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
368 368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
373 373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
374 374 return _ApplicationSettingsForm
375 375
376 376
377 377 def ApplicationVisualisationForm(localizer):
378 378 from rhodecode.model.db import Repository
379 379 _ = localizer
380 380
381 381 class _ApplicationVisualisationForm(formencode.Schema):
382 382 allow_extra_fields = True
383 383 filter_extra_fields = False
384 384 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
385 385 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
386 386 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
387 387
388 388 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
389 389 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
390 390 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
391 391 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
392 392 rhodecode_show_version = v.StringBoolean(if_missing=False)
393 393 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
394 394 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
395 395 rhodecode_gravatar_url = v.UnicodeString(min=3)
396 396 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
397 397 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
398 398 rhodecode_support_url = v.UnicodeString()
399 399 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
400 400 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
401 401 return _ApplicationVisualisationForm
402 402
403 403
404 404 class _BaseVcsSettingsForm(formencode.Schema):
405 405
406 406 allow_extra_fields = True
407 407 filter_extra_fields = False
408 408 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
409 409 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
410 410 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
411 411
412 412 # PR/Code-review
413 413 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
414 414 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
415 415
416 416 # hg
417 417 extensions_largefiles = v.StringBoolean(if_missing=False)
418 418 extensions_evolve = v.StringBoolean(if_missing=False)
419 419 phases_publish = v.StringBoolean(if_missing=False)
420 420
421 421 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 422 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
423 423
424 424 # git
425 425 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
426 426 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
427 427 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
428 428
429 429 # svn
430 430 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
431 431 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
432 432
433 # cache
434 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
435
433 436
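The new `rhodecode_diff_cache` flag above is just another `StringBoolean` member of `_BaseVcsSettingsForm`, so it rides through the same formencode validation path as the other VCS checkboxes. A minimal sketch of that path (not part of the commit; `localizer` and `post_data` are assumed inputs, and the factory used is `ApplicationUiSettingsForm` defined just below):

    def extract_diff_cache_flag(localizer, post_data):
        # instantiate the schema class produced by the factory, then
        # validate the submitted dict; formencode.Invalid is raised on
        # bad input
        schema = ApplicationUiSettingsForm(localizer)()
        cleaned = schema.to_python(dict(post_data))
        # StringBoolean(if_missing=False) turns an unchecked checkbox
        # into False instead of a validation error
        return cleaned.get('rhodecode_diff_cache', False)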
434 437 def ApplicationUiSettingsForm(localizer):
435 438 _ = localizer
436 439
437 440 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
438 441 web_push_ssl = v.StringBoolean(if_missing=False)
439 442 paths_root_path = All(
440 443 v.ValidPath(localizer),
441 444 v.UnicodeString(strip=True, min=1, not_empty=True)
442 445 )
443 446 largefiles_usercache = All(
444 447 v.ValidPath(localizer),
445 448 v.UnicodeString(strip=True, min=2, not_empty=True))
446 449 vcs_git_lfs_store_location = All(
447 450 v.ValidPath(localizer),
448 451 v.UnicodeString(strip=True, min=2, not_empty=True))
449 452 extensions_hgsubversion = v.StringBoolean(if_missing=False)
450 453 extensions_hggit = v.StringBoolean(if_missing=False)
451 454 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
452 455 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
453 456 return _ApplicationUiSettingsForm
454 457
455 458
456 459 def RepoVcsSettingsForm(localizer, repo_name):
457 460 _ = localizer
458 461
459 462 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
460 463 inherit_global_settings = v.StringBoolean(if_missing=False)
461 464 new_svn_branch = v.ValidSvnPattern(localizer,
462 465 section='vcs_svn_branch', repo_name=repo_name)
463 466 new_svn_tag = v.ValidSvnPattern(localizer,
464 467 section='vcs_svn_tag', repo_name=repo_name)
465 468 return _RepoVcsSettingsForm
466 469
467 470
468 471 def LabsSettingsForm(localizer):
469 472 _ = localizer
470 473
471 474 class _LabSettingsForm(formencode.Schema):
472 475 allow_extra_fields = True
473 476 filter_extra_fields = False
474 477 return _LabSettingsForm
475 478
476 479
477 480 def ApplicationPermissionsForm(
478 481 localizer, register_choices, password_reset_choices,
479 482 extern_activate_choices):
480 483 _ = localizer
481 484
482 485 class _DefaultPermissionsForm(formencode.Schema):
483 486 allow_extra_fields = True
484 487 filter_extra_fields = True
485 488
486 489 anonymous = v.StringBoolean(if_missing=False)
487 490 default_register = v.OneOf(register_choices)
488 491 default_register_message = v.UnicodeString()
489 492 default_password_reset = v.OneOf(password_reset_choices)
490 493 default_extern_activate = v.OneOf(extern_activate_choices)
491 494 return _DefaultPermissionsForm
492 495
493 496
494 497 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
495 498 user_group_perms_choices):
496 499 _ = localizer
497 500
498 501 class _ObjectPermissionsForm(formencode.Schema):
499 502 allow_extra_fields = True
500 503 filter_extra_fields = True
501 504 overwrite_default_repo = v.StringBoolean(if_missing=False)
502 505 overwrite_default_group = v.StringBoolean(if_missing=False)
503 506 overwrite_default_user_group = v.StringBoolean(if_missing=False)
504 507 default_repo_perm = v.OneOf(repo_perms_choices)
505 508 default_group_perm = v.OneOf(group_perms_choices)
506 509 default_user_group_perm = v.OneOf(user_group_perms_choices)
507 510 return _ObjectPermissionsForm
508 511
509 512
510 513 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
511 514 repo_group_create_choices, user_group_create_choices,
512 515 fork_choices, inherit_default_permissions_choices):
513 516 _ = localizer
514 517
515 518 class _DefaultPermissionsForm(formencode.Schema):
516 519 allow_extra_fields = True
517 520 filter_extra_fields = True
518 521
519 522 anonymous = v.StringBoolean(if_missing=False)
520 523
521 524 default_repo_create = v.OneOf(create_choices)
522 525 default_repo_create_on_write = v.OneOf(create_on_write_choices)
523 526 default_user_group_create = v.OneOf(user_group_create_choices)
524 527 default_repo_group_create = v.OneOf(repo_group_create_choices)
525 528 default_fork_create = v.OneOf(fork_choices)
526 529 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
527 530 return _DefaultPermissionsForm
528 531
529 532
530 533 def UserIndividualPermissionsForm(localizer):
531 534 _ = localizer
532 535
533 536 class _DefaultPermissionsForm(formencode.Schema):
534 537 allow_extra_fields = True
535 538 filter_extra_fields = True
536 539
537 540 inherit_default_permissions = v.StringBoolean(if_missing=False)
538 541 return _DefaultPermissionsForm
539 542
540 543
541 544 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
542 545 _ = localizer
543 546 old_data = old_data or {}
544 547
545 548 class _DefaultsForm(formencode.Schema):
546 549 allow_extra_fields = True
547 550 filter_extra_fields = True
548 551 default_repo_type = v.OneOf(supported_backends)
549 552 default_repo_private = v.StringBoolean(if_missing=False)
550 553 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
551 554 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
552 555 default_repo_enable_locking = v.StringBoolean(if_missing=False)
553 556 return _DefaultsForm
554 557
555 558
556 559 def AuthSettingsForm(localizer):
557 560 _ = localizer
558 561
559 562 class _AuthSettingsForm(formencode.Schema):
560 563 allow_extra_fields = True
561 564 filter_extra_fields = True
562 565 auth_plugins = All(v.ValidAuthPlugins(localizer),
563 566 v.UniqueListFromString(localizer)(not_empty=True))
564 567 return _AuthSettingsForm
565 568
566 569
567 570 def UserExtraEmailForm(localizer):
568 571 _ = localizer
569 572
570 573 class _UserExtraEmailForm(formencode.Schema):
571 574 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
572 575 return _UserExtraEmailForm
573 576
574 577
575 578 def UserExtraIpForm(localizer):
576 579 _ = localizer
577 580
578 581 class _UserExtraIpForm(formencode.Schema):
579 582 ip = v.ValidIp(localizer)(not_empty=True)
580 583 return _UserExtraIpForm
581 584
582 585
583 586 def PullRequestForm(localizer, repo_id):
584 587 _ = localizer
585 588
586 589 class ReviewerForm(formencode.Schema):
587 590 user_id = v.Int(not_empty=True)
588 591 reasons = All()
589 592 rules = All(v.UniqueList(localizer, convert=int)())
590 593 mandatory = v.StringBoolean()
591 594
592 595 class _PullRequestForm(formencode.Schema):
593 596 allow_extra_fields = True
594 597 filter_extra_fields = True
595 598
596 599 common_ancestor = v.UnicodeString(strip=True, required=True)
597 600 source_repo = v.UnicodeString(strip=True, required=True)
598 601 source_ref = v.UnicodeString(strip=True, required=True)
599 602 target_repo = v.UnicodeString(strip=True, required=True)
600 603 target_ref = v.UnicodeString(strip=True, required=True)
601 604 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
602 605 v.UniqueList(localizer)(not_empty=True))
603 606 review_members = formencode.ForEach(ReviewerForm())
604 607 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
605 608 pullrequest_desc = v.UnicodeString(strip=True, required=False)
606 609
607 610 return _PullRequestForm
608 611
609 612
610 613 def IssueTrackerPatternsForm(localizer):
611 614 _ = localizer
612 615
613 616 class _IssueTrackerPatternsForm(formencode.Schema):
614 617 allow_extra_fields = True
615 618 filter_extra_fields = False
616 619 chained_validators = [v.ValidPattern(localizer)]
617 620 return _IssueTrackerPatternsForm
@@ -1,826 +1,830 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import hashlib
23 23 import logging
24 24 from collections import namedtuple
25 25 from functools import wraps
26 26 import bleach
27 27
28 28 from rhodecode.lib import caches
29 29 from rhodecode.lib.utils2 import (
30 30 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 31 from rhodecode.lib.vcs.backends import base
32 32 from rhodecode.model import BaseModel
33 33 from rhodecode.model.db import (
34 34 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
35 35 from rhodecode.model.meta import Session
36 36
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 UiSetting = namedtuple(
42 42 'UiSetting', ['section', 'key', 'value', 'active'])
43 43
44 44 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45 45
46 46
47 47 class SettingNotFound(Exception):
48 48 def __init__(self, setting_id):
49 49 msg = 'Setting `{}` is not found'.format(setting_id)
50 50 super(SettingNotFound, self).__init__(msg)
51 51
52 52
53 53 class SettingsModel(BaseModel):
54 54 BUILTIN_HOOKS = (
55 55 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
56 56 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
57 57 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
58 58 RhodeCodeUi.HOOK_PUSH_KEY,)
59 59 HOOKS_SECTION = 'hooks'
60 60
61 61 def __init__(self, sa=None, repo=None):
62 62 self.repo = repo
63 63 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
64 64 self.SettingsDbModel = (
65 65 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
66 66 super(SettingsModel, self).__init__(sa)
67 67
68 68 def get_ui_by_key(self, key):
69 69 q = self.UiDbModel.query()
70 70 q = q.filter(self.UiDbModel.ui_key == key)
71 71 q = self._filter_by_repo(RepoRhodeCodeUi, q)
72 72 return q.scalar()
73 73
74 74 def get_ui_by_section(self, section):
75 75 q = self.UiDbModel.query()
76 76 q = q.filter(self.UiDbModel.ui_section == section)
77 77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 78 return q.all()
79 79
80 80 def get_ui_by_section_and_key(self, section, key):
81 81 q = self.UiDbModel.query()
82 82 q = q.filter(self.UiDbModel.ui_section == section)
83 83 q = q.filter(self.UiDbModel.ui_key == key)
84 84 q = self._filter_by_repo(RepoRhodeCodeUi, q)
85 85 return q.scalar()
86 86
87 87 def get_ui(self, section=None, key=None):
88 88 q = self.UiDbModel.query()
89 89 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 90
91 91 if section:
92 92 q = q.filter(self.UiDbModel.ui_section == section)
93 93 if key:
94 94 q = q.filter(self.UiDbModel.ui_key == key)
95 95
96 96 # TODO: mikhail: add caching
97 97 result = [
98 98 UiSetting(
99 99 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
100 100 value=safe_str(r.ui_value), active=r.ui_active
101 101 )
102 102 for r in q.all()
103 103 ]
104 104 return result
105 105
106 106 def get_builtin_hooks(self):
107 107 q = self.UiDbModel.query()
108 108 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
109 109 return self._get_hooks(q)
110 110
111 111 def get_custom_hooks(self):
112 112 q = self.UiDbModel.query()
113 113 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 114 return self._get_hooks(q)
115 115
116 116 def create_ui_section_value(self, section, val, key=None, active=True):
117 117 new_ui = self.UiDbModel()
118 118 new_ui.ui_section = section
119 119 new_ui.ui_value = val
120 120 new_ui.ui_active = active
121 121
122 122 if self.repo:
123 123 repo = self._get_repo(self.repo)
124 124 repository_id = repo.repo_id
125 125 new_ui.repository_id = repository_id
126 126
127 127 if not key:
128 128 # keys are unique so they need appended info
129 129 if self.repo:
130 130 key = hashlib.sha1(
131 131 '{}{}{}'.format(section, val, repository_id)).hexdigest()
132 132 else:
133 133 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
134 134
135 135 new_ui.ui_key = key
136 136
137 137 Session().add(new_ui)
138 138 return new_ui
139 139
140 140 def create_or_update_hook(self, key, value):
141 141 ui = (
142 142 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
143 143 self.UiDbModel())
144 144 ui.ui_section = self.HOOKS_SECTION
145 145 ui.ui_active = True
146 146 ui.ui_key = key
147 147 ui.ui_value = value
148 148
149 149 if self.repo:
150 150 repo = self._get_repo(self.repo)
151 151 repository_id = repo.repo_id
152 152 ui.repository_id = repository_id
153 153
154 154 Session().add(ui)
155 155 return ui
156 156
157 157 def delete_ui(self, id_):
158 158 ui = self.UiDbModel.get(id_)
159 159 if not ui:
160 160 raise SettingNotFound(id_)
161 161 Session().delete(ui)
162 162
163 163 def get_setting_by_name(self, name):
164 164 q = self._get_settings_query()
165 165 q = q.filter(self.SettingsDbModel.app_settings_name == name)
166 166 return q.scalar()
167 167
168 168 def create_or_update_setting(
169 169 self, name, val=Optional(''), type_=Optional('unicode')):
170 170 """
171 171 Creates or updates RhodeCode setting. If an update is triggered, it will
172 172 only update parameters that are explicitly set; Optional instances
173 173 will be skipped.
174 174
175 175 :param name:
176 176 :param val:
177 177 :param type_:
178 178 :return:
179 179 """
180 180
181 181 res = self.get_setting_by_name(name)
182 182 repo = self._get_repo(self.repo) if self.repo else None
183 183
184 184 if not res:
185 185 val = Optional.extract(val)
186 186 type_ = Optional.extract(type_)
187 187
188 188 args = (
189 189 (repo.repo_id, name, val, type_)
190 190 if repo else (name, val, type_))
191 191 res = self.SettingsDbModel(*args)
192 192
193 193 else:
194 194 if self.repo:
195 195 res.repository_id = repo.repo_id
196 196
197 197 res.app_settings_name = name
198 198 if not isinstance(type_, Optional):
199 199 # update if set
200 200 res.app_settings_type = type_
201 201 if not isinstance(val, Optional):
202 202 # update if set
203 203 res.app_settings_value = val
204 204
205 205 Session().add(res)
206 206 return res
207 207
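A short usage sketch of the Optional semantics described in the docstring above; the names are the ones defined in this module, and committing the session is left to the caller, as elsewhere in RhodeCode:

    model = SettingsModel()
    # sets both the stored value and the stored type
    model.create_or_update_setting('diff_cache', val='True', type_='bool')
    # updates only the value; type_ is still an Optional instance,
    # so the stored type is left untouched
    model.create_or_update_setting('diff_cache', val='False')
    Session().commit()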
208 208 def invalidate_settings_cache(self):
209 209 namespace = 'rhodecode_settings'
210 210 cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
211 211 caches.clear_cache_manager(cache_manager)
212 212
213 213 def get_all_settings(self, cache=False):
214 214
215 215 def _compute():
216 216 q = self._get_settings_query()
217 217 if not q:
218 218 raise Exception('Could not get application settings !')
219 219
220 220 settings = {
221 221 'rhodecode_' + result.app_settings_name: result.app_settings_value
222 222 for result in q
223 223 }
224 224 return settings
225 225
226 226 if cache:
227 227 log.debug('Fetching app settings using cache')
228 228 repo = self._get_repo(self.repo) if self.repo else None
229 229 namespace = 'rhodecode_settings'
230 230 cache_manager = caches.get_cache_manager(
231 231 'sql_cache_short', namespace)
232 232 _cache_key = (
233 233 "get_repo_{}_settings".format(repo.repo_id)
234 234 if repo else "get_app_settings")
235 235
236 236 return cache_manager.get(_cache_key, createfunc=_compute)
237 237
238 238 else:
239 239 return _compute()
240 240
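A hedged sketch of how the `cache` flag interacts with writes, using only methods defined in this class plus `str2bool` imported at the top of the module (the `diff_cache` name matches the setting added by this commit):

    settings_model = SettingsModel()
    cached = settings_model.get_all_settings(cache=True)
    diff_cache_enabled = str2bool(cached.get('rhodecode_diff_cache', 'False'))

    # after a write, drop the beaker-managed copy so that the next
    # cached read recomputes the dict from the database
    settings_model.create_or_update_setting('diff_cache', 'True', 'bool')
    Session().commit()
    settings_model.invalidate_settings_cache()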
241 241 def get_auth_settings(self):
242 242 q = self._get_settings_query()
243 243 q = q.filter(
244 244 self.SettingsDbModel.app_settings_name.startswith('auth_'))
245 245 rows = q.all()
246 246 auth_settings = {
247 247 row.app_settings_name: row.app_settings_value for row in rows}
248 248 return auth_settings
249 249
250 250 def get_auth_plugins(self):
251 251 auth_plugins = self.get_setting_by_name("auth_plugins")
252 252 return auth_plugins.app_settings_value
253 253
254 254 def get_default_repo_settings(self, strip_prefix=False):
255 255 q = self._get_settings_query()
256 256 q = q.filter(
257 257 self.SettingsDbModel.app_settings_name.startswith('default_'))
258 258 rows = q.all()
259 259
260 260 result = {}
261 261 for row in rows:
262 262 key = row.app_settings_name
263 263 if strip_prefix:
264 264 key = remove_prefix(key, prefix='default_')
265 265 result.update({key: row.app_settings_value})
266 266 return result
267 267
268 268 def get_repo(self):
269 269 repo = self._get_repo(self.repo)
270 270 if not repo:
271 271 raise Exception(
272 272 'Repository `{}` cannot be found inside the database'.format(
273 273 self.repo))
274 274 return repo
275 275
276 276 def _filter_by_repo(self, model, query):
277 277 if self.repo:
278 278 repo = self.get_repo()
279 279 query = query.filter(model.repository_id == repo.repo_id)
280 280 return query
281 281
282 282 def _get_hooks(self, query):
283 283 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
284 284 query = self._filter_by_repo(RepoRhodeCodeUi, query)
285 285 return query.all()
286 286
287 287 def _get_settings_query(self):
288 288 q = self.SettingsDbModel.query()
289 289 return self._filter_by_repo(RepoRhodeCodeSetting, q)
290 290
291 291 def list_enabled_social_plugins(self, settings):
292 292 enabled = []
293 293 for plug in SOCIAL_PLUGINS_LIST:
294 294 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
295 295 )):
296 296 enabled.append(plug)
297 297 return enabled
298 298
299 299
300 300 def assert_repo_settings(func):
301 301 @wraps(func)
302 302 def _wrapper(self, *args, **kwargs):
303 303 if not self.repo_settings:
304 304 raise Exception('Repository is not specified')
305 305 return func(self, *args, **kwargs)
306 306 return _wrapper
307 307
308 308
309 309 class IssueTrackerSettingsModel(object):
310 310 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
311 311 SETTINGS_PREFIX = 'issuetracker_'
312 312
313 313 def __init__(self, sa=None, repo=None):
314 314 self.global_settings = SettingsModel(sa=sa)
315 315 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
316 316
317 317 @property
318 318 def inherit_global_settings(self):
319 319 if not self.repo_settings:
320 320 return True
321 321 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
322 322 return setting.app_settings_value if setting else True
323 323
324 324 @inherit_global_settings.setter
325 325 def inherit_global_settings(self, value):
326 326 if self.repo_settings:
327 327 settings = self.repo_settings.create_or_update_setting(
328 328 self.INHERIT_SETTINGS, value, type_='bool')
329 329 Session().add(settings)
330 330
331 331 def _get_keyname(self, key, uid, prefix=''):
332 332 return '{0}{1}{2}_{3}'.format(
333 333 prefix, self.SETTINGS_PREFIX, key, uid)
334 334
335 335 def _make_dict_for_settings(self, qs):
336 336 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
337 337
338 338 issuetracker_entries = {}
339 339 # create keys
340 340 for k, v in qs.items():
341 341 if k.startswith(prefix_match):
342 342 uid = k[len(prefix_match):]
343 343 issuetracker_entries[uid] = None
344 344
345 345 # populate
346 346 for uid in issuetracker_entries:
347 347 issuetracker_entries[uid] = AttributeDict({
348 348 'pat': qs.get(
349 349 self._get_keyname('pat', uid, 'rhodecode_')),
350 350 'url': bleach.clean(
351 351 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
352 352 'pref': bleach.clean(
353 353 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
354 354 'desc': qs.get(
355 355 self._get_keyname('desc', uid, 'rhodecode_')),
356 356 })
357 357 return issuetracker_entries
358 358
359 359 def get_global_settings(self, cache=False):
360 360 """
361 361 Returns list of global issue tracker settings
362 362 """
363 363 defaults = self.global_settings.get_all_settings(cache=cache)
364 364 settings = self._make_dict_for_settings(defaults)
365 365 return settings
366 366
367 367 def get_repo_settings(self, cache=False):
368 368 """
369 369 Returns list of issue tracker settings per repository
370 370 """
371 371 if not self.repo_settings:
372 372 raise Exception('Repository is not specified')
373 373 all_settings = self.repo_settings.get_all_settings(cache=cache)
374 374 settings = self._make_dict_for_settings(all_settings)
375 375 return settings
376 376
377 377 def get_settings(self, cache=False):
378 378 if self.inherit_global_settings:
379 379 return self.get_global_settings(cache=cache)
380 380 else:
381 381 return self.get_repo_settings(cache=cache)
382 382
383 383 def delete_entries(self, uid):
384 384 if self.repo_settings:
385 385 all_patterns = self.get_repo_settings()
386 386 settings_model = self.repo_settings
387 387 else:
388 388 all_patterns = self.get_global_settings()
389 389 settings_model = self.global_settings
390 390 entries = all_patterns.get(uid, [])
391 391
392 392 for del_key in entries:
393 393 setting_name = self._get_keyname(del_key, uid)
394 394 entry = settings_model.get_setting_by_name(setting_name)
395 395 if entry:
396 396 Session().delete(entry)
397 397
398 398 Session().commit()
399 399
400 400 def create_or_update_setting(
401 401 self, name, val=Optional(''), type_=Optional('unicode')):
402 402 if self.repo_settings:
403 403 setting = self.repo_settings.create_or_update_setting(
404 404 name, val, type_)
405 405 else:
406 406 setting = self.global_settings.create_or_update_setting(
407 407 name, val, type_)
408 408 return setting
409 409
410 410
411 411 class VcsSettingsModel(object):
412 412
413 413 INHERIT_SETTINGS = 'inherit_vcs_settings'
414 414 GENERAL_SETTINGS = (
415 415 'use_outdated_comments',
416 416 'pr_merge_enabled',
417 417 'hg_use_rebase_for_merging',
418 418 'hg_close_branch_before_merging',
419 419 'git_use_rebase_for_merging',
420 'git_close_branch_before_merging')
420 'git_close_branch_before_merging',
421 'diff_cache',
422 )
421 423
422 424 HOOKS_SETTINGS = (
423 425 ('hooks', 'changegroup.repo_size'),
424 426 ('hooks', 'changegroup.push_logger'),
425 427 ('hooks', 'outgoing.pull_logger'),)
426 428 HG_SETTINGS = (
427 429 ('extensions', 'largefiles'),
428 430 ('phases', 'publish'),
429 431 ('extensions', 'evolve'),)
430 432 GIT_SETTINGS = (
431 433 ('vcs_git_lfs', 'enabled'),)
432 434 GLOBAL_HG_SETTINGS = (
433 435 ('extensions', 'largefiles'),
434 436 ('largefiles', 'usercache'),
435 437 ('phases', 'publish'),
436 438 ('extensions', 'hgsubversion'),
437 439 ('extensions', 'evolve'),)
438 440 GLOBAL_GIT_SETTINGS = (
439 441 ('vcs_git_lfs', 'enabled'),
440 442 ('vcs_git_lfs', 'store_location'))
443
441 444 GLOBAL_SVN_SETTINGS = (
442 445 ('vcs_svn_proxy', 'http_requests_enabled'),
443 446 ('vcs_svn_proxy', 'http_server_url'))
444 447
445 448 SVN_BRANCH_SECTION = 'vcs_svn_branch'
446 449 SVN_TAG_SECTION = 'vcs_svn_tag'
447 450 SSL_SETTING = ('web', 'push_ssl')
448 451 PATH_SETTING = ('paths', '/')
449 452
450 453 def __init__(self, sa=None, repo=None):
451 454 self.global_settings = SettingsModel(sa=sa)
452 455 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
453 456 self._ui_settings = (
454 457 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
455 458 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
456 459
457 460 @property
458 461 @assert_repo_settings
459 462 def inherit_global_settings(self):
460 463 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
461 464 return setting.app_settings_value if setting else True
462 465
463 466 @inherit_global_settings.setter
464 467 @assert_repo_settings
465 468 def inherit_global_settings(self, value):
466 469 self.repo_settings.create_or_update_setting(
467 470 self.INHERIT_SETTINGS, value, type_='bool')
468 471
469 472 def get_global_svn_branch_patterns(self):
470 473 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
471 474
472 475 @assert_repo_settings
473 476 def get_repo_svn_branch_patterns(self):
474 477 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
475 478
476 479 def get_global_svn_tag_patterns(self):
477 480 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
478 481
479 482 @assert_repo_settings
480 483 def get_repo_svn_tag_patterns(self):
481 484 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
482 485
483 486 def get_global_settings(self):
484 487 return self._collect_all_settings(global_=True)
485 488
486 489 @assert_repo_settings
487 490 def get_repo_settings(self):
488 491 return self._collect_all_settings(global_=False)
489 492
490 493 @assert_repo_settings
491 494 def create_or_update_repo_settings(
492 495 self, data, inherit_global_settings=False):
493 496 from rhodecode.model.scm import ScmModel
494 497
495 498 self.inherit_global_settings = inherit_global_settings
496 499
497 500 repo = self.repo_settings.get_repo()
498 501 if not inherit_global_settings:
499 502 if repo.repo_type == 'svn':
500 503 self.create_repo_svn_settings(data)
501 504 else:
502 505 self.create_or_update_repo_hook_settings(data)
503 506 self.create_or_update_repo_pr_settings(data)
504 507
505 508 if repo.repo_type == 'hg':
506 509 self.create_or_update_repo_hg_settings(data)
507 510
508 511 if repo.repo_type == 'git':
509 512 self.create_or_update_repo_git_settings(data)
510 513
511 514 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
512 515
513 516 @assert_repo_settings
514 517 def create_or_update_repo_hook_settings(self, data):
515 518 for section, key in self.HOOKS_SETTINGS:
516 519 data_key = self._get_form_ui_key(section, key)
517 520 if data_key not in data:
518 521 raise ValueError(
519 522 'The given data does not contain {} key'.format(data_key))
520 523
521 524 active = data.get(data_key)
522 525 repo_setting = self.repo_settings.get_ui_by_section_and_key(
523 526 section, key)
524 527 if not repo_setting:
525 528 global_setting = self.global_settings.\
526 529 get_ui_by_section_and_key(section, key)
527 530 self.repo_settings.create_ui_section_value(
528 531 section, global_setting.ui_value, key=key, active=active)
529 532 else:
530 533 repo_setting.ui_active = active
531 534 Session().add(repo_setting)
532 535
533 536 def update_global_hook_settings(self, data):
534 537 for section, key in self.HOOKS_SETTINGS:
535 538 data_key = self._get_form_ui_key(section, key)
536 539 if data_key not in data:
537 540 raise ValueError(
538 541 'The given data does not contain {} key'.format(data_key))
539 542 active = data.get(data_key)
540 543 repo_setting = self.global_settings.get_ui_by_section_and_key(
541 544 section, key)
542 545 repo_setting.ui_active = active
543 546 Session().add(repo_setting)
544 547
545 548 @assert_repo_settings
546 549 def create_or_update_repo_pr_settings(self, data):
547 550 return self._create_or_update_general_settings(
548 551 self.repo_settings, data)
549 552
550 553 def create_or_update_global_pr_settings(self, data):
551 554 return self._create_or_update_general_settings(
552 555 self.global_settings, data)
553 556
554 557 @assert_repo_settings
555 558 def create_repo_svn_settings(self, data):
556 559 return self._create_svn_settings(self.repo_settings, data)
557 560
558 561 @assert_repo_settings
559 562 def create_or_update_repo_hg_settings(self, data):
560 563 largefiles, phases, evolve = \
561 564 self.HG_SETTINGS
562 565 largefiles_key, phases_key, evolve_key = \
563 566 self._get_settings_keys(self.HG_SETTINGS, data)
564 567
565 568 self._create_or_update_ui(
566 569 self.repo_settings, *largefiles, value='',
567 570 active=data[largefiles_key])
568 571 self._create_or_update_ui(
569 572 self.repo_settings, *evolve, value='',
570 573 active=data[evolve_key])
571 574 self._create_or_update_ui(
572 575 self.repo_settings, *phases, value=safe_str(data[phases_key]))
573 576
577
574 578 def create_or_update_global_hg_settings(self, data):
575 579 largefiles, largefiles_store, phases, hgsubversion, evolve \
576 580 = self.GLOBAL_HG_SETTINGS
577 581 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
578 582 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
579 583
580 584 self._create_or_update_ui(
581 585 self.global_settings, *largefiles, value='',
582 586 active=data[largefiles_key])
583 587 self._create_or_update_ui(
584 588 self.global_settings, *largefiles_store,
585 589 value=data[largefiles_store_key])
586 590 self._create_or_update_ui(
587 591 self.global_settings, *phases, value=safe_str(data[phases_key]))
588 592 self._create_or_update_ui(
589 593 self.global_settings, *hgsubversion, active=data[subversion_key])
590 594 self._create_or_update_ui(
591 595 self.global_settings, *evolve, value='',
592 596 active=data[evolve_key])
593 597
594 598 def create_or_update_repo_git_settings(self, data):
595 599 # NOTE(marcink): # comma make unpack work properly
596 600 lfs_enabled, \
597 601 = self.GIT_SETTINGS
598 602
599 603 lfs_enabled_key, \
600 604 = self._get_settings_keys(self.GIT_SETTINGS, data)
601 605
602 606 self._create_or_update_ui(
603 607 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
604 608 active=data[lfs_enabled_key])
605 609
606 610 def create_or_update_global_git_settings(self, data):
607 611 lfs_enabled, lfs_store_location \
608 612 = self.GLOBAL_GIT_SETTINGS
609 613 lfs_enabled_key, lfs_store_location_key \
610 614 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
611 615
612 616 self._create_or_update_ui(
613 617 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
614 618 active=data[lfs_enabled_key])
615 619 self._create_or_update_ui(
616 620 self.global_settings, *lfs_store_location,
617 621 value=data[lfs_store_location_key])
618 622
619 623 def create_or_update_global_svn_settings(self, data):
620 624 # branch/tags patterns
621 625 self._create_svn_settings(self.global_settings, data)
622 626
623 627 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
624 628 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
625 629 self.GLOBAL_SVN_SETTINGS, data)
626 630
627 631 self._create_or_update_ui(
628 632 self.global_settings, *http_requests_enabled,
629 633 value=safe_str(data[http_requests_enabled_key]))
630 634 self._create_or_update_ui(
631 635 self.global_settings, *http_server_url,
632 636 value=data[http_server_url_key])
633 637
634 638 def update_global_ssl_setting(self, value):
635 639 self._create_or_update_ui(
636 640 self.global_settings, *self.SSL_SETTING, value=value)
637 641
638 642 def update_global_path_setting(self, value):
639 643 self._create_or_update_ui(
640 644 self.global_settings, *self.PATH_SETTING, value=value)
641 645
642 646 @assert_repo_settings
643 647 def delete_repo_svn_pattern(self, id_):
644 648 ui = self.repo_settings.UiDbModel.get(id_)
645 649 if ui and ui.repository.repo_name == self.repo_settings.repo:
646 650 # only delete if it's the same repo as initialized settings
647 651 self.repo_settings.delete_ui(id_)
648 652 else:
649 653 # raise error as if we wouldn't find this option
650 654 self.repo_settings.delete_ui(-1)
651 655
652 656 def delete_global_svn_pattern(self, id_):
653 657 self.global_settings.delete_ui(id_)
654 658
655 659 @assert_repo_settings
656 660 def get_repo_ui_settings(self, section=None, key=None):
657 661 global_uis = self.global_settings.get_ui(section, key)
658 662 repo_uis = self.repo_settings.get_ui(section, key)
659 663 filtered_repo_uis = self._filter_ui_settings(repo_uis)
660 664 filtered_repo_uis_keys = [
661 665 (s.section, s.key) for s in filtered_repo_uis]
662 666
663 667 def _is_global_ui_filtered(ui):
664 668 return (
665 669 (ui.section, ui.key) in filtered_repo_uis_keys
666 670 or ui.section in self._svn_sections)
667 671
668 672 filtered_global_uis = [
669 673 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
670 674
671 675 return filtered_global_uis + filtered_repo_uis
672 676
673 677 def get_global_ui_settings(self, section=None, key=None):
674 678 return self.global_settings.get_ui(section, key)
675 679
676 680 def get_ui_settings_as_config_obj(self, section=None, key=None):
677 681 config = base.Config()
678 682
679 683 ui_settings = self.get_ui_settings(section=section, key=key)
680 684
681 685 for entry in ui_settings:
682 686 config.set(entry.section, entry.key, entry.value)
683 687
684 688 return config
685 689
686 690 def get_ui_settings(self, section=None, key=None):
687 691 if not self.repo_settings or self.inherit_global_settings:
688 692 return self.get_global_ui_settings(section, key)
689 693 else:
690 694 return self.get_repo_ui_settings(section, key)
691 695
692 696 def get_svn_patterns(self, section=None):
693 697 if not self.repo_settings:
694 698 return self.get_global_ui_settings(section)
695 699 else:
696 700 return self.get_repo_ui_settings(section)
697 701
698 702 @assert_repo_settings
699 703 def get_repo_general_settings(self):
700 704 global_settings = self.global_settings.get_all_settings()
701 705 repo_settings = self.repo_settings.get_all_settings()
702 706 filtered_repo_settings = self._filter_general_settings(repo_settings)
703 707 global_settings.update(filtered_repo_settings)
704 708 return global_settings
705 709
706 710 def get_global_general_settings(self):
707 711 return self.global_settings.get_all_settings()
708 712
709 713 def get_general_settings(self):
710 714 if not self.repo_settings or self.inherit_global_settings:
711 715 return self.get_global_general_settings()
712 716 else:
713 717 return self.get_repo_general_settings()
714 718
715 719 def get_repos_location(self):
716 720 return self.global_settings.get_ui_by_key('/').ui_value
717 721
718 722 def _filter_ui_settings(self, settings):
719 723 filtered_settings = [
720 724 s for s in settings if self._should_keep_setting(s)]
721 725 return filtered_settings
722 726
723 727 def _should_keep_setting(self, setting):
724 728 keep = (
725 729 (setting.section, setting.key) in self._ui_settings or
726 730 setting.section in self._svn_sections)
727 731 return keep
728 732
729 733 def _filter_general_settings(self, settings):
730 734 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
731 735 return {
732 736 k: settings[k]
733 737 for k in settings if k in keys}
734 738
735 739 def _collect_all_settings(self, global_=False):
736 740 settings = self.global_settings if global_ else self.repo_settings
737 741 result = {}
738 742
739 743 for section, key in self._ui_settings:
740 744 ui = settings.get_ui_by_section_and_key(section, key)
741 745 result_key = self._get_form_ui_key(section, key)
742 746
743 747 if ui:
744 748 if section in ('hooks', 'extensions'):
745 749 result[result_key] = ui.ui_active
746 750 elif result_key in ['vcs_git_lfs_enabled']:
747 751 result[result_key] = ui.ui_active
748 752 else:
749 753 result[result_key] = ui.ui_value
750 754
751 755 for name in self.GENERAL_SETTINGS:
752 756 setting = settings.get_setting_by_name(name)
753 757 if setting:
754 758 result_key = 'rhodecode_{}'.format(name)
755 759 result[result_key] = setting.app_settings_value
756 760
757 761 return result
758 762
759 763 def _get_form_ui_key(self, section, key):
760 764 return '{section}_{key}'.format(
761 765 section=section, key=key.replace('.', '_'))
762 766
763 767 def _create_or_update_ui(
764 768 self, settings, section, key, value=None, active=None):
765 769 ui = settings.get_ui_by_section_and_key(section, key)
766 770 if not ui:
767 771 active = True if active is None else active
768 772 settings.create_ui_section_value(
769 773 section, value, key=key, active=active)
770 774 else:
771 775 if active is not None:
772 776 ui.ui_active = active
773 777 if value is not None:
774 778 ui.ui_value = value
775 779 Session().add(ui)
776 780
777 781 def _create_svn_settings(self, settings, data):
778 782 svn_settings = {
779 783 'new_svn_branch': self.SVN_BRANCH_SECTION,
780 784 'new_svn_tag': self.SVN_TAG_SECTION
781 785 }
782 786 for key in svn_settings:
783 787 if data.get(key):
784 788 settings.create_ui_section_value(svn_settings[key], data[key])
785 789
786 790 def _create_or_update_general_settings(self, settings, data):
787 791 for name in self.GENERAL_SETTINGS:
788 792 data_key = 'rhodecode_{}'.format(name)
789 793 if data_key not in data:
790 794 raise ValueError(
791 795 'The given data does not contain {} key'.format(data_key))
792 796 setting = settings.create_or_update_setting(
793 797 name, data[data_key], 'bool')
794 798 Session().add(setting)
795 799
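A hedged usage sketch for the helper above via its public wrapper `create_or_update_global_pr_settings`; values are illustrative and the commit is left to the caller. Every `GENERAL_SETTINGS` entry, including the new `diff_cache`, must appear in the data dict with the `rhodecode_` prefix or a ValueError is raised:

    data = {
        'rhodecode_use_outdated_comments': True,
        'rhodecode_pr_merge_enabled': True,
        'rhodecode_hg_use_rebase_for_merging': False,
        'rhodecode_hg_close_branch_before_merging': False,
        'rhodecode_git_use_rebase_for_merging': False,
        'rhodecode_git_close_branch_before_merging': False,
        'rhodecode_diff_cache': True,  # flag added by this commit
    }
    VcsSettingsModel().create_or_update_global_pr_settings(data)
    Session().commit()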
796 800 def _get_settings_keys(self, settings, data):
797 801 data_keys = [self._get_form_ui_key(*s) for s in settings]
798 802 for data_key in data_keys:
799 803 if data_key not in data:
800 804 raise ValueError(
801 805 'The given data does not contain {} key'.format(data_key))
802 806 return data_keys
803 807
804 808 def create_largeobjects_dirs_if_needed(self, repo_store_path):
805 809 """
806 810 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
807 811 ensures the largefiles and Git lfs store directories exist under the given repository store path.
808 812 """
809 813
810 814 from rhodecode.lib.vcs.backends.hg import largefiles_store
811 815 from rhodecode.lib.vcs.backends.git import lfs_store
812 816
813 817 paths = [
814 818 largefiles_store(repo_store_path),
815 819 lfs_store(repo_store_path)]
816 820
817 821 for path in paths:
818 822 if os.path.isdir(path):
819 823 continue
820 824 if os.path.isfile(path):
821 825 continue
822 826 # not a file nor dir, we try to create it
823 827 try:
824 828 os.makedirs(path)
825 829 except Exception:
826 830 log.warning('Failed to create largefiles dir:%s', path)
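The docstring of `create_largeobjects_dirs_if_needed` points at the `pyramid.events.ApplicationCreated` event; a hedged wiring sketch is below. This is not the registration code shipped by RhodeCode: the `includeme` hook and the store-path lookup are assumptions for illustration only.

    from pyramid.events import ApplicationCreated

    def includeme(config):
        def _create_dirs(event):
            model = VcsSettingsModel()
            # ('paths', '/') ui value, i.e. the repository store location
            repo_store_path = model.get_repos_location()
            model.create_largeobjects_dirs_if_needed(repo_store_path)
        config.add_subscriber(_create_dirs, ApplicationCreated)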
@@ -1,370 +1,384 b''
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)">
7 7 % if display_globals:
8 8 <div class="panel panel-default">
9 9 <div class="panel-heading" id="general">
10 10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"></a></h3>
11 11 </div>
12 12 <div class="panel-body">
13 13 <div class="field">
14 14 <div class="checkbox">
15 15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 17 </div>
18 18 <div class="label">
19 19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
20 20 </div>
21 21 </div>
22 22 </div>
23 23 </div>
24 24 % endif
25 25
26 26 % if display_globals:
27 27 <div class="panel panel-default">
28 28 <div class="panel-heading" id="vcs-storage-options">
29 29 <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"></a></h3>
30 30 </div>
31 31 <div class="panel-body">
32 32 <div class="field">
33 33 <div class="inputx locked_input">
34 34 %if allow_repo_location_change:
35 35 ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")}
36 36 <span id="path_unlock" class="tooltip"
37 37 title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}">
38 38 <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div>
39 39 </span>
40 40 %else:
41 41 ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` inside .ini file.')}
42 42 ## form still requires this but we cannot internally change it anyway
43 43 ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
44 44 %endif
45 45 </div>
46 46 </div>
47 47 <div class="label">
48 48 <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}</span>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 % endif
53 53
54 54 % if display_globals or repo_type in ['git', 'hg']:
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading" id="vcs-hooks-options">
57 57 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"></a></h3>
58 58 </div>
59 59 <div class="panel-body">
60 60 <div class="field">
61 61 <div class="checkbox">
62 62 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
63 63 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
64 64 </div>
65 65
66 66 <div class="label">
67 67 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
68 68 </div>
69 69 <div class="checkbox">
70 70 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
71 71 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
72 72 </div>
73 73 <div class="label">
74 74 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
75 75 </div>
76 76 <div class="checkbox">
77 77 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
78 78 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
79 79 </div>
80 80 <div class="label">
81 81 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
82 82 </div>
83 83 </div>
84 84 </div>
85 85 </div>
86 86 % endif
87 87
88 88 % if display_globals or repo_type in ['hg']:
89 89 <div class="panel panel-default">
90 90 <div class="panel-heading" id="vcs-hg-options">
91 91 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"></a></h3>
92 92 </div>
93 93 <div class="panel-body">
94 94 <div class="checkbox">
95 95 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
96 96 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
97 97 </div>
98 98 <div class="label">
99 99 % if display_globals:
100 100 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
101 101 % else:
102 102 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
103 103 % endif
104 104 </div>
105 105
106 106 % if display_globals:
107 107 <div class="field">
108 108 <div class="input">
109 109 ${h.text('largefiles_usercache' + suffix, size=59)}
110 110 </div>
111 111 </div>
112 112 <div class="label">
113 113 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
114 114 </div>
115 115 % endif
116 116
117 117 <div class="checkbox">
118 118 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
119 119 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
120 120 </div>
121 121 <div class="label">
122 122 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
123 123 </div>
124 124 % if display_globals:
125 125 <div class="checkbox">
126 126 ${h.checkbox('extensions_hgsubversion' + suffix,'True')}
127 127 <label for="extensions_hgsubversion${suffix}">${_('Enable hgsubversion extension')}</label>
128 128 </div>
129 129 <div class="label">
130 130 <span class="help-block">${_('Requires hgsubversion library to be installed. Allows cloning remote SVN repositories and migrates them to Mercurial type.')}</span>
131 131 </div>
132 132 % endif
133 133
134 134 <div class="checkbox">
135 135 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
136 136 <label for="extensions_evolve${suffix}">${_('Enable evolve extension')}</label>
137 137 </div>
138 138 <div class="label">
139 139 % if display_globals:
140 140 <span class="help-block">${_('Enable evolve extension for all repositories.')}</span>
141 141 % else:
142 142 <span class="help-block">${_('Enable evolve extension for this repository.')}</span>
143 143 % endif
144 144 </div>
145 145
146 146 </div>
147 147 </div>
148 148 % endif
149 149
150 150 % if display_globals or repo_type in ['git']:
151 151 <div class="panel panel-default">
152 152 <div class="panel-heading" id="vcs-git-options">
153 153 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"></a></h3>
154 154 </div>
155 155 <div class="panel-body">
156 156 <div class="checkbox">
157 157 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
158 158 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
159 159 </div>
160 160 <div class="label">
161 161 % if display_globals:
162 162 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
163 163 % else:
164 164 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
165 165 % endif
166 166 </div>
167 167
168 168 % if display_globals:
169 169 <div class="field">
170 170 <div class="input">
171 171 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
172 172 </div>
173 173 </div>
174 174 <div class="label">
175 175 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
176 176 </div>
177 177 % endif
178 178 </div>
179 179 </div>
180 180 % endif
181 181
182 182
183 183 % if display_globals:
184 184 <div class="panel panel-default">
185 185 <div class="panel-heading" id="vcs-global-svn-options">
186 186 <h3 class="panel-title">${_('Global Subversion Settings')}<a class="permalink" href="#vcs-global-svn-options"></a></h3>
187 187 </div>
188 188 <div class="panel-body">
189 189 <div class="field">
190 190 <div class="checkbox">
191 191 ${h.checkbox('vcs_svn_proxy_http_requests_enabled' + suffix, 'True', **kwargs)}
192 192 <label for="vcs_svn_proxy_http_requests_enabled${suffix}">${_('Proxy subversion HTTP requests')}</label>
193 193 </div>
194 194 <div class="label">
195 195 <span class="help-block">
196 196 ${_('Subversion HTTP Support. Enables communication with SVN over HTTP protocol.')}
197 197 <a href="${h.route_url('enterprise_svn_setup')}" target="_blank">${_('SVN Protocol setup Documentation')}</a>.
198 198 </span>
199 199 </div>
200 200 </div>
201 201 <div class="field">
202 202 <div class="label">
203 203 <label for="vcs_svn_proxy_http_server_url">${_('Subversion HTTP Server URL')}</label><br/>
204 204 </div>
205 205 <div class="input">
206 206 ${h.text('vcs_svn_proxy_http_server_url',size=59)}
207 207 % if c.svn_proxy_generate_config:
208 208 <span class="buttons">
209 209 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Generate Apache Config')}</button>
210 210 </span>
211 211 % endif
212 212 </div>
213 213 </div>
214 214 </div>
215 215 </div>
216 216 % endif
217 217
218 218 % if display_globals or repo_type in ['svn']:
219 219 <div class="panel panel-default">
220 220 <div class="panel-heading" id="vcs-svn-options">
221 221 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"></a></h3>
222 222 </div>
223 223 <div class="panel-body">
224 224 <div class="field">
225 225 <div class="content" >
226 226 <label>${_('Repository patterns')}</label><br/>
227 227 </div>
228 228 </div>
229 229 <div class="label">
230 230 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
231 231 </div>
232 232
233 233 <div class="field branch_patterns">
234 234 <div class="input" >
235 235 <label>${_('Branches')}:</label><br/>
236 236 </div>
237 237 % if svn_branch_patterns:
238 238 % for branch in svn_branch_patterns:
239 239 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
240 240 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
241 241 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
242 242 % if kwargs.get('disabled') != 'disabled':
243 243 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
244 244 ${_('Delete')}
245 245 </span>
246 246 % endif
247 247 </div>
248 248 % endfor
249 249 %endif
250 250 </div>
251 251 % if kwargs.get('disabled') != 'disabled':
252 252 <div class="field branch_patterns">
253 253 <div class="input" >
254 254 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
255 255 </div>
256 256 </div>
257 257 % endif
258 258 <div class="field tag_patterns">
259 259 <div class="input" >
260 260 <label>${_('Tags')}:</label><br/>
261 261 </div>
262 262 % if svn_tag_patterns:
263 263 % for tag in svn_tag_patterns:
264 264 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
265 265 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
266 266 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
267 267 % if kwargs.get('disabled') != 'disabled':
268 268 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
269 269 ${_('Delete')}
270 270 </span>
271 271 %endif
272 272 </div>
273 273 % endfor
274 274 % endif
275 275 </div>
276 276 % if kwargs.get('disabled') != 'disabled':
277 277 <div class="field tag_patterns">
278 278 <div class="input" >
279 279 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
280 280 </div>
281 281 </div>
282 282 %endif
283 283 </div>
284 284 </div>
285 285 % else:
286 286 ${h.hidden('new_svn_branch' + suffix, '')}
287 287 ${h.hidden('new_svn_tag' + suffix, '')}
288 288 % endif
289 289
290 290
291 291 % if display_globals or repo_type in ['hg', 'git']:
292 292 <div class="panel panel-default">
293 293 <div class="panel-heading" id="vcs-pull-requests-options">
294 294 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"></a></h3>
295 295 </div>
296 296 <div class="panel-body">
297 297 <div class="checkbox">
298 298 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
299 299 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
300 300 </div>
301 301 <div class="label">
302 302 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
303 303 </div>
304 304 <div class="checkbox">
305 305 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
306 306 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
307 307 </div>
308 308 <div class="label">
309 309 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
310 310 </div>
311 311 </div>
312 312 </div>
313 313 % endif
314 314
315 % if display_globals or repo_type in ['hg', 'git', 'svn']:
316 <div class="panel panel-default">
317 <div class="panel-heading" id="vcs-pull-requests-options">
318 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"></a></h3>
319 </div>
320 <div class="panel-body">
321 <div class="checkbox">
322 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
323 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
324 </div>
325 </div>
326 </div>
327 % endif
328
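The checkbox above posts `rhodecode_diff_cache`, which `_BaseVcsSettingsForm` validates and `VcsSettingsModel` stores under the `diff_cache` general setting. A hedged sketch of how a commit or pull-request view could consult the flag afterwards (the consuming view code is not part of this hunk, so this is illustrative only):

    from rhodecode.lib.utils2 import str2bool
    from rhodecode.model.settings import VcsSettingsModel

    def diff_cache_enabled(repo_name):
        # falls back to the global value when the repository inherits
        # global VCS settings
        settings = VcsSettingsModel(repo=repo_name).get_general_settings()
        return str2bool(settings.get('rhodecode_diff_cache', 'False'))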
315 329 % if display_globals or repo_type in ['hg',]:
316 330 <div class="panel panel-default">
317 331 <div class="panel-heading" id="vcs-pull-requests-options">
318 332 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"></a></h3>
319 333 </div>
320 334 <div class="panel-body">
321 335 ## Specific HG settings
322 336 <div class="checkbox">
323 337 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
324 338 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
325 339 </div>
326 340 <div class="label">
327 341 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
328 342 </div>
329 343
330 344 <div class="checkbox">
331 345 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
332 346 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
333 347 </div>
334 348 <div class="label">
335 349 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
336 350 </div>
337 351
338 352
339 353 </div>
340 354 </div>
341 355 % endif
342 356
343 357 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
344 358 ## % if display_globals or repo_type in ['git']:
345 359 ## <div class="panel panel-default">
346 360 ## <div class="panel-heading" id="vcs-pull-requests-options">
347 361 ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
348 362 ## </div>
349 363 ## <div class="panel-body">
350 364 ## <div class="checkbox">
351 365 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
352 366 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
353 367 ## </div>
354 368 ## <div class="label">
355 369 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
356 370 ## </div>
357 371 ##
358 372 ## <div class="checkbox">
359 373 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
360 374 ## <label for="rhodecode_git_close_branch_before_merging{suffix}">${_('Delete branch after merging it')}</label>
361 375 ## </div>
362 376 ## <div class="label">
363 377 ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when rebase strategy is use.')}</span>
364 378 ## </div>
365 379 ## </div>
366 380 ## </div>
367 381 ## % endif
368 382
369 383
370 384 </%def>
@@ -1,354 +1,354 b''
1 1 ## -*- coding: utf-8 -*-
2 2
3 3 <%inherit file="/base/base.mako"/>
4 4 <%namespace name="diff_block" file="/changeset/diff_block.mako"/>
5 5
6 6 <%def name="title()">
7 7 ${_('%s Commit') % c.repo_name} - ${h.show_id(c.commit)}
8 8 %if c.rhodecode_name:
9 9 &middot; ${h.branding(c.rhodecode_name)}
10 10 %endif
11 11 </%def>
12 12
13 13 <%def name="menu_bar_nav()">
14 14 ${self.menu_items(active='repositories')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.repo_menu(active='changelog')}
19 19 </%def>
20 20
21 21 <%def name="main()">
22 22 <script>
23 23 // TODO: marcink switch this to pyroutes
24 24 AJAX_COMMENT_DELETE_URL = "${h.route_path('repo_commit_comment_delete',repo_name=c.repo_name,commit_id=c.commit.raw_id,comment_id='__COMMENT_ID__')}";
25 25 templateContext.commit_data.commit_id = "${c.commit.raw_id}";
26 26 </script>
27 27 <div class="box">
28 28 <div class="title">
29 29 ${self.repo_page_title(c.rhodecode_db_repo)}
30 30 </div>
31 31
32 32 <div id="changeset_compare_view_content" class="summary changeset">
33 33 <div class="summary-detail">
34 34 <div class="summary-detail-header">
35 35 <div class="breadcrumbs files_location">
36 36 <h4>
37 37 ${_('Commit')}
38 38
39 39 <code>
40 40 ${h.show_id(c.commit)}
41 41 </code>
42 42 <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${c.commit.raw_id}" title="${_('Copy the full commit id')}"></i>
43 43 % if hasattr(c.commit, 'phase'):
44 44 <span class="tag phase-${c.commit.phase} tooltip" title="${_('Commit phase')}">${c.commit.phase}</span>
45 45 % endif
46 46
47 47 ## obsolete commits
48 48 % if hasattr(c.commit, 'obsolete'):
49 49 % if c.commit.obsolete:
50 50 <span class="tag obsolete-${c.commit.obsolete} tooltip" title="${_('Evolve State')}">${_('obsolete')}</span>
51 51 % endif
52 52 % endif
53 53
54 54 ## hidden commits
55 55 % if hasattr(c.commit, 'hidden'):
56 56 % if c.commit.hidden:
57 57 <span class="tag hidden-${c.commit.hidden} tooltip" title="${_('Evolve State')}">${_('hidden')}</span>
58 58 % endif
59 59 % endif
60 60 </h4>
61 61
62 62 </div>
63 63 <div class="pull-right">
64 64 <span id="parent_link">
65 65 <a href="#parentCommit" title="${_('Parent Commit')}">${_('Parent')}</a>
66 66 </span>
67 67 |
68 68 <span id="child_link">
69 69 <a href="#childCommit" title="${_('Child Commit')}">${_('Child')}</a>
70 70 </span>
71 71 </div>
72 72 </div>
73 73
74 74 <div class="fieldset">
75 75 <div class="left-label">
76 76 ${_('Description')}:
77 77 </div>
78 78 <div class="right-content">
79 79 <div id="trimmed_message_box" class="commit">${h.urlify_commit_message(c.commit.message,c.repo_name)}</div>
80 80 <div id="message_expand" style="display:none;">
81 81 ${_('Expand')}
82 82 </div>
83 83 </div>
84 84 </div>
85 85
86 86 %if c.statuses:
87 87 <div class="fieldset">
88 88 <div class="left-label">
89 89 ${_('Commit status')}:
90 90 </div>
91 91 <div class="right-content">
92 92 <div class="changeset-status-ico">
93 93 <div class="${'flag_status %s' % c.statuses[0]} pull-left"></div>
94 94 </div>
95 95 <div title="${_('Commit status')}" class="changeset-status-lbl">[${h.commit_status_lbl(c.statuses[0])}]</div>
96 96 </div>
97 97 </div>
98 98 %endif
99 99
100 100 <div class="fieldset">
101 101 <div class="left-label">
102 102 ${_('References')}:
103 103 </div>
104 104 <div class="right-content">
105 105 <div class="tags">
106 106
107 107 %if c.commit.merge:
108 108 <span class="mergetag tag">
109 109 <i class="icon-merge"></i>${_('merge')}
110 110 </span>
111 111 %endif
112 112
113 113 %if h.is_hg(c.rhodecode_repo):
114 114 %for book in c.commit.bookmarks:
115 115 <span class="booktag tag" title="${h.tooltip(_('Bookmark %s') % book)}">
116 116 <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=book))}"><i class="icon-bookmark"></i>${h.shorter(book)}</a>
117 117 </span>
118 118 %endfor
119 119 %endif
120 120
121 121 %for tag in c.commit.tags:
122 122 <span class="tagtag tag" title="${h.tooltip(_('Tag %s') % tag)}">
123 123 <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=tag))}"><i class="icon-tag"></i>${tag}</a>
124 124 </span>
125 125 %endfor
126 126
127 127 %if c.commit.branch:
128 128 <span class="branchtag tag" title="${h.tooltip(_('Branch %s') % c.commit.branch)}">
129 129 <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=c.commit.branch))}"><i class="icon-code-fork"></i>${h.shorter(c.commit.branch)}</a>
130 130 </span>
131 131 %endif
132 132 </div>
133 133 </div>
134 134 </div>
135 135
136 136 <div class="fieldset">
137 137 <div class="left-label">
138 138 ${_('Diff options')}:
139 139 </div>
140 140 <div class="right-content">
141 141 <div class="diff-actions">
142 142 <a href="${h.route_path('repo_commit_raw',repo_name=c.repo_name,commit_id=c.commit.raw_id)}" class="tooltip" title="${h.tooltip(_('Raw diff'))}">
143 143 ${_('Raw Diff')}
144 144 </a>
145 145 |
146 146 <a href="${h.route_path('repo_commit_patch',repo_name=c.repo_name,commit_id=c.commit.raw_id)}" class="tooltip" title="${h.tooltip(_('Patch diff'))}">
147 147 ${_('Patch Diff')}
148 148 </a>
149 149 |
150 150 <a href="${h.route_path('repo_commit_download',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(diff='download'))}" class="tooltip" title="${h.tooltip(_('Download diff'))}">
151 151 ${_('Download Diff')}
152 152 </a>
153 153 |
154 154 ${c.ignorews_url(request)}
155 155 |
156 156 ${c.context_url(request)}
157 157 </div>
158 158 </div>
159 159 </div>
160 160
161 161 <div class="fieldset">
162 162 <div class="left-label">
163 163 ${_('Comments')}:
164 164 </div>
165 165 <div class="right-content">
166 166 <div class="comments-number">
167 167 %if c.comments:
168 168 <a href="#comments">${_ungettext("%d Commit comment", "%d Commit comments", len(c.comments)) % len(c.comments)}</a>,
169 169 %else:
170 170 ${_ungettext("%d Commit comment", "%d Commit comments", len(c.comments)) % len(c.comments)}
171 171 %endif
172 172 %if c.inline_cnt:
173 173 <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_ungettext("%d Inline Comment", "%d Inline Comments", c.inline_cnt) % c.inline_cnt}</a>
174 174 %else:
175 175 ${_ungettext("%d Inline Comment", "%d Inline Comments", c.inline_cnt) % c.inline_cnt}
176 176 %endif
177 177 </div>
178 178 </div>
179 179 </div>
180 180
181 181 <div class="fieldset">
182 182 <div class="left-label">
183 183 ${_('Unresolved TODOs')}:
184 184 </div>
185 185 <div class="right-content">
186 186 <div class="comments-number">
187 187 % if c.unresolved_comments:
188 188 % for co in c.unresolved_comments:
189 189 <a class="permalink" href="#comment-${co.comment_id}" onclick="Rhodecode.comments.scrollToComment($('#comment-${co.comment_id}'))"> #${co.comment_id}</a>${'' if loop.last else ','}
190 190 % endfor
191 191 % else:
192 192 ${_('There are no unresolved TODOs')}
193 193 % endif
194 194 </div>
195 195 </div>
196 196 </div>
197 197
198 198 </div> <!-- end summary-detail -->
199 199
200 200 <div id="commit-stats" class="sidebar-right">
201 201 <div class="summary-detail-header">
202 202 <h4 class="item">
203 203 ${_('Author')}
204 204 </h4>
205 205 </div>
206 206 <div class="sidebar-right-content">
207 207 ${self.gravatar_with_user(c.commit.author)}
208 208 <div class="user-inline-data">- ${h.age_component(c.commit.date)}</div>
209 209 </div>
210 210 </div><!-- end sidebar -->
211 211 </div> <!-- end summary -->
212 212 <div class="cs_files">
213 213 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
214 214 ${cbdiffs.render_diffset_menu()}
215 215 ${cbdiffs.render_diffset(
216 c.changes[c.commit.raw_id], commit=c.commit, use_comments=True)}
  216       c.changes[c.commit.raw_id], commit=c.commit, use_comments=True, inline_comments=c.inline_comments)}
217 217 </div>
218 218
219 219 ## template for inline comment form
220 220 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
221 221
222 222 ## render comments
223 223 ${comment.generate_comments(c.comments)}
224 224
225 225     ## main comment form and its status
226 226 ${comment.comments(h.route_path('repo_commit_comment_create', repo_name=c.repo_name, commit_id=c.commit.raw_id),
227 227 h.commit_status(c.rhodecode_db_repo, c.commit.raw_id))}
228 228 </div>
229 229
230 230 ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS
231 231 <script type="text/javascript">
232 232
233 233 $(document).ready(function() {
234 234
235 235 var boxmax = parseInt($('#trimmed_message_box').css('max-height'), 10);
236 236 if($('#trimmed_message_box').height() === boxmax){
237 237 $('#message_expand').show();
238 238 }
239 239
240 240 $('#message_expand').on('click', function(e){
241 241 $('#trimmed_message_box').css('max-height', 'none');
242 242 $(this).hide();
243 243 });
244 244
245 245 $('.show-inline-comments').on('click', function(e){
246 246 var boxid = $(this).attr('data-comment-id');
247 247 var button = $(this);
248 248
249 249 if(button.hasClass("comments-visible")) {
250 250 $('#{0} .inline-comments'.format(boxid)).each(function(index){
251 251 $(this).hide();
252 252 });
253 253 button.removeClass("comments-visible");
254 254 } else {
255 255 $('#{0} .inline-comments'.format(boxid)).each(function(index){
256 256 $(this).show();
257 257 });
258 258 button.addClass("comments-visible");
259 259 }
260 260 });
261 261
262 262
263 263 // next links
264 264 $('#child_link').on('click', function(e){
265 265 // fetch via ajax what is going to be the next link, if we have
266 266 // >1 links show them to user to choose
267 267 if(!$('#child_link').hasClass('disabled')){
268 268 $.ajax({
269 269 url: '${h.route_path('repo_commit_children',repo_name=c.repo_name, commit_id=c.commit.raw_id)}',
270 270 success: function(data) {
271 271 if(data.results.length === 0){
272 272 $('#child_link').html("${_('No Child Commits')}").addClass('disabled');
273 273 }
274 274 if(data.results.length === 1){
275 275 var commit = data.results[0];
276 276 window.location = pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': commit.raw_id});
277 277 }
278 278 else if(data.results.length === 2){
279 279 $('#child_link').addClass('disabled');
280 280 $('#child_link').addClass('double');
281 281 var _html = '';
282 282 _html +='<a title="__title__" href="__url__">__rev__</a> '
283 283 .replace('__rev__','r{0}:{1}'.format(data.results[0].revision, data.results[0].raw_id.substr(0,6)))
284 284 .replace('__title__', data.results[0].message)
285 285 .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[0].raw_id}));
286 286 _html +=' | ';
287 287 _html +='<a title="__title__" href="__url__">__rev__</a> '
288 288 .replace('__rev__','r{0}:{1}'.format(data.results[1].revision, data.results[1].raw_id.substr(0,6)))
289 289 .replace('__title__', data.results[1].message)
290 290 .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[1].raw_id}));
291 291 $('#child_link').html(_html);
292 292 }
293 293 }
294 294 });
295 295 e.preventDefault();
296 296 }
297 297 });
298 298
299 299 // prev links
300 300 $('#parent_link').on('click', function(e){
301 301 // fetch via ajax what is going to be the next link, if we have
302 302 // >1 links show them to user to choose
303 303 if(!$('#parent_link').hasClass('disabled')){
304 304 $.ajax({
305 305 url: '${h.route_path("repo_commit_parents",repo_name=c.repo_name, commit_id=c.commit.raw_id)}',
306 306 success: function(data) {
307 307 if(data.results.length === 0){
308 308 $('#parent_link').html('${_('No Parent Commits')}').addClass('disabled');
309 309 }
310 310 if(data.results.length === 1){
311 311 var commit = data.results[0];
312 312 window.location = pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': commit.raw_id});
313 313 }
314 314 else if(data.results.length === 2){
315 315 $('#parent_link').addClass('disabled');
316 316 $('#parent_link').addClass('double');
317 317 var _html = '';
318 318 _html +='<a title="__title__" href="__url__">Parent __rev__</a>'
319 319 .replace('__rev__','r{0}:{1}'.format(data.results[0].revision, data.results[0].raw_id.substr(0,6)))
320 320 .replace('__title__', data.results[0].message)
321 321 .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[0].raw_id}));
322 322 _html +=' | ';
323 323 _html +='<a title="__title__" href="__url__">Parent __rev__</a>'
324 324 .replace('__rev__','r{0}:{1}'.format(data.results[1].revision, data.results[1].raw_id.substr(0,6)))
325 325 .replace('__title__', data.results[1].message)
326 326 .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[1].raw_id}));
327 327 $('#parent_link').html(_html);
328 328 }
329 329 }
330 330 });
331 331 e.preventDefault();
332 332 }
333 333 });
334 334
335 335 if (location.hash) {
336 336 var result = splitDelimitedHash(location.hash);
337 337 var line = $('html').find(result.loc);
338 338 if (line.length > 0){
339 339 offsetScroll(line, 70);
340 340 }
341 341 }
342 342
343 343 // browse tree @ revision
344 344 $('#files_link').on('click', function(e){
345 345 window.location = '${h.route_path('repo_files:default_path',repo_name=c.repo_name, commit_id=c.commit.raw_id)}';
346 346 e.preventDefault();
347 347 });
348 348
349 349 // inject comments into their proper positions
350 350 var file_comments = $('.inline-comment-placeholder');
351 351 })
352 352 </script>
353 353
354 354 </%def>
@@ -1,695 +1,730 b''
1 1 <%namespace name="commentblock" file="/changeset/changeset_file_comment.mako"/>
2 2
3 3 <%def name="diff_line_anchor(filename, line, type)"><%
4 4 return '%s_%s_%i' % (h.safeid(filename), type, line)
5 5 %></%def>
6 6
7 7 <%def name="action_class(action)">
8 8 <%
9 9 return {
10 10 '-': 'cb-deletion',
11 11 '+': 'cb-addition',
12 12 ' ': 'cb-context',
13 13 }.get(action, 'cb-empty')
14 14 %>
15 15 </%def>
16 16
17 17 <%def name="op_class(op_id)">
18 18 <%
19 19 return {
20 20 DEL_FILENODE: 'deletion', # file deleted
21 21 BIN_FILENODE: 'warning' # binary diff hidden
22 22 }.get(op_id, 'addition')
23 23 %>
24 24 </%def>
25 25
26 26
27 27
28 28 <%def name="render_diffset(diffset, commit=None,
29 29
30 30 # collapse all file diff entries when there are more than this amount of files in the diff
31 31 collapse_when_files_over=20,
32 32
33 33 # collapse lines in the diff when more than this amount of lines changed in the file diff
34 34 lines_changed_limit=500,
35 35
36 36     # add a ruler to the output at the given character column
37 37 ruler_at_chars=0,
38 38
39 39 # show inline comments
40 40 use_comments=False,
41 41
42 42 # disable new comments
43 43 disable_new_comments=False,
44 44
45 45 # special file-comments that were deleted in previous versions
46 46 # it's used for showing outdated comments for deleted files in a PR
47 deleted_files_comments=None
47 deleted_files_comments=None,
48
   49     # for cache purposes, inline comments are passed in separately
50 inline_comments=None
48 51
49 52 )">
50
51 53 %if use_comments:
52 54 <div id="cb-comments-inline-container-template" class="js-template">
53 ${inline_comments_container([])}
55 ${inline_comments_container([], inline_comments)}
54 56 </div>
55 57 <div class="js-template" id="cb-comment-inline-form-template">
56 58 <div class="comment-inline-form ac">
57 59
58 60 %if c.rhodecode_user.username != h.DEFAULT_USER:
59 61 ## render template for inline comments
60 62 ${commentblock.comment_form(form_type='inline')}
61 63 %else:
62 64 ${h.form('', class_='inline-form comment-form-login', method='get')}
63 65 <div class="pull-left">
64 66 <div class="comment-help pull-right">
65 67 ${_('You need to be logged in to leave comments.')} <a href="${h.route_path('login', _query={'came_from': h.current_route_path(request)})}">${_('Login now')}</a>
66 68 </div>
67 69 </div>
68 70 <div class="comment-button pull-right">
69 71 <button type="button" class="cb-comment-cancel" onclick="return Rhodecode.comments.cancelComment(this);">
70 72 ${_('Cancel')}
71 73 </button>
72 74 </div>
73 75 <div class="clearfix"></div>
74 76 ${h.end_form()}
75 77 %endif
76 78 </div>
77 79 </div>
78 80
79 81 %endif
80 82 <%
81 83 collapse_all = len(diffset.files) > collapse_when_files_over
82 84 %>
83 85
84 86 %if c.diffmode == 'sideside':
85 87 <style>
86 88 .wrapper {
87 89 max-width: 1600px !important;
88 90 }
89 91 </style>
90 92 %endif
91 93
92 94 %if ruler_at_chars:
93 95 <style>
94 96 .diff table.cb .cb-content:after {
95 97 content: "";
96 98 border-left: 1px solid blue;
97 99 position: absolute;
98 100 top: 0;
99 101 height: 18px;
100 102 opacity: .2;
101 103 z-index: 10;
102 104     /* +5 to account for diff action (+/-) */
103 105     left: ${ruler_at_chars + 5}ch; }
104 106   </style>
105 107 %endif
106 108
107 109 <div class="diffset ${disable_new_comments and 'diffset-comments-disabled'}">
108 110 <div class="diffset-heading ${diffset.limited_diff and 'diffset-heading-warning' or ''}">
109 111 %if commit:
110 112 <div class="pull-right">
111 113 <a class="btn tooltip" title="${h.tooltip(_('Browse Files at revision {}').format(commit.raw_id))}" href="${h.route_path('repo_files',repo_name=diffset.repo_name, commit_id=commit.raw_id, f_path='')}">
112 114 ${_('Browse Files')}
113 115 </a>
114 116 </div>
115 117 %endif
116 118 <h2 class="clearinner">
117 119 %if commit:
118 120 <a class="tooltip revision" title="${h.tooltip(commit.message)}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id)}">${'r%s:%s' % (commit.revision,h.short_id(commit.raw_id))}</a> -
119 121 ${h.age_component(commit.date)} -
120 122 %endif
121 123
122 124 %if diffset.limited_diff:
123 125 ${_('The requested commit is too big and content was truncated.')}
124 126
125 127 ${_ungettext('%(num)s file changed.', '%(num)s files changed.', diffset.changed_files) % {'num': diffset.changed_files}}
126 128 <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
127 129 %else:
128 130 ${_ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted',
129 131 '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}}
130 132 %endif
131 133
132 134 </h2>
133 135 </div>
134 136
135 137 %if diffset.has_hidden_changes:
136 138 <p class="empty_data">${_('Some changes may be hidden')}</p>
137 139 %elif not diffset.files:
138 140 <p class="empty_data">${_('No files')}</p>
139 141 %endif
140 142
141 143 <div class="filediffs">
142 144   ## initial value; it is recomputed per file inside the loop below
143 145 <% over_lines_changed_limit = False %>
144 146 %for i, filediff in enumerate(diffset.files):
145 147
146 148 <%
147 149 lines_changed = filediff.patch['stats']['added'] + filediff.patch['stats']['deleted']
148 150 over_lines_changed_limit = lines_changed > lines_changed_limit
149 151 %>
150 152 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
151 153 <div
152 154 class="filediff"
153 155 data-f-path="${filediff.patch['filename']}"
154 156 id="a_${h.FID('', filediff.patch['filename'])}">
155 157 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
156 158 <div class="filediff-collapse-indicator"></div>
157 159 ${diff_ops(filediff)}
158 160 </label>
159 161 ${diff_menu(filediff, use_comments=use_comments)}
160 162 <table class="cb cb-diff-${c.diffmode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
161 163 %if not filediff.hunks:
162 164 %for op_id, op_text in filediff.patch['stats']['ops'].items():
163 165 <tr>
164 166 <td class="cb-text cb-${op_class(op_id)}" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
165 167 %if op_id == DEL_FILENODE:
166 168 ${_('File was deleted')}
167 169 %elif op_id == BIN_FILENODE:
168 170 ${_('Binary file hidden')}
169 171 %else:
170 172 ${op_text}
171 173 %endif
172 174 </td>
173 175 </tr>
174 176 %endfor
175 177 %endif
176 178 %if filediff.limited_diff:
177 179 <tr class="cb-warning cb-collapser">
178 180 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
179 181 ${_('The requested commit is too big and content was truncated.')} <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
180 182 </td>
181 183 </tr>
182 184 %else:
183 185 %if over_lines_changed_limit:
184 186 <tr class="cb-warning cb-collapser">
185 187 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
186 188           ${_('This diff has been collapsed as it changes many lines (%i lines changed)' % lines_changed)}
187 189 <a href="#" class="cb-expand"
188 190 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
189 191 </a>
190 192 <a href="#" class="cb-collapse"
191 193 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
192 194 </a>
193 195 </td>
194 196 </tr>
195 197 %endif
196 198 %endif
197 199
198 200 %for hunk in filediff.hunks:
199 201 <tr class="cb-hunk">
200 202 <td ${c.diffmode == 'unified' and 'colspan=3' or ''}>
201 203 ## TODO: dan: add ajax loading of more context here
202 204 ## <a href="#">
203 205 <i class="icon-more"></i>
204 206 ## </a>
205 207 </td>
206 208 <td ${c.diffmode == 'sideside' and 'colspan=5' or ''}>
207 209 @@
208 210 -${hunk.source_start},${hunk.source_length}
209 211 +${hunk.target_start},${hunk.target_length}
210 212 ${hunk.section_header}
211 213 </td>
212 214 </tr>
213 215 %if c.diffmode == 'unified':
214 ${render_hunk_lines_unified(hunk, use_comments=use_comments)}
216 ${render_hunk_lines_unified(hunk, use_comments=use_comments, inline_comments=inline_comments)}
215 217 %elif c.diffmode == 'sideside':
216 ${render_hunk_lines_sideside(hunk, use_comments=use_comments)}
218 ${render_hunk_lines_sideside(hunk, use_comments=use_comments, inline_comments=inline_comments)}
217 219 %else:
218 220 <tr class="cb-line">
219 221 <td>unknown diff mode</td>
220 222 </tr>
221 223 %endif
222 224 %endfor
223 225
224 226 ## outdated comments that do not fit into currently displayed lines
225 227 % for lineno, comments in filediff.left_comments.items():
226 228
227 229 %if c.diffmode == 'unified':
228 230 <tr class="cb-line">
229 231 <td class="cb-data cb-context"></td>
230 232 <td class="cb-lineno cb-context"></td>
231 233 <td class="cb-lineno cb-context"></td>
232 234 <td class="cb-content cb-context">
233 ${inline_comments_container(comments)}
235 ${inline_comments_container(comments, inline_comments)}
234 236 </td>
235 237 </tr>
236 238 %elif c.diffmode == 'sideside':
237 239 <tr class="cb-line">
238 240 <td class="cb-data cb-context"></td>
239 241 <td class="cb-lineno cb-context"></td>
240 242 <td class="cb-content cb-context">
241 243 % if lineno.startswith('o'):
242 ${inline_comments_container(comments)}
244 ${inline_comments_container(comments, inline_comments)}
243 245 % endif
244 246 </td>
245 247
246 248 <td class="cb-data cb-context"></td>
247 249 <td class="cb-lineno cb-context"></td>
248 250 <td class="cb-content cb-context">
249 251 % if lineno.startswith('n'):
250 ${inline_comments_container(comments)}
252 ${inline_comments_container(comments, inline_comments)}
251 253 % endif
252 254 </td>
253 255 </tr>
254 256 %endif
255 257
256 258 % endfor
257 259
258 260 </table>
259 261 </div>
260 262 %endfor
261 263
262 264 ## outdated comments that are made for a file that has been deleted
263 265 % for filename, comments_dict in (deleted_files_comments or {}).items():
264 266
265 267 <div class="filediffs filediff-outdated" style="display: none">
266 268 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filename)}" type="checkbox">
267 269 <div class="filediff" data-f-path="${filename}" id="a_${h.FID('', filename)}">
268 270 <label for="filediff-collapse-${id(filename)}" class="filediff-heading">
269 271 <div class="filediff-collapse-indicator"></div>
270 272 <span class="pill">
271 273 ## file was deleted
272 274 <strong>${filename}</strong>
273 275 </span>
274 276 <span class="pill-group" style="float: left">
275 277 ## file op, doesn't need translation
276 278 <span class="pill" op="removed">removed in this version</span>
277 279 </span>
278 280 <a class="pill filediff-anchor" href="#a_${h.FID('', filename)}"></a>
279 281 <span class="pill-group" style="float: right">
280 282 <span class="pill" op="deleted">-${comments_dict['stats']}</span>
281 283 </span>
282 284 </label>
283 285
284 286 <table class="cb cb-diff-${c.diffmode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
285 287 <tr>
286 288 % if c.diffmode == 'unified':
287 289 <td></td>
288 290 %endif
289 291
290 292 <td></td>
291 293 <td class="cb-text cb-${op_class(BIN_FILENODE)}" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=5'}>
292 294 ${_('File was deleted in this version, and outdated comments were made on it')}
293 295 </td>
294 296 </tr>
295 297 %if c.diffmode == 'unified':
296 298 <tr class="cb-line">
297 299 <td class="cb-data cb-context"></td>
298 300 <td class="cb-lineno cb-context"></td>
299 301 <td class="cb-lineno cb-context"></td>
300 302 <td class="cb-content cb-context">
301 ${inline_comments_container(comments_dict['comments'])}
303 ${inline_comments_container(comments_dict['comments'], inline_comments)}
302 304 </td>
303 305 </tr>
304 306 %elif c.diffmode == 'sideside':
305 307 <tr class="cb-line">
306 308 <td class="cb-data cb-context"></td>
307 309 <td class="cb-lineno cb-context"></td>
308 310 <td class="cb-content cb-context"></td>
309 311
310 312 <td class="cb-data cb-context"></td>
311 313 <td class="cb-lineno cb-context"></td>
312 314 <td class="cb-content cb-context">
313 ${inline_comments_container(comments_dict['comments'])}
315 ${inline_comments_container(comments_dict['comments'], inline_comments)}
314 316 </td>
315 317 </tr>
316 318 %endif
317 319 </table>
318 320 </div>
319 321 </div>
320 322 % endfor
321 323
322 324 </div>
323 325 </div>
324 326 </%def>
325 327
326 328 <%def name="diff_ops(filediff)">
327 329 <%
328 330 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
329 331 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
330 332 %>
331 333 <span class="pill">
332 334 %if filediff.source_file_path and filediff.target_file_path:
333 335 %if filediff.source_file_path != filediff.target_file_path:
334 336 ## file was renamed, or copied
335 337 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
336 338 <strong>${filediff.target_file_path}</strong><del>${filediff.source_file_path}</del>
337 339 <% final_path = filediff.target_file_path %>
338 340 %elif COPIED_FILENODE in filediff.patch['stats']['ops']:
339 341 <strong>${filediff.target_file_path}</strong>${filediff.source_file_path}
340 342 <% final_path = filediff.target_file_path %>
341 343 %endif
342 344 %else:
343 345 ## file was modified
344 346 <strong>${filediff.source_file_path}</strong>
345 347 <% final_path = filediff.source_file_path %>
346 348 %endif
347 349 %else:
348 350 %if filediff.source_file_path:
349 351 ## file was deleted
350 352 <strong>${filediff.source_file_path}</strong>
351 353 <% final_path = filediff.source_file_path %>
352 354 %else:
353 355 ## file was added
354 356 <strong>${filediff.target_file_path}</strong>
355 357 <% final_path = filediff.target_file_path %>
356 358 %endif
357 359 %endif
358 360 <i style="color: #aaa" class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${final_path}" title="${_('Copy the full path')}" onclick="return false;"></i>
359 361 </span>
360 362 <span class="pill-group" style="float: left">
361 363 %if filediff.limited_diff:
362 364 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
363 365 %endif
364 366
365 367 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
366 368 <span class="pill" op="renamed">renamed</span>
367 369 %endif
368 370
369 371 %if COPIED_FILENODE in filediff.patch['stats']['ops']:
370 372 <span class="pill" op="copied">copied</span>
371 373 %endif
372 374
373 375 %if NEW_FILENODE in filediff.patch['stats']['ops']:
374 376 <span class="pill" op="created">created</span>
375 377 %if filediff['target_mode'].startswith('120'):
376 378 <span class="pill" op="symlink">symlink</span>
377 379 %else:
378 380 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
379 381 %endif
380 382 %endif
381 383
382 384 %if DEL_FILENODE in filediff.patch['stats']['ops']:
383 385 <span class="pill" op="removed">removed</span>
384 386 %endif
385 387
386 388 %if CHMOD_FILENODE in filediff.patch['stats']['ops']:
387 389 <span class="pill" op="mode">
388 390 ${nice_mode(filediff['source_mode'])}${nice_mode(filediff['target_mode'])}
389 391 </span>
390 392 %endif
391 393 </span>
392 394
393 395 <a class="pill filediff-anchor" href="#a_${h.FID('', filediff.patch['filename'])}"></a>
394 396
395 397 <span class="pill-group" style="float: right">
396 398 %if BIN_FILENODE in filediff.patch['stats']['ops']:
397 399 <span class="pill" op="binary">binary</span>
398 400 %if MOD_FILENODE in filediff.patch['stats']['ops']:
399 401 <span class="pill" op="modified">modified</span>
400 402 %endif
401 403 %endif
402 404 %if filediff.patch['stats']['added']:
403 405 <span class="pill" op="added">+${filediff.patch['stats']['added']}</span>
404 406 %endif
405 407 %if filediff.patch['stats']['deleted']:
406 408 <span class="pill" op="deleted">-${filediff.patch['stats']['deleted']}</span>
407 409 %endif
408 410 </span>
409 411
410 412 </%def>
411 413
412 414 <%def name="nice_mode(filemode)">
413 415 ${filemode.startswith('100') and filemode[3:] or filemode}
414 416 </%def>
415 417
416 418 <%def name="diff_menu(filediff, use_comments=False)">
417 419 <div class="filediff-menu">
418 420 %if filediff.diffset.source_ref:
419 421 %if filediff.operation in ['D', 'M']:
420 422 <a
421 423 class="tooltip"
422 424 href="${h.route_path('repo_files',repo_name=filediff.diffset.repo_name,commit_id=filediff.diffset.source_ref,f_path=filediff.source_file_path)}"
423 425 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
424 426 >
425 427 ${_('Show file before')}
426 428 </a> |
427 429 %else:
428 430 <span
429 431 class="tooltip"
430 432 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
431 433 >
432 434 ${_('Show file before')}
433 435 </span> |
434 436 %endif
435 437 %if filediff.operation in ['A', 'M']:
436 438 <a
437 439 class="tooltip"
438 440 href="${h.route_path('repo_files',repo_name=filediff.diffset.source_repo_name,commit_id=filediff.diffset.target_ref,f_path=filediff.target_file_path)}"
439 441 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
440 442 >
441 443 ${_('Show file after')}
442 444 </a> |
443 445 %else:
444 446 <span
445 447 class="tooltip"
446 448 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
447 449 >
448 450 ${_('Show file after')}
449 451 </span> |
450 452 %endif
451 453 <a
452 454 class="tooltip"
453 455 title="${h.tooltip(_('Raw diff'))}"
454 456 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw'))}"
455 457 >
456 458 ${_('Raw diff')}
457 459 </a> |
458 460 <a
459 461 class="tooltip"
460 462 title="${h.tooltip(_('Download diff'))}"
461 463 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download'))}"
462 464 >
463 465 ${_('Download diff')}
464 466 </a>
465 467 % if use_comments:
466 468 |
467 469 % endif
468 470
469 471 ## TODO: dan: refactor ignorews_url and context_url into the diff renderer same as diffmode=unified/sideside. Also use ajax to load more context (by clicking hunks)
470 472 %if hasattr(c, 'ignorews_url'):
471 473 ${c.ignorews_url(request, h.FID('', filediff.patch['filename']))}
472 474 %endif
473 475 %if hasattr(c, 'context_url'):
474 476 ${c.context_url(request, h.FID('', filediff.patch['filename']))}
475 477 %endif
476 478
477 479 %if use_comments:
478 480 <a href="#" onclick="return Rhodecode.comments.toggleComments(this);">
479 481 <span class="show-comment-button">${_('Show comments')}</span><span class="hide-comment-button">${_('Hide comments')}</span>
480 482 </a>
481 483 %endif
482 484 %endif
483 485 </div>
484 486 </%def>
485 487
486 488
487 <%def name="inline_comments_container(comments)">
489 <%def name="inline_comments_container(comments, inline_comments)">
488 490 <div class="inline-comments">
489 491 %for comment in comments:
490 ${commentblock.comment_block(comment, inline=True)}
492 ${commentblock.comment_block(comment, inline=True)}
491 493 %endfor
492
493 494 % if comments and comments[-1].outdated:
494 495       <span class="btn btn-secondary cb-comment-add-button comment-outdated"
495 496             style="display: none;">
496 497 ${_('Add another comment')}
497 498 </span>
498 499 % else:
499 500 <span onclick="return Rhodecode.comments.createComment(this)"
500 501 class="btn btn-secondary cb-comment-add-button">
501 502 ${_('Add another comment')}
502 503 </span>
503 504 % endif
504 505
505 506 </div>
506 507 </%def>
507 508
509 <%!
510 def get_comments_for(comments, filename, line_version, line_number):
511 if hasattr(filename, 'unicode_path'):
512 filename = filename.unicode_path
508 513
509 <%def name="render_hunk_lines_sideside(hunk, use_comments=False)">
514 if not isinstance(filename, basestring):
515 return None
516
517 line_key = '{}{}'.format(line_version, line_number)
518 if comments and filename in comments:
519 file_comments = comments[filename]
520 if line_key in file_comments:
521 return file_comments[line_key]
522 %>
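## Expected shape of ``inline_comments`` (inferred from get_comments_for above): a dict keyed
## by file path, mapping '<line_version><line_number>' keys ('o' for the original/old side,
## 'n' for the new/modified side) to lists of comment objects, e.g.
## {'setup.py': {'n12': [comment_a], 'o7': [comment_b]}}.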
523
524 <%def name="render_hunk_lines_sideside(hunk, use_comments=False, inline_comments=None)">
525
510 526 %for i, line in enumerate(hunk.sideside):
511 527 <%
512 528 old_line_anchor, new_line_anchor = None, None
513 529 if line.original.lineno:
514 530 old_line_anchor = diff_line_anchor(hunk.source_file_path, line.original.lineno, 'o')
515 531 if line.modified.lineno:
516 532 new_line_anchor = diff_line_anchor(hunk.target_file_path, line.modified.lineno, 'n')
517 533 %>
518 534
519 535 <tr class="cb-line">
520 536 <td class="cb-data ${action_class(line.original.action)}"
521 537 data-line-no="${line.original.lineno}"
522 538 >
523 539 <div>
524 %if line.original.comments:
525 <% has_outdated = any([x.outdated for x in line.original.comments]) %>
540 <% loc = None %>
541 %if line.original.get_comment_args:
542 <% loc = get_comments_for(inline_comments, *line.original.get_comment_args) %>
543 %endif
544 %if loc:
545 <% has_outdated = any([x.outdated for x in loc]) %>
526 546 % if has_outdated:
527 <i title="${_('comments including outdated')}:${len(line.original.comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
547 <i title="${_('comments including outdated')}:${len(loc)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
528 548 % else:
529 <i title="${_('comments')}: ${len(line.original.comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
549 <i title="${_('comments')}: ${len(loc)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
530 550 % endif
531 551 %endif
532 552 </div>
533 553 </td>
534 554 <td class="cb-lineno ${action_class(line.original.action)}"
535 555 data-line-no="${line.original.lineno}"
536 556 %if old_line_anchor:
537 557 id="${old_line_anchor}"
538 558 %endif
539 559 >
540 560 %if line.original.lineno:
541 561 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
542 562 %endif
543 563 </td>
544 564 <td class="cb-content ${action_class(line.original.action)}"
545 565 data-line-no="o${line.original.lineno}"
546 566 >
547 567 %if use_comments and line.original.lineno:
548 568 ${render_add_comment_button()}
549 569 %endif
550 570 <span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
551 %if use_comments and line.original.lineno and line.original.comments:
552 ${inline_comments_container(line.original.comments)}
571
572 %if use_comments and line.original.lineno and loc:
573 ${inline_comments_container(loc, inline_comments)}
553 574 %endif
575
554 576 </td>
555 577 <td class="cb-data ${action_class(line.modified.action)}"
556 578 data-line-no="${line.modified.lineno}"
557 579 >
558 580 <div>
559 %if line.modified.comments:
560 <% has_outdated = any([x.outdated for x in line.modified.comments]) %>
581
582 %if line.modified.get_comment_args:
583 <% lmc = get_comments_for(inline_comments, *line.modified.get_comment_args) %>
584 %else:
585 <% lmc = None%>
586 %endif
587 %if lmc:
588 <% has_outdated = any([x.outdated for x in lmc]) %>
561 589 % if has_outdated:
562 <i title="${_('comments including outdated')}:${len(line.modified.comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
590 <i title="${_('comments including outdated')}:${len(lmc)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
563 591 % else:
564 <i title="${_('comments')}: ${len(line.modified.comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
592 <i title="${_('comments')}: ${len(lmc)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
565 593 % endif
566 594 %endif
567 595 </div>
568 596 </td>
569 597 <td class="cb-lineno ${action_class(line.modified.action)}"
570 598 data-line-no="${line.modified.lineno}"
571 599 %if new_line_anchor:
572 600 id="${new_line_anchor}"
573 601 %endif
574 602 >
575 603 %if line.modified.lineno:
576 604 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
577 605 %endif
578 606 </td>
579 607 <td class="cb-content ${action_class(line.modified.action)}"
580 608 data-line-no="n${line.modified.lineno}"
581 609 >
582 610 %if use_comments and line.modified.lineno:
583 611 ${render_add_comment_button()}
584 612 %endif
585 613 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
586 %if use_comments and line.modified.lineno and line.modified.comments:
587 ${inline_comments_container(line.modified.comments)}
614 %if use_comments and line.modified.lineno and lmc:
615 ${inline_comments_container(lmc, inline_comments)}
588 616 %endif
589 617 </td>
590 618 </tr>
591 619 %endfor
592 620 </%def>
593 621
594 622
595 <%def name="render_hunk_lines_unified(hunk, use_comments=False)">
596 %for old_line_no, new_line_no, action, content, comments in hunk.unified:
623 <%def name="render_hunk_lines_unified(hunk, use_comments=False, inline_comments=None)">
624 %for old_line_no, new_line_no, action, content, comments_args in hunk.unified:
597 625 <%
598 626 old_line_anchor, new_line_anchor = None, None
599 627 if old_line_no:
600 628 old_line_anchor = diff_line_anchor(hunk.source_file_path, old_line_no, 'o')
601 629 if new_line_no:
602 630 new_line_anchor = diff_line_anchor(hunk.target_file_path, new_line_no, 'n')
603 631 %>
604 632 <tr class="cb-line">
605 633 <td class="cb-data ${action_class(action)}">
606 634 <div>
635
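            ## comments_args, when set, is a (file, line_version, line_no) lookup key; it is resolved
            ## against the pre-loaded inline_comments mapping instead of comments attached to the hunk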
636 %if comments_args:
637 <% comments = get_comments_for(inline_comments, *comments_args) %>
638 %else:
    639           <% comments = None %>
640 %endif
641
607 642 % if comments:
608 643 <% has_outdated = any([x.outdated for x in comments]) %>
609 644 % if has_outdated:
610 645 <i title="${_('comments including outdated')}:${len(comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
611 646 % else:
612 647 <i title="${_('comments')}: ${len(comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
613 648 % endif
614 649 % endif
615 650 </div>
616 651 </td>
617 652 <td class="cb-lineno ${action_class(action)}"
618 653 data-line-no="${old_line_no}"
619 654 %if old_line_anchor:
620 655 id="${old_line_anchor}"
621 656 %endif
622 657 >
623 658 %if old_line_anchor:
624 659 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
625 660 %endif
626 661 </td>
627 662 <td class="cb-lineno ${action_class(action)}"
628 663 data-line-no="${new_line_no}"
629 664 %if new_line_anchor:
630 665 id="${new_line_anchor}"
631 666 %endif
632 667 >
633 668 %if new_line_anchor:
634 669 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
635 670 %endif
636 671 </td>
637 672 <td class="cb-content ${action_class(action)}"
638 673 data-line-no="${new_line_no and 'n' or 'o'}${new_line_no or old_line_no}"
639 674 >
640 675 %if use_comments:
641 676 ${render_add_comment_button()}
642 677 %endif
643 678 <span class="cb-code">${action} ${content or '' | n}</span>
644 679 %if use_comments and comments:
645 ${inline_comments_container(comments)}
680 ${inline_comments_container(comments, inline_comments)}
646 681 %endif
647 682 </td>
648 683 </tr>
649 684 %endfor
650 685 </%def>
651 686
652 687 <%def name="render_add_comment_button()">
653 688 <button class="btn btn-small btn-primary cb-comment-box-opener" onclick="return Rhodecode.comments.createComment(this)">
654 689 <span><i class="icon-comment"></i></span>
655 690 </button>
656 691 </%def>
657 692
658 693 <%def name="render_diffset_menu()">
659 694
660 695 <div class="diffset-menu clearinner">
661 696 <div class="pull-right">
662 697 <div class="btn-group">
663 698
664 699 <a
665 700 class="btn ${c.diffmode == 'sideside' and 'btn-primary'} tooltip"
666 701 title="${h.tooltip(_('View side by side'))}"
667 702 href="${h.current_route_path(request, diffmode='sideside')}">
668 703 <span>${_('Side by Side')}</span>
669 704 </a>
670 705 <a
671 706 class="btn ${c.diffmode == 'unified' and 'btn-primary'} tooltip"
672 707 title="${h.tooltip(_('View unified'))}" href="${h.current_route_path(request, diffmode='unified')}">
673 708 <span>${_('Unified')}</span>
674 709 </a>
675 710 </div>
676 711 </div>
677 712
678 713 <div class="pull-left">
679 714 <div class="btn-group">
680 715 <a
681 716 class="btn"
682 717 href="#"
683 718 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All Files')}</a>
684 719 <a
685 720 class="btn"
686 721 href="#"
687 722 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All Files')}</a>
688 723 <a
689 724 class="btn"
690 725 href="#"
691 726 onclick="return Rhodecode.comments.toggleWideMode(this)">${_('Wide Mode Diff')}</a>
692 727 </div>
693 728 </div>
694 729 </div>
695 730 </%def>
@@ -1,854 +1,855 b''
1 1 <%inherit file="/base/base.mako"/>
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('%s Pull Request #%s') % (c.repo_name, c.pull_request.pull_request_id)}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()">
12 12 <span id="pr-title">
13 13 ${c.pull_request.title}
14 14 %if c.pull_request.is_closed():
15 15 (${_('Closed')})
16 16 %endif
17 17 </span>
18 18 <div id="pr-title-edit" class="input" style="display: none;">
19 19 ${h.text('pullrequest_title', id_="pr-title-input", class_="large", value=c.pull_request.title)}
20 20 </div>
21 21 </%def>
22 22
23 23 <%def name="menu_bar_nav()">
24 24 ${self.menu_items(active='repositories')}
25 25 </%def>
26 26
27 27 <%def name="menu_bar_subnav()">
28 28 ${self.repo_menu(active='showpullrequest')}
29 29 </%def>
30 30
31 31 <%def name="main()">
32 32
33 33 <script type="text/javascript">
34 34 // TODO: marcink switch this to pyroutes
35 35 AJAX_COMMENT_DELETE_URL = "${h.route_path('pullrequest_comment_delete',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id,comment_id='__COMMENT_ID__')}";
36 36 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
37 37 </script>
38 38 <div class="box">
39 39
40 40 <div class="title">
41 41 ${self.repo_page_title(c.rhodecode_db_repo)}
42 42 </div>
43 43
44 44 ${self.breadcrumbs()}
45 45
46 46 <div class="box pr-summary">
47 47
48 48 <div class="summary-details block-left">
49 49 <% summary = lambda n:{False:'summary-short'}.get(n) %>
50 50 <div class="pr-details-title">
51 51 <a href="${h.route_path('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request #%s') % c.pull_request.pull_request_id}</a> ${_('From')} ${h.format_date(c.pull_request.created_on)}
52 52 %if c.allowed_to_update:
53 53 <div id="delete_pullrequest" class="pull-right action_button ${'' if c.allowed_to_delete else 'disabled' }" style="clear:inherit;padding: 0">
54 54 % if c.allowed_to_delete:
55 55 ${h.secure_form(h.route_path('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id), request=request)}
56 56 ${h.submit('remove_%s' % c.pull_request.pull_request_id, _('Delete'),
57 57 class_="btn btn-link btn-danger no-margin",onclick="return confirm('"+_('Confirm to delete this pull request')+"');")}
58 58 ${h.end_form()}
59 59 % else:
60 60 ${_('Delete')}
61 61 % endif
62 62 </div>
63 63 <div id="open_edit_pullrequest" class="pull-right action_button">${_('Edit')}</div>
64 64 <div id="close_edit_pullrequest" class="pull-right action_button" style="display: none;padding: 0">${_('Cancel')}</div>
65 65 %endif
66 66 </div>
67 67
68 68 <div id="summary" class="fields pr-details-content">
69 69 <div class="field">
70 70 <div class="label-summary">
71 71 <label>${_('Source')}:</label>
72 72 </div>
73 73 <div class="input">
74 74 <div class="pr-origininfo">
75 75 ## branch link is only valid if it is a branch
76 76 <span class="tag">
77 77 %if c.pull_request.source_ref_parts.type == 'branch':
78 78 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.source_repo.repo_name, _query=dict(branch=c.pull_request.source_ref_parts.name))}">${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}</a>
79 79 %else:
80 80 ${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}
81 81 %endif
82 82 </span>
83 83 <span class="clone-url">
84 84 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.clone_url()}</a>
85 85 </span>
86 86 <br/>
87 87 % if c.ancestor_commit:
88 88 ${_('Common ancestor')}:
89 89 <code><a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=c.ancestor_commit.raw_id)}">${h.show_id(c.ancestor_commit)}</a></code>
90 90 % endif
91 91 </div>
92 92 %if h.is_hg(c.pull_request.source_repo):
93 93 <% clone_url = 'hg pull -r {} {}'.format(h.short_id(c.source_ref), c.pull_request.source_repo.clone_url()) %>
94 94 %elif h.is_git(c.pull_request.source_repo):
95 95 <% clone_url = 'git pull {} {}'.format(c.pull_request.source_repo.clone_url(), c.pull_request.source_ref_parts.name) %>
96 96 %endif
97 97
98 98 <div class="">
99 99 <input type="text" class="input-monospace pr-pullinfo" value="${clone_url}" readonly="readonly">
100 100 <i class="tooltip icon-clipboard clipboard-action pull-right pr-pullinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the pull url')}"></i>
101 101 </div>
102 102
103 103 </div>
104 104 </div>
105 105 <div class="field">
106 106 <div class="label-summary">
107 107 <label>${_('Target')}:</label>
108 108 </div>
109 109 <div class="input">
110 110 <div class="pr-targetinfo">
111 111 ## branch link is only valid if it is a branch
112 112 <span class="tag">
113 113 %if c.pull_request.target_ref_parts.type == 'branch':
114 114 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.target_repo.repo_name, _query=dict(branch=c.pull_request.target_ref_parts.name))}">${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}</a>
115 115 %else:
116 116 ${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}
117 117 %endif
118 118 </span>
119 119 <span class="clone-url">
120 120 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.clone_url()}</a>
121 121 </span>
122 122 </div>
123 123 </div>
124 124 </div>
125 125
126 126 ## Link to the shadow repository.
127 127 <div class="field">
128 128 <div class="label-summary">
129 129 <label>${_('Merge')}:</label>
130 130 </div>
131 131 <div class="input">
132 132 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
133 133 %if h.is_hg(c.pull_request.target_repo):
134 134 <% clone_url = 'hg clone --update {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
135 135 %elif h.is_git(c.pull_request.target_repo):
136 136 <% clone_url = 'git clone --branch {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
137 137 %endif
138 138 <div class="">
139 139 <input type="text" class="input-monospace pr-mergeinfo" value="${clone_url}" readonly="readonly">
140 140 <i class="tooltip icon-clipboard clipboard-action pull-right pr-mergeinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the clone url')}"></i>
141 141 </div>
142 142 % else:
143 143 <div class="">
144 144 ${_('Shadow repository data not available')}.
145 145 </div>
146 146 % endif
147 147 </div>
148 148 </div>
149 149
150 150 <div class="field">
151 151 <div class="label-summary">
152 152 <label>${_('Review')}:</label>
153 153 </div>
154 154 <div class="input">
155 155 %if c.pull_request_review_status:
156 156 <div class="${'flag_status %s' % c.pull_request_review_status} tooltip pull-left"></div>
157 157 <span class="changeset-status-lbl tooltip">
158 158 %if c.pull_request.is_closed():
159 159 ${_('Closed')},
160 160 %endif
161 161 ${h.commit_status_lbl(c.pull_request_review_status)}
162 162 </span>
163 163 - ${_ungettext('calculated based on %s reviewer vote', 'calculated based on %s reviewers votes', len(c.pull_request_reviewers)) % len(c.pull_request_reviewers)}
164 164 %endif
165 165 </div>
166 166 </div>
167 167 <div class="field">
168 168 <div class="pr-description-label label-summary">
169 169 <label>${_('Description')}:</label>
170 170 </div>
171 171 <div id="pr-desc" class="input">
172 172 <div class="pr-description">${h.urlify_commit_message(c.pull_request.description, c.repo_name)}</div>
173 173 </div>
174 174 <div id="pr-desc-edit" class="input textarea editor" style="display: none;">
175 175 <textarea id="pr-description-input" size="30">${c.pull_request.description}</textarea>
176 176 </div>
177 177 </div>
178 178
179 179 <div class="field">
180 180 <div class="label-summary">
181 181 <label>${_('Versions')}:</label>
182 182 </div>
183 183
184 184 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
185 185 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
186 186
187 187 <div class="pr-versions">
188 188 % if c.show_version_changes:
189 189 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
190 190 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
191 191 <a id="show-pr-versions" class="input" onclick="return versionController.toggleVersionView(this)" href="#show-pr-versions"
192 192 data-toggle-on="${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}"
193 193 data-toggle-off="${_('Hide all versions of this pull request')}">
194 194 ${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}
195 195 </a>
196 196 <table>
197 197 ## SHOW ALL VERSIONS OF PR
198 198 <% ver_pr = None %>
199 199
200 200 % for data in reversed(list(enumerate(c.versions, 1))):
201 201 <% ver_pos = data[0] %>
202 202 <% ver = data[1] %>
203 203 <% ver_pr = ver.pull_request_version_id %>
204 204 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
205 205
206 206 <tr class="version-pr" style="display: ${display_row}">
207 207 <td>
208 208 <code>
209 209 <a href="${request.current_route_path(_query=dict(version=ver_pr or 'latest'))}">v${ver_pos}</a>
210 210 </code>
211 211 </td>
212 212 <td>
213 213 <input ${'checked="checked"' if c.from_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
214 214 <input ${'checked="checked"' if c.at_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
215 215 </td>
216 216 <td>
217 217 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
218 218 <div class="${'flag_status %s' % review_status} tooltip pull-left" title="${_('Your review status at this version')}">
219 219 </div>
220 220 </td>
221 221 <td>
222 222 % if c.at_version_num != ver_pr:
223 223 <i class="icon-comment"></i>
224 224 <code class="tooltip" title="${_('Comment from pull request version {0}, general:{1} inline:{2}').format(ver_pos, len(c.comment_versions[ver_pr]['at']), len(c.inline_versions[ver_pr]['at']))}">
225 225 G:${len(c.comment_versions[ver_pr]['at'])} / I:${len(c.inline_versions[ver_pr]['at'])}
226 226 </code>
227 227 % endif
228 228 </td>
229 229 <td>
230 230 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
231 231 </td>
232 232 <td>
233 233 ${h.age_component(ver.updated_on, time_is_local=True)}
234 234 </td>
235 235 </tr>
236 236 % endfor
237 237
238 238 <tr>
239 239 <td colspan="6">
240 240 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none"
241 241 data-label-text-locked="${_('select versions to show changes')}"
242 242 data-label-text-diff="${_('show changes between versions')}"
243 243 data-label-text-show="${_('show pull request for this version')}"
244 244 >
245 245 ${_('select versions to show changes')}
246 246 </button>
247 247 </td>
248 248 </tr>
249 249
250 250 ## show comment/inline comments summary
251 251 <%def name="comments_summary()">
252 252 <tr>
253 253 <td colspan="6" class="comments-summary-td">
254 254
255 255 % if c.at_version:
256 256 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['display']) %>
257 257 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['display']) %>
258 258 ${_('Comments at this version')}:
259 259 % else:
260 260 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['until']) %>
261 261 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['until']) %>
262 262 ${_('Comments for this pull request')}:
263 263 % endif
264 264
265 265
266 266 %if general_comm_count_ver:
267 267 <a href="#comments">${_("%d General ") % general_comm_count_ver}</a>
268 268 %else:
269 269 ${_("%d General ") % general_comm_count_ver}
270 270 %endif
271 271
272 272 %if inline_comm_count_ver:
273 273 , <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_("%d Inline") % inline_comm_count_ver}</a>
274 274 %else:
275 275 , ${_("%d Inline") % inline_comm_count_ver}
276 276 %endif
277 277
278 278 %if outdated_comm_count_ver:
279 279 , <a href="#" onclick="showOutdated(); Rhodecode.comments.nextOutdatedComment(); return false;">${_("%d Outdated") % outdated_comm_count_ver}</a>
280 280 <a href="#" class="showOutdatedComments" onclick="showOutdated(this); return false;"> | ${_('show outdated comments')}</a>
281 281 <a href="#" class="hideOutdatedComments" style="display: none" onclick="hideOutdated(this); return false;"> | ${_('hide outdated comments')}</a>
282 282 %else:
283 283 , ${_("%d Outdated") % outdated_comm_count_ver}
284 284 %endif
285 285 </td>
286 286 </tr>
287 287 </%def>
288 288 ${comments_summary()}
289 289 </table>
290 290 % else:
291 291 <div class="input">
292 292 ${_('Pull request versions not available')}.
293 293 </div>
294 294 <div>
295 295 <table>
296 296 ${comments_summary()}
297 297 </table>
298 298 </div>
299 299 % endif
300 300 </div>
301 301 </div>
302 302
303 303 <div id="pr-save" class="field" style="display: none;">
304 304 <div class="label-summary"></div>
305 305 <div class="input">
306 306 <span id="edit_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</span>
307 307 </div>
308 308 </div>
309 309 </div>
310 310 </div>
311 311 <div>
312 312 ## AUTHOR
313 313 <div class="reviewers-title block-right">
314 314 <div class="pr-details-title">
315 315 ${_('Author of this pull request')}
316 316 </div>
317 317 </div>
318 318 <div class="block-right pr-details-content reviewers">
319 319 <ul class="group_members">
320 320 <li>
321 321 ${self.gravatar_with_user(c.pull_request.author.email, 16)}
322 322 </li>
323 323 </ul>
324 324 </div>
325 325
326 326 ## REVIEW RULES
327 327 <div id="review_rules" style="display: none" class="reviewers-title block-right">
328 328 <div class="pr-details-title">
329 329 ${_('Reviewer rules')}
330 330 %if c.allowed_to_update:
331 331 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
332 332 %endif
333 333 </div>
334 334 <div class="pr-reviewer-rules">
335 335 ## review rules will be appended here, by default reviewers logic
336 336 </div>
337 337 <input id="review_data" type="hidden" name="review_data" value="">
338 338 </div>
339 339
340 340 ## REVIEWERS
341 341 <div class="reviewers-title block-right">
342 342 <div class="pr-details-title">
343 343 ${_('Pull request reviewers')}
344 344 %if c.allowed_to_update:
345 345 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Edit')}</span>
346 346 %endif
347 347 </div>
348 348 </div>
349 349 <div id="reviewers" class="block-right pr-details-content reviewers">
350 350
351 351         ## members rendering block
352 352 <input type="hidden" name="__start__" value="review_members:sequence">
353 353 <ul id="review_members" class="group_members">
354 354
355 355 % for review_obj, member, reasons, mandatory, status in c.pull_request_reviewers:
356 356 <script>
357 357 var member = ${h.json.dumps(h.reviewer_as_json(member, reasons=reasons, mandatory=mandatory, user_group=review_obj.rule_user_group_data()))|n};
358 358 var status = "${(status[0][1].status if status else 'not_reviewed')}";
359 359 var status_lbl = "${h.commit_status_lbl(status[0][1].status if status else 'not_reviewed')}";
360 360 var allowed_to_update = ${h.json.dumps(c.allowed_to_update)};
361 361
362 362 var entry = renderTemplate('reviewMemberEntry', {
363 363 'member': member,
364 364 'mandatory': member.mandatory,
365 365 'reasons': member.reasons,
366 366 'allowed_to_update': allowed_to_update,
367 367 'review_status': status,
368 368 'review_status_label': status_lbl,
369 369 'user_group': member.user_group,
370 370 'create': false
371 371 });
372 372 $('#review_members').append(entry)
373 373 </script>
374 374
375 375 % endfor
376 376
377 377 </ul>
378 378 <input type="hidden" name="__end__" value="review_members:sequence">
379 379         ## end members rendering block
380 380
381 381 %if not c.pull_request.is_closed():
382 382 <div id="add_reviewer" class="ac" style="display: none;">
383 383 %if c.allowed_to_update:
384 384 % if not c.forbid_adding_reviewers:
385 385 <div id="add_reviewer_input" class="reviewer_ac">
386 386 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer or reviewer group'))}
387 387 <div id="reviewers_container"></div>
388 388 </div>
389 389 % endif
390 390 <div class="pull-right">
391 391 <button id="update_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</button>
392 392 </div>
393 393 %endif
394 394 </div>
395 395 %endif
396 396 </div>
397 397 </div>
398 398 </div>
399 399 <div class="box">
400 400 ##DIFF
401 401 <div class="table" >
402 402 <div id="changeset_compare_view_content">
403 403 ##CS
404 404 % if c.missing_requirements:
405 405 <div class="box">
406 406 <div class="alert alert-warning">
407 407 <div>
408 408 <strong>${_('Missing requirements:')}</strong>
409 409 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
410 410 </div>
411 411 </div>
412 412 </div>
413 413 % elif c.missing_commits:
414 414 <div class="box">
415 415 <div class="alert alert-warning">
416 416 <div>
417 417 <strong>${_('Missing commits')}:</strong>
418 418 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}
419 419 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}
420 420 </div>
421 421 </div>
422 422 </div>
423 423 % endif
424 424
425 425 <div class="compare_view_commits_title">
426 426 % if not c.compare_mode:
427 427
428 428 % if c.at_version_pos:
429 429 <h4>
430 430 ${_('Showing changes at v%d, commenting is disabled.') % c.at_version_pos}
431 431 </h4>
432 432 % endif
433 433
434 434 <div class="pull-left">
435 435 <div class="btn-group">
436 436 <a
437 437 class="btn"
438 438 href="#"
439 439 onclick="$('.compare_select').show();$('.compare_select_hidden').hide(); return false">
440 440 ${_ungettext('Expand %s commit','Expand %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
441 441 </a>
442 442 <a
443 443 class="btn"
444 444 href="#"
445 445 onclick="$('.compare_select').hide();$('.compare_select_hidden').show(); return false">
446 446 ${_ungettext('Collapse %s commit','Collapse %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
447 447 </a>
448 448 </div>
449 449 </div>
450 450
451 451 <div class="pull-right">
452 452 % if c.allowed_to_update and not c.pull_request.is_closed():
453 453 <a id="update_commits" class="btn btn-primary no-margin pull-right">${_('Update commits')}</a>
454 454 % else:
455 455 <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for current view')}">${_('Update commits')}</a>
456 456 % endif
457 457
458 458 </div>
459 459 % endif
460 460 </div>
461 461
462 462 % if not c.missing_commits:
463 463 % if c.compare_mode:
464 464 % if c.at_version:
465 465 <h4>
466 466 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_pos, ver_to=c.at_version_pos if c.at_version_pos else 'latest')}:
467 467 </h4>
468 468
469 469 <div class="subtitle-compare">
470 470 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
471 471 </div>
472 472
473 473 <div class="container">
474 474 <table class="rctable compare_view_commits">
475 475 <tr>
476 476 <th></th>
477 477 <th>${_('Time')}</th>
478 478 <th>${_('Author')}</th>
479 479 <th>${_('Commit')}</th>
480 480 <th></th>
481 481 <th>${_('Description')}</th>
482 482 </tr>
483 483
484 484 % for c_type, commit in c.commit_changes:
485 485 % if c_type in ['a', 'r']:
486 486 <%
487 487 if c_type == 'a':
488 488 cc_title = _('Commit added in displayed changes')
489 489 elif c_type == 'r':
490 490 cc_title = _('Commit removed in displayed changes')
491 491 else:
492 492 cc_title = ''
493 493 %>
494 494 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
495 495 <td>
496 496 <div class="commit-change-indicator color-${c_type}-border">
497 497 <div class="commit-change-content color-${c_type} tooltip" title="${h.tooltip(cc_title)}">
498 498 ${c_type.upper()}
499 499 </div>
500 500 </div>
501 501 </td>
502 502 <td class="td-time">
503 503 ${h.age_component(commit.date)}
504 504 </td>
505 505 <td class="td-user">
506 506 ${base.gravatar_with_user(commit.author, 16)}
507 507 </td>
508 508 <td class="td-hash">
509 509 <code>
510 510 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
511 511 r${commit.revision}:${h.short_id(commit.raw_id)}
512 512 </a>
513 513 ${h.hidden('revisions', commit.raw_id)}
514 514 </code>
515 515 </td>
516 516 <td class="expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}">
517 517 <div class="show_more_col">
518 518 <i class="show_more"></i>
519 519 </div>
520 520 </td>
521 521 <td class="mid td-description">
522 522 <div class="log-container truncate-wrap">
523 523 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">
524 524 ${h.urlify_commit_message(commit.message, c.repo_name)}
525 525 </div>
526 526 </div>
527 527 </td>
528 528 </tr>
529 529 % endif
530 530 % endfor
531 531 </table>
532 532 </div>
533 533
534 534 <script>
535 535 $('.expand_commit').on('click',function(e){
536 536 var target_expand = $(this);
537 537 var cid = target_expand.data('commitId');
538 538
539 539 if (target_expand.hasClass('open')){
540 540 $('#c-'+cid).css({
541 541 'height': '1.5em',
542 542 'white-space': 'nowrap',
543 543 'text-overflow': 'ellipsis',
544 544 'overflow':'hidden'
545 545 });
546 546 target_expand.removeClass('open');
547 547 }
548 548 else {
549 549 $('#c-'+cid).css({
550 550 'height': 'auto',
551 551 'white-space': 'pre-line',
552 552 'text-overflow': 'initial',
553 553 'overflow':'visible'
554 554 });
555 555 target_expand.addClass('open');
556 556 }
557 557 });
558 558 </script>
559 559
560 560 % endif
561 561
562 562 % else:
563 563 <%include file="/compare/compare_commits.mako" />
564 564 % endif
565 565
566 566 <div class="cs_files">
567 567 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
568 568 ${cbdiffs.render_diffset_menu()}
569 569 ${cbdiffs.render_diffset(
570 570 c.diffset, use_comments=True,
571 571 collapse_when_files_over=30,
572 572 disable_new_comments=not c.allowed_to_comment,
573 deleted_files_comments=c.deleted_files_comments)}
573 deleted_files_comments=c.deleted_files_comments,
574 inline_comments=c.inline_comments)}
574 575 </div>
575 576 % else:
576 577                 ## when skipping commits we need to clear the view for missing commits
577 578 <div style="clear:both;"></div>
578 579 % endif
579 580
580 581 </div>
581 582 </div>
582 583
583 584 ## template for inline comment form
584 585 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
585 586
586 587 ## render general comments
587 588
588 589 <div id="comment-tr-show">
589 590 <div class="comment">
590 591 % if general_outdated_comm_count_ver:
591 592 <div class="meta">
592 593 % if general_outdated_comm_count_ver == 1:
593 594 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
594 595 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
595 596 % else:
596 597 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
597 598 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
598 599 % endif
599 600 </div>
600 601 % endif
601 602 </div>
602 603 </div>
603 604
604 605 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
605 606
606 607 % if not c.pull_request.is_closed():
607 608 ## merge status, and merge action
608 609 <div class="pull-request-merge">
609 610 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
610 611 </div>
611 612
612 613         ## main comment form and its status
613 614 ${comment.comments(h.route_path('pullrequest_comment_create', repo_name=c.repo_name,
614 615 pull_request_id=c.pull_request.pull_request_id),
615 616 c.pull_request_review_status,
616 617 is_pull_request=True, change_status=c.allowed_to_change_status)}
617 618 %endif
618 619
619 620 <script type="text/javascript">
620 621 if (location.hash) {
621 622 var result = splitDelimitedHash(location.hash);
622 623 var line = $('html').find(result.loc);
623 624 // show hidden comments if we use location.hash
624 625 if (line.hasClass('comment-general')) {
625 626 $(line).show();
626 627 } else if (line.hasClass('comment-inline')) {
627 628 $(line).show();
628 629 var $cb = $(line).closest('.cb');
629 630 $cb.removeClass('cb-collapsed')
630 631 }
631 632 if (line.length > 0){
632 633 offsetScroll(line, 70);
633 634 }
634 635 }
635 636
636 637 versionController = new VersionController();
637 638 versionController.init();
638 639
639 640 reviewersController = new ReviewersController();
640 641
641 642 $(function(){
642 643
643 644 // custom code mirror
644 645 var codeMirrorInstance = initPullRequestsCodeMirror('#pr-description-input');
645 646
646 647 var PRDetails = {
647 648 editButton: $('#open_edit_pullrequest'),
648 649 closeButton: $('#close_edit_pullrequest'),
649 650 deleteButton: $('#delete_pullrequest'),
650 651 viewFields: $('#pr-desc, #pr-title'),
651 652 editFields: $('#pr-desc-edit, #pr-title-edit, #pr-save'),
652 653
653 654 init: function() {
654 655 var that = this;
655 656 this.editButton.on('click', function(e) { that.edit(); });
656 657 this.closeButton.on('click', function(e) { that.view(); });
657 658 },
658 659
659 660 edit: function(event) {
660 661 this.viewFields.hide();
661 662 this.editButton.hide();
662 663 this.deleteButton.hide();
663 664 this.closeButton.show();
664 665 this.editFields.show();
665 666 codeMirrorInstance.refresh();
666 667 },
667 668
668 669 view: function(event) {
669 670 this.editButton.show();
670 671 this.deleteButton.show();
671 672 this.editFields.hide();
672 673 this.closeButton.hide();
673 674 this.viewFields.show();
674 675 }
675 676 };
676 677
677 678 var ReviewersPanel = {
678 679 editButton: $('#open_edit_reviewers'),
679 680 closeButton: $('#close_edit_reviewers'),
680 681 addButton: $('#add_reviewer'),
681 682 removeButtons: $('.reviewer_member_remove,.reviewer_member_mandatory_remove'),
682 683
683 684 init: function() {
684 685 var self = this;
685 686 this.editButton.on('click', function(e) { self.edit(); });
686 687 this.closeButton.on('click', function(e) { self.close(); });
687 688 },
688 689
689 690 edit: function(event) {
690 691 this.editButton.hide();
691 692 this.closeButton.show();
692 693 this.addButton.show();
693 694 this.removeButtons.css('visibility', 'visible');
694 695 // review rules
695 696 reviewersController.loadReviewRules(
696 697 ${c.pull_request.reviewer_data_json | n});
697 698 },
698 699
699 700 close: function(event) {
700 701 this.editButton.show();
701 702 this.closeButton.hide();
702 703 this.addButton.hide();
703 704 this.removeButtons.css('visibility', 'hidden');
704 705 // hide review rules
705 706 reviewersController.hideReviewRules()
706 707 }
707 708 };
708 709
709 710 PRDetails.init();
710 711 ReviewersPanel.init();
711 712
712 713 showOutdated = function(self){
713 714 $('.comment-inline.comment-outdated').show();
714 715 $('.filediff-outdated').show();
715 716 $('.showOutdatedComments').hide();
716 717 $('.hideOutdatedComments').show();
717 718 };
718 719
719 720 hideOutdated = function(self){
720 721 $('.comment-inline.comment-outdated').hide();
721 722 $('.filediff-outdated').hide();
722 723 $('.hideOutdatedComments').hide();
723 724 $('.showOutdatedComments').show();
724 725 };
725 726
726 727 refreshMergeChecks = function(){
727 728 var loadUrl = "${request.current_route_path(_query=dict(merge_checks=1))}";
728 729 $('.pull-request-merge').css('opacity', 0.3);
729 730 $('.action-buttons-extra').css('opacity', 0.3);
730 731
731 732 $('.pull-request-merge').load(
732 733 loadUrl, function() {
733 734 $('.pull-request-merge').css('opacity', 1);
734 735
735 736 $('.action-buttons-extra').css('opacity', 1);
736 737 injectCloseAction();
737 738 }
738 739 );
739 740 };
740 741
741 742 injectCloseAction = function() {
742 743 var closeAction = $('#close-pull-request-action').html();
743 744 var $actionButtons = $('.action-buttons-extra');
744 745 // clear the action before
745 746 $actionButtons.html("");
746 747 $actionButtons.html(closeAction);
747 748 };
748 749
749 750 closePullRequest = function (status) {
750 751 // inject closing flag
751 752 $('.action-buttons-extra').append('<input type="hidden" class="close-pr-input" id="close_pull_request" value="1">');
752 753 $(generalCommentForm.statusChange).select2("val", status).trigger('change');
753 754 $(generalCommentForm.submitForm).submit();
754 755 };
755 756
756 757 $('#show-outdated-comments').on('click', function(e){
757 758 var button = $(this);
758 759 var outdated = $('.comment-outdated');
759 760
760 761 if (button.html() === "(Show)") {
761 762 button.html("(Hide)");
762 763 outdated.show();
763 764 } else {
764 765 button.html("(Show)");
765 766 outdated.hide();
766 767 }
767 768 });
768 769
769 770 $('.show-inline-comments').on('change', function(e){
770 771 var show = 'none';
771 772 var target = e.currentTarget;
772 773 if(target.checked){
773 774 show = ''
774 775 }
775 776 var boxid = $(target).attr('id_for');
776 777 var comments = $('#{0} .inline-comments'.format(boxid));
777 778 var fn_display = function(idx){
778 779 $(this).css('display', show);
779 780 };
780 781 $(comments).each(fn_display);
781 782 var btns = $('#{0} .inline-comments-button'.format(boxid));
782 783 $(btns).each(fn_display);
783 784 });
784 785
785 786 $('#merge_pull_request_form').submit(function() {
786 787 if (!$('#merge_pull_request').attr('disabled')) {
787 788 $('#merge_pull_request').attr('disabled', 'disabled');
788 789 }
789 790 return true;
790 791 });
791 792
792 793 $('#edit_pull_request').on('click', function(e){
793 794 var title = $('#pr-title-input').val();
794 795 var description = codeMirrorInstance.getValue();
795 796 editPullRequest(
796 797 "${c.repo_name}", "${c.pull_request.pull_request_id}",
797 798 title, description);
798 799 });
799 800
800 801 $('#update_pull_request').on('click', function(e){
801 802 $(this).attr('disabled', 'disabled');
802 803 $(this).addClass('disabled');
803 804 $(this).html(_gettext('Saving...'));
804 805 reviewersController.updateReviewers(
805 806 "${c.repo_name}", "${c.pull_request.pull_request_id}");
806 807 });
807 808
808 809 $('#update_commits').on('click', function(e){
809 810 var isDisabled = !$(e.currentTarget).attr('disabled');
810 811 $(e.currentTarget).attr('disabled', 'disabled');
811 812 $(e.currentTarget).addClass('disabled');
812 813 $(e.currentTarget).removeClass('btn-primary');
813 814 $(e.currentTarget).text(_gettext('Updating...'));
814 815 if(isDisabled){
815 816 updateCommits(
816 817 "${c.repo_name}", "${c.pull_request.pull_request_id}");
817 818 }
818 819 });
819 820       // fix an issue with caching on Firefox
820 821 $('#update_commits').removeAttr("disabled");
821 822
822 823 $('.show-inline-comments').on('click', function(e){
823 824 var boxid = $(this).attr('data-comment-id');
824 825 var button = $(this);
825 826
826 827 if(button.hasClass("comments-visible")) {
827 828 $('#{0} .inline-comments'.format(boxid)).each(function(index){
828 829 $(this).hide();
829 830 });
830 831 button.removeClass("comments-visible");
831 832 } else {
832 833 $('#{0} .inline-comments'.format(boxid)).each(function(index){
833 834 $(this).show();
834 835 });
835 836 button.addClass("comments-visible");
836 837 }
837 838 });
838 839
839 840       // register a submit callback on the comment form to track TODOs
840 841 window.commentFormGlobalSubmitSuccessCallback = function(){
841 842 refreshMergeChecks();
842 843 };
843 844 // initial injection
844 845 injectCloseAction();
845 846
846 847 ReviewerAutoComplete('#user');
847 848
848 849 })
849 850 </script>
850 851
851 852 </div>
852 853 </div>
853 854
854 855 </%def>
@@ -1,1083 +1,1084 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.lib.utils2 import str2bool
25 25 from rhodecode.model.meta import Session
26 26 from rhodecode.model.settings import VcsSettingsModel, UiSetting
27 27
28 28
29 29 HOOKS_FORM_DATA = {
30 30 'hooks_changegroup_repo_size': True,
31 31 'hooks_changegroup_push_logger': True,
32 32 'hooks_outgoing_pull_logger': True
33 33 }
34 34
35 35 SVN_FORM_DATA = {
36 36 'new_svn_branch': 'test-branch',
37 37 'new_svn_tag': 'test-tag'
38 38 }
39 39
40 40 GENERAL_FORM_DATA = {
41 41 'rhodecode_pr_merge_enabled': True,
42 42 'rhodecode_use_outdated_comments': True,
43 43 'rhodecode_hg_use_rebase_for_merging': True,
44 44 'rhodecode_hg_close_branch_before_merging': True,
45 45 'rhodecode_git_use_rebase_for_merging': True,
46 46 'rhodecode_git_close_branch_before_merging': True,
47 'rhodecode_diff_cache': True,
47 48 }
48 49
49 50
50 51 class TestInheritGlobalSettingsProperty(object):
51 52 def test_get_raises_exception_when_repository_not_specified(self):
52 53 model = VcsSettingsModel()
53 54 with pytest.raises(Exception) as exc_info:
54 55 model.inherit_global_settings
55 56 assert exc_info.value.message == 'Repository is not specified'
56 57
57 58 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
58 59 model = VcsSettingsModel(repo=repo_stub.repo_name)
59 60 assert model.inherit_global_settings is True
60 61
61 62 def test_value_is_returned(self, repo_stub, settings_util):
62 63 model = VcsSettingsModel(repo=repo_stub.repo_name)
63 64 settings_util.create_repo_rhodecode_setting(
64 65 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
65 66 assert model.inherit_global_settings is False
66 67
67 68 def test_value_is_set(self, repo_stub):
68 69 model = VcsSettingsModel(repo=repo_stub.repo_name)
69 70 model.inherit_global_settings = False
70 71 setting = model.repo_settings.get_setting_by_name(
71 72 VcsSettingsModel.INHERIT_SETTINGS)
72 73 try:
73 74 assert setting.app_settings_type == 'bool'
74 75 assert setting.app_settings_value is False
75 76 finally:
76 77 Session().delete(setting)
77 78 Session().commit()
78 79
79 80 def test_set_raises_exception_when_repository_not_specified(self):
80 81 model = VcsSettingsModel()
81 82 with pytest.raises(Exception) as exc_info:
82 83 model.inherit_global_settings = False
83 84 assert exc_info.value.message == 'Repository is not specified'
84 85
85 86
86 87 class TestVcsSettingsModel(object):
87 88 def test_global_svn_branch_patterns(self):
88 89 model = VcsSettingsModel()
89 90 expected_result = {'test': 'test'}
90 91 with mock.patch.object(model, 'global_settings') as settings_mock:
91 92 get_settings = settings_mock.get_ui_by_section
92 93 get_settings.return_value = expected_result
93 94 settings_mock.return_value = expected_result
94 95 result = model.get_global_svn_branch_patterns()
95 96
96 97 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
97 98 assert expected_result == result
98 99
99 100 def test_repo_svn_branch_patterns(self):
100 101 model = VcsSettingsModel()
101 102 expected_result = {'test': 'test'}
102 103 with mock.patch.object(model, 'repo_settings') as settings_mock:
103 104 get_settings = settings_mock.get_ui_by_section
104 105 get_settings.return_value = expected_result
105 106 settings_mock.return_value = expected_result
106 107 result = model.get_repo_svn_branch_patterns()
107 108
108 109 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
109 110 assert expected_result == result
110 111
111 112 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
112 113 self):
113 114 model = VcsSettingsModel()
114 115 with pytest.raises(Exception) as exc_info:
115 116 model.get_repo_svn_branch_patterns()
116 117 assert exc_info.value.message == 'Repository is not specified'
117 118
118 119 def test_global_svn_tag_patterns(self):
119 120 model = VcsSettingsModel()
120 121 expected_result = {'test': 'test'}
121 122 with mock.patch.object(model, 'global_settings') as settings_mock:
122 123 get_settings = settings_mock.get_ui_by_section
123 124 get_settings.return_value = expected_result
124 125 settings_mock.return_value = expected_result
125 126 result = model.get_global_svn_tag_patterns()
126 127
127 128 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
128 129 assert expected_result == result
129 130
130 131 def test_repo_svn_tag_patterns(self):
131 132 model = VcsSettingsModel()
132 133 expected_result = {'test': 'test'}
133 134 with mock.patch.object(model, 'repo_settings') as settings_mock:
134 135 get_settings = settings_mock.get_ui_by_section
135 136 get_settings.return_value = expected_result
136 137 settings_mock.return_value = expected_result
137 138 result = model.get_repo_svn_tag_patterns()
138 139
139 140 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
140 141 assert expected_result == result
141 142
142 143 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
143 144 model = VcsSettingsModel()
144 145 with pytest.raises(Exception) as exc_info:
145 146 model.get_repo_svn_tag_patterns()
146 147 assert exc_info.value.message == 'Repository is not specified'
147 148
148 149 def test_get_global_settings(self):
149 150 expected_result = {'test': 'test'}
150 151 model = VcsSettingsModel()
151 152 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
152 153 collect_mock.return_value = expected_result
153 154 result = model.get_global_settings()
154 155
155 156 collect_mock.assert_called_once_with(global_=True)
156 157 assert result == expected_result
157 158
158 159 def test_get_repo_settings(self, repo_stub):
159 160 model = VcsSettingsModel(repo=repo_stub.repo_name)
160 161 expected_result = {'test': 'test'}
161 162 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
162 163 collect_mock.return_value = expected_result
163 164 result = model.get_repo_settings()
164 165
165 166 collect_mock.assert_called_once_with(global_=False)
166 167 assert result == expected_result
167 168
168 169 @pytest.mark.parametrize('settings, global_', [
169 170 ('global_settings', True),
170 171 ('repo_settings', False)
171 172 ])
172 173 def test_collect_all_settings(self, settings, global_):
173 174 model = VcsSettingsModel()
174 175 result_mock = self._mock_result()
175 176
176 177 settings_patch = mock.patch.object(model, settings)
177 178 with settings_patch as settings_mock:
178 179 settings_mock.get_ui_by_section_and_key.return_value = result_mock
179 180 settings_mock.get_setting_by_name.return_value = result_mock
180 181 result = model._collect_all_settings(global_=global_)
181 182
182 183 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
183 184 self._assert_get_settings_calls(
184 185 settings_mock, ui_settings, model.GENERAL_SETTINGS)
185 186 self._assert_collect_all_settings_result(
186 187 ui_settings, model.GENERAL_SETTINGS, result)
187 188
188 189 @pytest.mark.parametrize('settings, global_', [
189 190 ('global_settings', True),
190 191 ('repo_settings', False)
191 192 ])
192 193 def test_collect_all_settings_without_empty_value(self, settings, global_):
193 194 model = VcsSettingsModel()
194 195
195 196 settings_patch = mock.patch.object(model, settings)
196 197 with settings_patch as settings_mock:
197 198 settings_mock.get_ui_by_section_and_key.return_value = None
198 199 settings_mock.get_setting_by_name.return_value = None
199 200 result = model._collect_all_settings(global_=global_)
200 201
201 202 assert result == {}
202 203
203 204 def _mock_result(self):
204 205 result_mock = mock.Mock()
205 206 result_mock.ui_value = 'ui_value'
206 207 result_mock.ui_active = True
207 208 result_mock.app_settings_value = 'setting_value'
208 209 return result_mock
209 210
210 211 def _assert_get_settings_calls(
211 212 self, settings_mock, ui_settings, general_settings):
212 213 assert (
213 214 settings_mock.get_ui_by_section_and_key.call_count ==
214 215 len(ui_settings))
215 216 assert (
216 217 settings_mock.get_setting_by_name.call_count ==
217 218 len(general_settings))
218 219
219 220 for section, key in ui_settings:
220 221 expected_call = mock.call(section, key)
221 222 assert (
222 223 expected_call in
223 224 settings_mock.get_ui_by_section_and_key.call_args_list)
224 225
225 226 for name in general_settings:
226 227 expected_call = mock.call(name)
227 228 assert (
228 229 expected_call in
229 230 settings_mock.get_setting_by_name.call_args_list)
230 231
231 232 def _assert_collect_all_settings_result(
232 233 self, ui_settings, general_settings, result):
233 234 expected_result = {}
234 235 for section, key in ui_settings:
235 236 key = '{}_{}'.format(section, key.replace('.', '_'))
236 237
237 238 if section in ('extensions', 'hooks'):
238 239 value = True
239 240 elif key in ['vcs_git_lfs_enabled']:
240 241 value = True
241 242 else:
242 243 value = 'ui_value'
243 244 expected_result[key] = value
244 245
245 246 for name in general_settings:
246 247 key = 'rhodecode_' + name
247 248 expected_result[key] = 'setting_value'
248 249
249 250 assert expected_result == result
250 251
251 252
252 253 class TestCreateOrUpdateRepoHookSettings(object):
253 254 def test_create_when_no_repo_object_found(self, repo_stub):
254 255 model = VcsSettingsModel(repo=repo_stub.repo_name)
255 256
256 257 self._create_settings(model, HOOKS_FORM_DATA)
257 258
258 259 cleanup = []
259 260 try:
260 261 for section, key in model.HOOKS_SETTINGS:
261 262 ui = model.repo_settings.get_ui_by_section_and_key(
262 263 section, key)
263 264 assert ui.ui_active is True
264 265 cleanup.append(ui)
265 266 finally:
266 267 for ui in cleanup:
267 268 Session().delete(ui)
268 269 Session().commit()
269 270
270 271 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
271 272 model = VcsSettingsModel(repo=repo_stub.repo_name)
272 273
273 274 deleted_key = 'hooks_changegroup_repo_size'
274 275 data = HOOKS_FORM_DATA.copy()
275 276 data.pop(deleted_key)
276 277
277 278 with pytest.raises(ValueError) as exc_info:
278 279 model.create_or_update_repo_hook_settings(data)
279 280 assert (
280 281 exc_info.value.message ==
281 282 'The given data does not contain {} key'.format(deleted_key))
282 283
283 284 def test_update_when_repo_object_found(self, repo_stub, settings_util):
284 285 model = VcsSettingsModel(repo=repo_stub.repo_name)
285 286 for section, key in model.HOOKS_SETTINGS:
286 287 settings_util.create_repo_rhodecode_ui(
287 288 repo_stub, section, None, key=key, active=False)
288 289 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
289 290 for section, key in model.HOOKS_SETTINGS:
290 291 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
291 292 assert ui.ui_active is True
292 293
293 294 def _create_settings(self, model, data):
294 295 global_patch = mock.patch.object(model, 'global_settings')
295 296 global_setting = mock.Mock()
296 297 global_setting.ui_value = 'Test value'
297 298 with global_patch as global_mock:
298 299 global_mock.get_ui_by_section_and_key.return_value = global_setting
299 300 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
300 301
301 302
302 303 class TestUpdateGlobalHookSettings(object):
303 304 def test_update_raises_exception_when_data_incomplete(self):
304 305 model = VcsSettingsModel()
305 306
306 307 deleted_key = 'hooks_changegroup_repo_size'
307 308 data = HOOKS_FORM_DATA.copy()
308 309 data.pop(deleted_key)
309 310
310 311 with pytest.raises(ValueError) as exc_info:
311 312 model.update_global_hook_settings(data)
312 313 assert (
313 314 exc_info.value.message ==
314 315 'The given data does not contain {} key'.format(deleted_key))
315 316
316 317 def test_update_global_hook_settings(self, settings_util):
317 318 model = VcsSettingsModel()
318 319 setting_mock = mock.MagicMock()
319 320 setting_mock.ui_active = False
320 321 get_settings_patcher = mock.patch.object(
321 322 model.global_settings, 'get_ui_by_section_and_key',
322 323 return_value=setting_mock)
323 324 session_patcher = mock.patch('rhodecode.model.settings.Session')
324 325 with get_settings_patcher as get_settings_mock, session_patcher:
325 326 model.update_global_hook_settings(HOOKS_FORM_DATA)
326 327 assert setting_mock.ui_active is True
327 328 assert get_settings_mock.call_count == 3
328 329
329 330
330 331 class TestCreateOrUpdateRepoGeneralSettings(object):
331 332 def test_calls_create_or_update_general_settings(self, repo_stub):
332 333 model = VcsSettingsModel(repo=repo_stub.repo_name)
333 334 create_patch = mock.patch.object(
334 335 model, '_create_or_update_general_settings')
335 336 with create_patch as create_mock:
336 337 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
337 338 create_mock.assert_called_once_with(
338 339 model.repo_settings, GENERAL_FORM_DATA)
339 340
340 341 def test_raises_exception_when_repository_is_not_specified(self):
341 342 model = VcsSettingsModel()
342 343 with pytest.raises(Exception) as exc_info:
343 344 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
344 345 assert exc_info.value.message == 'Repository is not specified'
345 346
346 347
347 348 class TestCreateOrUpdateGlobalGeneralSettings(object):
348 349 def test_calls_create_or_update_general_settings(self):
349 350 model = VcsSettingsModel()
350 351 create_patch = mock.patch.object(
351 352 model, '_create_or_update_general_settings')
352 353 with create_patch as create_mock:
353 354 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
354 355 create_mock.assert_called_once_with(
355 356 model.global_settings, GENERAL_FORM_DATA)
356 357
357 358
358 359 class TestCreateOrUpdateGeneralSettings(object):
359 360 def test_create_when_no_repo_settings_found(self, repo_stub):
360 361 model = VcsSettingsModel(repo=repo_stub.repo_name)
361 362 model._create_or_update_general_settings(
362 363 model.repo_settings, GENERAL_FORM_DATA)
363 364
364 365 cleanup = []
365 366 try:
366 367 for name in model.GENERAL_SETTINGS:
367 368 setting = model.repo_settings.get_setting_by_name(name)
368 369 assert setting.app_settings_value is True
369 370 cleanup.append(setting)
370 371 finally:
371 372 for setting in cleanup:
372 373 Session().delete(setting)
373 374 Session().commit()
374 375
375 376 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
376 377 model = VcsSettingsModel(repo=repo_stub.repo_name)
377 378
378 379 deleted_key = 'rhodecode_pr_merge_enabled'
379 380 data = GENERAL_FORM_DATA.copy()
380 381 data.pop(deleted_key)
381 382
382 383 with pytest.raises(ValueError) as exc_info:
383 384 model._create_or_update_general_settings(model.repo_settings, data)
384 385 assert (
385 386 exc_info.value.message ==
386 387 'The given data does not contain {} key'.format(deleted_key))
387 388
388 389 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
389 390 model = VcsSettingsModel(repo=repo_stub.repo_name)
390 391 for name in model.GENERAL_SETTINGS:
391 392 settings_util.create_repo_rhodecode_setting(
392 393 repo_stub, name, False, 'bool')
393 394
394 395 model._create_or_update_general_settings(
395 396 model.repo_settings, GENERAL_FORM_DATA)
396 397
397 398 for name in model.GENERAL_SETTINGS:
398 399 setting = model.repo_settings.get_setting_by_name(name)
399 400 assert setting.app_settings_value is True
400 401
401 402
402 403 class TestCreateRepoSvnSettings(object):
403 404 def test_calls_create_svn_settings(self, repo_stub):
404 405 model = VcsSettingsModel(repo=repo_stub.repo_name)
405 406 with mock.patch.object(model, '_create_svn_settings') as create_mock:
406 407 model.create_repo_svn_settings(SVN_FORM_DATA)
407 408 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
408 409
409 410 def test_raises_exception_when_repository_is_not_specified(self):
410 411 model = VcsSettingsModel()
411 412 with pytest.raises(Exception) as exc_info:
412 413 model.create_repo_svn_settings(SVN_FORM_DATA)
413 414 assert exc_info.value.message == 'Repository is not specified'
414 415
415 416
416 417 class TestCreateSvnSettings(object):
417 418 def test_create(self, repo_stub):
418 419 model = VcsSettingsModel(repo=repo_stub.repo_name)
419 420 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
420 421 Session().commit()
421 422
422 423 branch_ui = model.repo_settings.get_ui_by_section(
423 424 model.SVN_BRANCH_SECTION)
424 425 tag_ui = model.repo_settings.get_ui_by_section(
425 426 model.SVN_TAG_SECTION)
426 427
427 428 try:
428 429 assert len(branch_ui) == 1
429 430 assert len(tag_ui) == 1
430 431 finally:
431 432 Session().delete(branch_ui[0])
432 433 Session().delete(tag_ui[0])
433 434 Session().commit()
434 435
435 436 def test_create_tag(self, repo_stub):
436 437 model = VcsSettingsModel(repo=repo_stub.repo_name)
437 438 data = SVN_FORM_DATA.copy()
438 439 data.pop('new_svn_branch')
439 440 model._create_svn_settings(model.repo_settings, data)
440 441 Session().commit()
441 442
442 443 branch_ui = model.repo_settings.get_ui_by_section(
443 444 model.SVN_BRANCH_SECTION)
444 445 tag_ui = model.repo_settings.get_ui_by_section(
445 446 model.SVN_TAG_SECTION)
446 447
447 448 try:
448 449 assert len(branch_ui) == 0
449 450 assert len(tag_ui) == 1
450 451 finally:
451 452 Session().delete(tag_ui[0])
452 453 Session().commit()
453 454
454 455 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
455 456 model = VcsSettingsModel(repo=repo_stub.repo_name)
456 457 model._create_svn_settings(model.repo_settings, {})
457 458 Session().commit()
458 459
459 460 branch_ui = model.repo_settings.get_ui_by_section(
460 461 model.SVN_BRANCH_SECTION)
461 462 tag_ui = model.repo_settings.get_ui_by_section(
462 463 model.SVN_TAG_SECTION)
463 464
464 465 assert len(branch_ui) == 0
465 466 assert len(tag_ui) == 0
466 467
467 468 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
468 469 model = VcsSettingsModel(repo=repo_stub.repo_name)
469 470 data = {
470 471 'new_svn_branch': '',
471 472 'new_svn_tag': ''
472 473 }
473 474 model._create_svn_settings(model.repo_settings, data)
474 475 Session().commit()
475 476
476 477 branch_ui = model.repo_settings.get_ui_by_section(
477 478 model.SVN_BRANCH_SECTION)
478 479 tag_ui = model.repo_settings.get_ui_by_section(
479 480 model.SVN_TAG_SECTION)
480 481
481 482 assert len(branch_ui) == 0
482 483 assert len(tag_ui) == 0
483 484
484 485
485 486 class TestCreateOrUpdateUi(object):
486 487 def test_create(self, repo_stub):
487 488 model = VcsSettingsModel(repo=repo_stub.repo_name)
488 489 model._create_or_update_ui(
489 490 model.repo_settings, 'test-section', 'test-key', active=False,
490 491 value='False')
491 492 Session().commit()
492 493
493 494 created_ui = model.repo_settings.get_ui_by_section_and_key(
494 495 'test-section', 'test-key')
495 496
496 497 try:
497 498 assert created_ui.ui_active is False
498 499 assert str2bool(created_ui.ui_value) is False
499 500 finally:
500 501 Session().delete(created_ui)
501 502 Session().commit()
502 503
503 504 def test_update(self, repo_stub, settings_util):
504 505 model = VcsSettingsModel(repo=repo_stub.repo_name)
505 506
506 507 largefiles, phases, evolve = model.HG_SETTINGS
507 508
508 509 section = 'test-section'
509 510 key = 'test-key'
510 511 settings_util.create_repo_rhodecode_ui(
511 512 repo_stub, section, 'True', key=key, active=True)
512 513
513 514 model._create_or_update_ui(
514 515 model.repo_settings, section, key, active=False, value='False')
515 516 Session().commit()
516 517
517 518 created_ui = model.repo_settings.get_ui_by_section_and_key(
518 519 section, key)
519 520 assert created_ui.ui_active is False
520 521 assert str2bool(created_ui.ui_value) is False
521 522
522 523
523 524 class TestCreateOrUpdateRepoHgSettings(object):
524 525 FORM_DATA = {
525 526 'extensions_largefiles': False,
526 527 'extensions_evolve': False,
527 528 'phases_publish': False
528 529 }
529 530
530 531 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
531 532 model = VcsSettingsModel(repo=repo_stub.repo_name)
532 533 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
533 534 model.create_or_update_repo_hg_settings(self.FORM_DATA)
534 535 expected_calls = [
535 536 mock.call(model.repo_settings, 'extensions', 'largefiles',
536 537 active=False, value=''),
537 538 mock.call(model.repo_settings, 'extensions', 'evolve',
538 539 active=False, value=''),
539 540 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
540 541 ]
541 542 assert expected_calls == create_mock.call_args_list
542 543
543 544 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
544 545 def test_key_is_not_found(self, repo_stub, field_to_remove):
545 546 model = VcsSettingsModel(repo=repo_stub.repo_name)
546 547 data = self.FORM_DATA.copy()
547 548 data.pop(field_to_remove)
548 549 with pytest.raises(ValueError) as exc_info:
549 550 model.create_or_update_repo_hg_settings(data)
550 551 expected_message = 'The given data does not contain {} key'.format(
551 552 field_to_remove)
552 553 assert exc_info.value.message == expected_message
553 554
554 555 def test_create_raises_exception_when_repository_not_specified(self):
555 556 model = VcsSettingsModel()
556 557 with pytest.raises(Exception) as exc_info:
557 558 model.create_or_update_repo_hg_settings(self.FORM_DATA)
558 559 assert exc_info.value.message == 'Repository is not specified'
559 560
560 561
561 562 class TestUpdateGlobalSslSetting(object):
562 563 def test_updates_global_hg_settings(self):
563 564 model = VcsSettingsModel()
564 565 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
565 566 model.update_global_ssl_setting('False')
566 567 create_mock.assert_called_once_with(
567 568 model.global_settings, 'web', 'push_ssl', value='False')
568 569
569 570
570 571 class TestUpdateGlobalPathSetting(object):
571 572 def test_updates_global_path_settings(self):
572 573 model = VcsSettingsModel()
573 574 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
574 575 model.update_global_path_setting('False')
575 576 create_mock.assert_called_once_with(
576 577 model.global_settings, 'paths', '/', value='False')
577 578
578 579
579 580 class TestCreateOrUpdateGlobalHgSettings(object):
580 581 FORM_DATA = {
581 582 'extensions_largefiles': False,
582 583 'largefiles_usercache': '/example/largefiles-store',
583 584 'phases_publish': False,
584 585 'extensions_hgsubversion': False,
585 586 'extensions_evolve': False
586 587 }
587 588
588 589 def test_creates_repo_hg_settings_when_data_is_correct(self):
589 590 model = VcsSettingsModel()
590 591 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
591 592 model.create_or_update_global_hg_settings(self.FORM_DATA)
592 593 expected_calls = [
593 594 mock.call(model.global_settings, 'extensions', 'largefiles',
594 595 active=False, value=''),
595 596 mock.call(model.global_settings, 'largefiles', 'usercache',
596 597 value='/example/largefiles-store'),
597 598 mock.call(model.global_settings, 'phases', 'publish',
598 599 value='False'),
599 600 mock.call(model.global_settings, 'extensions', 'hgsubversion',
600 601 active=False),
601 602 mock.call(model.global_settings, 'extensions', 'evolve',
602 603 active=False, value='')
603 604 ]
604 605 assert expected_calls == create_mock.call_args_list
605 606
606 607 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
607 608 def test_key_is_not_found(self, repo_stub, field_to_remove):
608 609 model = VcsSettingsModel(repo=repo_stub.repo_name)
609 610 data = self.FORM_DATA.copy()
610 611 data.pop(field_to_remove)
611 612 with pytest.raises(Exception) as exc_info:
612 613 model.create_or_update_global_hg_settings(data)
613 614 expected_message = 'The given data does not contain {} key'.format(
614 615 field_to_remove)
615 616 assert exc_info.value.message == expected_message
616 617
617 618
618 619 class TestCreateOrUpdateGlobalGitSettings(object):
619 620 FORM_DATA = {
620 621 'vcs_git_lfs_enabled': False,
621 622 'vcs_git_lfs_store_location': '/example/lfs-store',
622 623 }
623 624
624 625 def test_creates_repo_hg_settings_when_data_is_correct(self):
625 626 model = VcsSettingsModel()
626 627 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
627 628 model.create_or_update_global_git_settings(self.FORM_DATA)
628 629 expected_calls = [
629 630 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled',
630 631 active=False, value=False),
631 632 mock.call(model.global_settings, 'vcs_git_lfs', 'store_location',
632 633 value='/example/lfs-store'),
633 634 ]
634 635 assert expected_calls == create_mock.call_args_list
635 636
636 637
637 638 class TestDeleteRepoSvnPattern(object):
638 639 def test_success_when_repo_is_set(self, backend_svn, settings_util):
639 640 repo = backend_svn.create_repo()
640 641 repo_name = repo.repo_name
641 642
642 643 model = VcsSettingsModel(repo=repo_name)
643 644 entry = settings_util.create_repo_rhodecode_ui(
644 645 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
645 646 Session().commit()
646 647
647 648 model.delete_repo_svn_pattern(entry.ui_id)
648 649
649 650 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
650 651 repo_name = backend_svn.repo_name
651 652 model = VcsSettingsModel(repo=repo_name)
652 653 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
653 654 with delete_ui_patch as delete_ui_mock:
654 655 model.delete_repo_svn_pattern(123)
655 656 delete_ui_mock.assert_called_once_with(-1)
656 657
657 658 def test_raises_exception_when_repository_is_not_specified(self):
658 659 model = VcsSettingsModel()
659 660 with pytest.raises(Exception) as exc_info:
660 661 model.delete_repo_svn_pattern(123)
661 662 assert exc_info.value.message == 'Repository is not specified'
662 663
663 664
664 665 class TestDeleteGlobalSvnPattern(object):
665 666 def test_delete_global_svn_pattern_calls_delete_ui(self):
666 667 model = VcsSettingsModel()
667 668 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
668 669 with delete_ui_patch as delete_ui_mock:
669 670 model.delete_global_svn_pattern(123)
670 671 delete_ui_mock.assert_called_once_with(123)
671 672
672 673
673 674 class TestFilterUiSettings(object):
674 675 def test_settings_are_filtered(self):
675 676 model = VcsSettingsModel()
676 677 repo_settings = [
677 678 UiSetting('extensions', 'largefiles', '', True),
678 679 UiSetting('phases', 'publish', 'True', True),
679 680 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
680 681 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
681 682 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
682 683 UiSetting(
683 684 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
684 685 'test_branch', True),
685 686 UiSetting(
686 687 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
687 688 'test_tag', True),
688 689 ]
689 690 non_repo_settings = [
690 691 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
691 692 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
692 693 UiSetting('hooks', 'test2', 'hook', True),
693 694 UiSetting(
694 695 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
695 696 'test_tag', True),
696 697 ]
697 698 settings = repo_settings + non_repo_settings
698 699 filtered_settings = model._filter_ui_settings(settings)
699 700 assert sorted(filtered_settings) == sorted(repo_settings)
700 701
701 702
702 703 class TestFilterGeneralSettings(object):
703 704 def test_settings_are_filtered(self):
704 705 model = VcsSettingsModel()
705 706 settings = {
706 707 'rhodecode_abcde': 'value1',
707 708 'rhodecode_vwxyz': 'value2',
708 709 }
709 710 general_settings = {
710 711 'rhodecode_{}'.format(key): 'value'
711 712 for key in VcsSettingsModel.GENERAL_SETTINGS
712 713 }
713 714 settings.update(general_settings)
714 715
715 716 filtered_settings = model._filter_general_settings(general_settings)
716 717 assert sorted(filtered_settings) == sorted(general_settings)
717 718
718 719
719 720 class TestGetRepoUiSettings(object):
720 721 def test_global_uis_are_returned_when_no_repo_uis_found(
721 722 self, repo_stub):
722 723 model = VcsSettingsModel(repo=repo_stub.repo_name)
723 724 result = model.get_repo_ui_settings()
724 725 svn_sections = (
725 726 VcsSettingsModel.SVN_TAG_SECTION,
726 727 VcsSettingsModel.SVN_BRANCH_SECTION)
727 728 expected_result = [
728 729 s for s in model.global_settings.get_ui()
729 730 if s.section not in svn_sections]
730 731 assert sorted(result) == sorted(expected_result)
731 732
732 733 def test_repo_uis_are_overriding_global_uis(
733 734 self, repo_stub, settings_util):
734 735 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
735 736 settings_util.create_repo_rhodecode_ui(
736 737 repo_stub, section, 'repo', key=key, active=False)
737 738 model = VcsSettingsModel(repo=repo_stub.repo_name)
738 739 result = model.get_repo_ui_settings()
739 740 for setting in result:
740 741 locator = (setting.section, setting.key)
741 742 if locator in VcsSettingsModel.HOOKS_SETTINGS:
742 743 assert setting.value == 'repo'
743 744
744 745 assert setting.active is False
745 746
746 747 def test_global_svn_patterns_are_not_in_list(
747 748 self, repo_stub, settings_util):
748 749 svn_sections = (
749 750 VcsSettingsModel.SVN_TAG_SECTION,
750 751 VcsSettingsModel.SVN_BRANCH_SECTION)
751 752 for section in svn_sections:
752 753 settings_util.create_rhodecode_ui(
753 754 section, 'repo', key='deadbeef' + section, active=False)
754 755 model = VcsSettingsModel(repo=repo_stub.repo_name)
755 756 result = model.get_repo_ui_settings()
756 757 for setting in result:
757 758 assert setting.section not in svn_sections
758 759
759 760 def test_repo_uis_filtered_by_section_are_returned(
760 761 self, repo_stub, settings_util):
761 762 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
762 763 settings_util.create_repo_rhodecode_ui(
763 764 repo_stub, section, 'repo', key=key, active=False)
764 765 model = VcsSettingsModel(repo=repo_stub.repo_name)
765 766 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
766 767 result = model.get_repo_ui_settings(section=section)
767 768 for setting in result:
768 769 assert setting.section == section
769 770
770 771 def test_repo_uis_filtered_by_key_are_returned(
771 772 self, repo_stub, settings_util):
772 773 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
773 774 settings_util.create_repo_rhodecode_ui(
774 775 repo_stub, section, 'repo', key=key, active=False)
775 776 model = VcsSettingsModel(repo=repo_stub.repo_name)
776 777 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
777 778 result = model.get_repo_ui_settings(key=key)
778 779 for setting in result:
779 780 assert setting.key == key
780 781
781 782 def test_raises_exception_when_repository_is_not_specified(self):
782 783 model = VcsSettingsModel()
783 784 with pytest.raises(Exception) as exc_info:
784 785 model.get_repo_ui_settings()
785 786 assert exc_info.value.message == 'Repository is not specified'
786 787
787 788
788 789 class TestGetRepoGeneralSettings(object):
789 790 def test_global_settings_are_returned_when_no_repo_settings_found(
790 791 self, repo_stub):
791 792 model = VcsSettingsModel(repo=repo_stub.repo_name)
792 793 result = model.get_repo_general_settings()
793 794 expected_result = model.global_settings.get_all_settings()
794 795 assert sorted(result) == sorted(expected_result)
795 796
796 797 def test_repo_uis_are_overriding_global_uis(
797 798 self, repo_stub, settings_util):
798 799 for key in VcsSettingsModel.GENERAL_SETTINGS:
799 800 settings_util.create_repo_rhodecode_setting(
800 801 repo_stub, key, 'abcde', type_='unicode')
801 802 model = VcsSettingsModel(repo=repo_stub.repo_name)
802 803 result = model.get_repo_ui_settings()
803 804 for key in result:
804 805 if key in VcsSettingsModel.GENERAL_SETTINGS:
805 806 assert result[key] == 'abcde'
806 807
807 808 def test_raises_exception_when_repository_is_not_specified(self):
808 809 model = VcsSettingsModel()
809 810 with pytest.raises(Exception) as exc_info:
810 811 model.get_repo_general_settings()
811 812 assert exc_info.value.message == 'Repository is not specified'
812 813
813 814
814 815 class TestGetGlobalGeneralSettings(object):
815 816 def test_global_settings_are_returned(self, repo_stub):
816 817 model = VcsSettingsModel()
817 818 result = model.get_global_general_settings()
818 819 expected_result = model.global_settings.get_all_settings()
819 820 assert sorted(result) == sorted(expected_result)
820 821
821 822 def test_repo_uis_are_not_overriding_global_uis(
822 823 self, repo_stub, settings_util):
823 824 for key in VcsSettingsModel.GENERAL_SETTINGS:
824 825 settings_util.create_repo_rhodecode_setting(
825 826 repo_stub, key, 'abcde', type_='unicode')
826 827 model = VcsSettingsModel(repo=repo_stub.repo_name)
827 828 result = model.get_global_general_settings()
828 829 expected_result = model.global_settings.get_all_settings()
829 830 assert sorted(result) == sorted(expected_result)
830 831
831 832
832 833 class TestGetGlobalUiSettings(object):
833 834 def test_global_uis_are_returned(self, repo_stub):
834 835 model = VcsSettingsModel()
835 836 result = model.get_global_ui_settings()
836 837 expected_result = model.global_settings.get_ui()
837 838 assert sorted(result) == sorted(expected_result)
838 839
839 840 def test_repo_uis_are_not_overriding_global_uis(
840 841 self, repo_stub, settings_util):
841 842 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
842 843 settings_util.create_repo_rhodecode_ui(
843 844 repo_stub, section, 'repo', key=key, active=False)
844 845 model = VcsSettingsModel(repo=repo_stub.repo_name)
845 846 result = model.get_global_ui_settings()
846 847 expected_result = model.global_settings.get_ui()
847 848 assert sorted(result) == sorted(expected_result)
848 849
849 850 def test_ui_settings_filtered_by_section(
850 851 self, repo_stub, settings_util):
851 852 model = VcsSettingsModel(repo=repo_stub.repo_name)
852 853 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
853 854 result = model.get_global_ui_settings(section=section)
854 855 expected_result = model.global_settings.get_ui(section=section)
855 856 assert sorted(result) == sorted(expected_result)
856 857
857 858 def test_ui_settings_filtered_by_key(
858 859 self, repo_stub, settings_util):
859 860 model = VcsSettingsModel(repo=repo_stub.repo_name)
860 861 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
861 862 result = model.get_global_ui_settings(key=key)
862 863 expected_result = model.global_settings.get_ui(key=key)
863 864 assert sorted(result) == sorted(expected_result)
864 865
865 866
866 867 class TestGetGeneralSettings(object):
867 868 def test_global_settings_are_returned_when_inherited_is_true(
868 869 self, repo_stub, settings_util):
869 870 model = VcsSettingsModel(repo=repo_stub.repo_name)
870 871 model.inherit_global_settings = True
871 872 for key in VcsSettingsModel.GENERAL_SETTINGS:
872 873 settings_util.create_repo_rhodecode_setting(
873 874 repo_stub, key, 'abcde', type_='unicode')
874 875 result = model.get_general_settings()
875 876 expected_result = model.get_global_general_settings()
876 877 assert sorted(result) == sorted(expected_result)
877 878
878 879 def test_repo_settings_are_returned_when_inherited_is_false(
879 880 self, repo_stub, settings_util):
880 881 model = VcsSettingsModel(repo=repo_stub.repo_name)
881 882 model.inherit_global_settings = False
882 883 for key in VcsSettingsModel.GENERAL_SETTINGS:
883 884 settings_util.create_repo_rhodecode_setting(
884 885 repo_stub, key, 'abcde', type_='unicode')
885 886 result = model.get_general_settings()
886 887 expected_result = model.get_repo_general_settings()
887 888 assert sorted(result) == sorted(expected_result)
888 889
889 890 def test_global_settings_are_returned_when_no_repository_specified(self):
890 891 model = VcsSettingsModel()
891 892 result = model.get_general_settings()
892 893 expected_result = model.get_global_general_settings()
893 894 assert sorted(result) == sorted(expected_result)
894 895
895 896
896 897 class TestGetUiSettings(object):
897 898 def test_global_settings_are_returned_when_inherited_is_true(
898 899 self, repo_stub, settings_util):
899 900 model = VcsSettingsModel(repo=repo_stub.repo_name)
900 901 model.inherit_global_settings = True
901 902 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
902 903 settings_util.create_repo_rhodecode_ui(
903 904 repo_stub, section, 'repo', key=key, active=True)
904 905 result = model.get_ui_settings()
905 906 expected_result = model.get_global_ui_settings()
906 907 assert sorted(result) == sorted(expected_result)
907 908
908 909 def test_repo_settings_are_returned_when_inherited_is_false(
909 910 self, repo_stub, settings_util):
910 911 model = VcsSettingsModel(repo=repo_stub.repo_name)
911 912 model.inherit_global_settings = False
912 913 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
913 914 settings_util.create_repo_rhodecode_ui(
914 915 repo_stub, section, 'repo', key=key, active=True)
915 916 result = model.get_ui_settings()
916 917 expected_result = model.get_repo_ui_settings()
917 918 assert sorted(result) == sorted(expected_result)
918 919
919 920 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
920 921 model = VcsSettingsModel(repo=repo_stub.repo_name)
921 922 model.inherit_global_settings = False
922 923 args = ('section', 'key')
923 924 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
924 925 model.get_ui_settings(*args)
925 926 settings_mock.assert_called_once_with(*args)
926 927
927 928 def test_global_settings_filtered_by_section_and_key(self):
928 929 model = VcsSettingsModel()
929 930 args = ('section', 'key')
930 931 with mock.patch.object(model, 'get_global_ui_settings') as (
931 932 settings_mock):
932 933 model.get_ui_settings(*args)
933 934 settings_mock.assert_called_once_with(*args)
934 935
935 936 def test_global_settings_are_returned_when_no_repository_specified(self):
936 937 model = VcsSettingsModel()
937 938 result = model.get_ui_settings()
938 939 expected_result = model.get_global_ui_settings()
939 940 assert sorted(result) == sorted(expected_result)
940 941
941 942
942 943 class TestGetSvnPatterns(object):
943 944 def test_repo_settings_filtered_by_section(self, repo_stub):
944 945 model = VcsSettingsModel(repo=repo_stub.repo_name)
945 946 args = ('section', )
946 947 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
947 948 model.get_svn_patterns(*args)
948 949 settings_mock.assert_called_once_with(*args)
949 950
950 951 def test_global_settings_filtered_by_section(self):
951 952 model = VcsSettingsModel()
952 953 args = ('section', )
953 954 with mock.patch.object(model, 'get_global_ui_settings') as (
954 955 settings_mock):
955 956 model.get_svn_patterns(*args)
956 957 settings_mock.assert_called_once_with(*args)
957 958
958 959
959 960 class TestGetReposLocation(object):
960 961 def test_returns_repos_location(self, repo_stub):
961 962 model = VcsSettingsModel()
962 963
963 964 result_mock = mock.Mock()
964 965 result_mock.ui_value = '/tmp'
965 966
966 967 with mock.patch.object(model, 'global_settings') as settings_mock:
967 968 settings_mock.get_ui_by_key.return_value = result_mock
968 969 result = model.get_repos_location()
969 970
970 971 settings_mock.get_ui_by_key.assert_called_once_with('/')
971 972 assert result == '/tmp'
972 973
973 974
974 975 class TestCreateOrUpdateRepoSettings(object):
975 976 FORM_DATA = {
976 977 'inherit_global_settings': False,
977 978 'hooks_changegroup_repo_size': False,
978 979 'hooks_changegroup_push_logger': False,
979 980 'hooks_outgoing_pull_logger': False,
980 981 'extensions_largefiles': False,
981 982 'extensions_evolve': False,
982 983 'largefiles_usercache': '/example/largefiles-store',
983 984 'vcs_git_lfs_enabled': False,
984 985 'vcs_git_lfs_store_location': '/',
985 986 'phases_publish': 'False',
986 987 'rhodecode_pr_merge_enabled': False,
987 988 'rhodecode_use_outdated_comments': False,
988 989 'new_svn_branch': '',
989 990 'new_svn_tag': ''
990 991 }
991 992
992 993 def test_get_raises_exception_when_repository_not_specified(self):
993 994 model = VcsSettingsModel()
994 995 with pytest.raises(Exception) as exc_info:
995 996 model.create_or_update_repo_settings(data=self.FORM_DATA)
996 997 assert exc_info.value.message == 'Repository is not specified'
997 998
998 999 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
999 1000 repo = backend_svn.create_repo()
1000 1001 model = VcsSettingsModel(repo=repo)
1001 1002 with self._patch_model(model) as mocks:
1002 1003 model.create_or_update_repo_settings(
1003 1004 data=self.FORM_DATA, inherit_global_settings=False)
1004 1005 mocks['create_repo_svn_settings'].assert_called_once_with(
1005 1006 self.FORM_DATA)
1006 1007 non_called_methods = (
1007 1008 'create_or_update_repo_hook_settings',
1008 1009 'create_or_update_repo_pr_settings',
1009 1010 'create_or_update_repo_hg_settings')
1010 1011 for method in non_called_methods:
1011 1012 assert mocks[method].call_count == 0
1012 1013
1013 1014 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1014 1015 repo = backend_hg.create_repo()
1015 1016 model = VcsSettingsModel(repo=repo)
1016 1017 with self._patch_model(model) as mocks:
1017 1018 model.create_or_update_repo_settings(
1018 1019 data=self.FORM_DATA, inherit_global_settings=False)
1019 1020
1020 1021 assert mocks['create_repo_svn_settings'].call_count == 0
1021 1022 called_methods = (
1022 1023 'create_or_update_repo_hook_settings',
1023 1024 'create_or_update_repo_pr_settings',
1024 1025 'create_or_update_repo_hg_settings')
1025 1026 for method in called_methods:
1026 1027 mocks[method].assert_called_once_with(self.FORM_DATA)
1027 1028
1028 1029 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1029 1030 self, backend_git):
1030 1031 repo = backend_git.create_repo()
1031 1032 model = VcsSettingsModel(repo=repo)
1032 1033 with self._patch_model(model) as mocks:
1033 1034 model.create_or_update_repo_settings(
1034 1035 data=self.FORM_DATA, inherit_global_settings=False)
1035 1036
1036 1037 assert mocks['create_repo_svn_settings'].call_count == 0
1037 1038 called_methods = (
1038 1039 'create_or_update_repo_hook_settings',
1039 1040 'create_or_update_repo_pr_settings')
1040 1041 non_called_methods = (
1041 1042 'create_repo_svn_settings',
1042 1043 'create_or_update_repo_hg_settings'
1043 1044 )
1044 1045 for method in called_methods:
1045 1046 mocks[method].assert_called_once_with(self.FORM_DATA)
1046 1047 for method in non_called_methods:
1047 1048 assert mocks[method].call_count == 0
1048 1049
1049 1050 def test_no_methods_are_called_when_settings_are_inherited(
1050 1051 self, backend):
1051 1052 repo = backend.create_repo()
1052 1053 model = VcsSettingsModel(repo=repo)
1053 1054 with self._patch_model(model) as mocks:
1054 1055 model.create_or_update_repo_settings(
1055 1056 data=self.FORM_DATA, inherit_global_settings=True)
1056 1057 for method_name in mocks:
1057 1058 assert mocks[method_name].call_count == 0
1058 1059
1059 1060 def test_cache_is_marked_for_invalidation(self, repo_stub):
1060 1061 model = VcsSettingsModel(repo=repo_stub)
1061 1062 invalidation_patcher = mock.patch(
1062 1063 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1063 1064 with invalidation_patcher as invalidation_mock:
1064 1065 model.create_or_update_repo_settings(
1065 1066 data=self.FORM_DATA, inherit_global_settings=True)
1066 1067 invalidation_mock.assert_called_once_with(
1067 1068 repo_stub.repo_name, delete=True)
1068 1069
1069 1070 def test_inherit_flag_is_saved(self, repo_stub):
1070 1071 model = VcsSettingsModel(repo=repo_stub)
1071 1072 model.inherit_global_settings = True
1072 1073 with self._patch_model(model):
1073 1074 model.create_or_update_repo_settings(
1074 1075 data=self.FORM_DATA, inherit_global_settings=False)
1075 1076 assert model.inherit_global_settings is False
1076 1077
1077 1078 def _patch_model(self, model):
1078 1079 return mock.patch.multiple(
1079 1080 model,
1080 1081 create_repo_svn_settings=mock.DEFAULT,
1081 1082 create_or_update_repo_hook_settings=mock.DEFAULT,
1082 1083 create_or_update_repo_pr_settings=mock.DEFAULT,
1083 1084 create_or_update_repo_hg_settings=mock.DEFAULT)