@@ -33,6 +33,7 @@ from rhodecode.model import user_group
 from rhodecode.model import user
 from rhodecode.model.db import User
 from rhodecode.model.scm import ScmModel
+from rhodecode.model.settings import VcsSettingsModel
 
 log = logging.getLogger(__name__)
 
@@ -256,6 +257,11 @@ class RepoAppView(BaseAppView):
         f_path_match = self._get_f_path_unchecked(matchdict, default)
         return self.path_filter.assert_path_permissions(f_path_match)
 
+    def _get_general_setting(self, target_repo, settings_key, default=False):
+        settings_model = VcsSettingsModel(repo=target_repo)
+        settings = settings_model.get_general_settings()
+        return settings.get(settings_key, default)
+
 
 class PathFilter(object):
 
@@ -34,17 +34,18 @@ from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
 
 from rhodecode.lib.compat import OrderedDict
+from rhodecode.lib.diffs import cache_diff, load_cached_diff, diff_cache_exist
 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
 import rhodecode.lib.helpers as h
-from rhodecode.lib.utils2 import safe_unicode
+from rhodecode.lib.utils2 import safe_unicode, str2bool
 from rhodecode.lib.vcs.backends.base import EmptyCommit
 from rhodecode.lib.vcs.exceptions import (
-    RepositoryError, CommitDoesNotExistError
+    RepositoryError, CommitDoesNotExistError)
 from rhodecode.model.db import ChangesetComment, ChangesetStatus
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.comment import CommentsModel
 from rhodecode.model.meta import Session
-
+from rhodecode.model.settings import VcsSettingsModel
 
 log = logging.getLogger(__name__)
 
@@ -152,6 +153,12 @@ class RepoCommitsView(RepoAppView):
 
         return c
 
+    def _is_diff_cache_enabled(self, target_repo):
+        caching_enabled = self._get_general_setting(
+            target_repo, 'rhodecode_diff_cache')
+        log.debug('Diff caching enabled: %s', caching_enabled)
+        return caching_enabled
+
     def _commit(self, commit_id_range, method):
         _ = self.request.translate
         c = self.load_default_context()
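Note: an identical _is_diff_cache_enabled helper is also added to the pull-requests view further down; both delegate to the new RepoAppView._get_general_setting above, so the effective lookup is roughly the sketch below (illustrative only, using the same VcsSettingsModel API the patch itself relies on):

from rhodecode.model.settings import VcsSettingsModel

def is_diff_cache_enabled(target_repo):
    # read the per-repo general settings; a missing key falls back to False
    settings = VcsSettingsModel(repo=target_repo).get_general_settings()
    return settings.get('rhodecode_diff_cache', False)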
@@ -240,43 +247,63 @@ class RepoCommitsView(RepoAppView):
             commit2 = commit
             commit1 = commit.parents[0] if commit.parents else EmptyCommit()
 
-            _diff = self.rhodecode_vcs_repo.get_diff(
-                commit1, commit2,
-                ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
-            diff_processor = diffs.DiffProcessor(
-                _diff, format='newdiff', diff_limit=diff_limit,
-                file_limit=file_limit, show_full_diff=c.fulldiff)
-
-            commit_changes = OrderedDict()
             if method == 'show':
-                _parsed = diff_processor.prepare()
-                c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
-
-                _parsed = diff_processor.prepare()
-
-                def _node_getter(commit):
-                    def get_node(fname):
-                        try:
-                            return commit.get_node(fname)
-                        except NodeDoesNotExistError:
-                            return None
-                    return get_node
-
                 inline_comments = CommentsModel().get_inline_comments(
                     self.db_repo.repo_id, revision=commit.raw_id)
                 c.inline_cnt = CommentsModel().get_inline_comments_count(
                     inline_comments)
+                c.inline_comments = inline_comments
 
-                diffset = codeblocks.DiffSet(
-                    repo_name=self.db_repo_name,
-                    source_node_getter=_node_getter(commit1),
-                    target_node_getter=_node_getter(commit2),
-                    comments=inline_comments)
-                diffset = self.path_filter.render_patchset_filtered(
-                    diffset, _parsed, commit1.raw_id, commit2.raw_id)
+                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
+                    self.db_repo)
+                cache_file_path = diff_cache_exist(
+                    cache_path, 'diff', commit.raw_id,
+                    ign_whitespace_lcl, context_lcl, c.fulldiff)
+
+                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
+                force_recache = str2bool(self.request.GET.get('force_recache'))
+
+                cached_diff = None
+                if caching_enabled:
+                    cached_diff = load_cached_diff(cache_file_path)
 
+                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
+                if not force_recache and has_proper_diff_cache:
+                    diffset = cached_diff['diff']
+                else:
+                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
+                        commit1, commit2,
+                        ignore_whitespace=ign_whitespace_lcl,
+                        context=context_lcl)
+
+                    diff_processor = diffs.DiffProcessor(
+                        vcs_diff, format='newdiff', diff_limit=diff_limit,
+                        file_limit=file_limit, show_full_diff=c.fulldiff)
+
+                    _parsed = diff_processor.prepare()
+
+                    diffset = codeblocks.DiffSet(
+                        repo_name=self.db_repo_name,
+                        source_node_getter=codeblocks.diffset_node_getter(commit1),
+                        target_node_getter=codeblocks.diffset_node_getter(commit2))
+
+                    diffset = self.path_filter.render_patchset_filtered(
+                        diffset, _parsed, commit1.raw_id, commit2.raw_id)
+
+                    # save cached diff
+                    if caching_enabled:
+                        cache_diff(cache_file_path, diffset, None)
+
+                c.limited_diff = diffset.limited_diff
                 c.changes[commit.raw_id] = diffset
             else:
+                # TODO(marcink): no cache usage here...
+                _diff = self.rhodecode_vcs_repo.get_diff(
+                    commit1, commit2,
+                    ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
+                diff_processor = diffs.DiffProcessor(
+                    _diff, format='newdiff', diff_limit=diff_limit,
+                    file_limit=file_limit, show_full_diff=c.fulldiff)
                 # downloads/raw we only need RAW diff nothing else
                 diff = self.path_filter.get_raw_patch(diff_processor)
                 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
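The block above is a plain load-or-compute-and-store scheme. A minimal standalone sketch of that flow, with compute standing in for the DiffProcessor/DiffSet pipeline (names other than cache_diff and load_cached_diff are made up for illustration):

from rhodecode.lib.diffs import cache_diff, load_cached_diff

def get_or_build_diffset(cache_file_path, caching_enabled, force_recache, compute):
    cached = load_cached_diff(cache_file_path) if caching_enabled else None
    if not force_recache and cached and cached.get('diff'):
        return cached['diff']  # cache hit
    diffset = compute()  # full diff computation
    if caching_enabled:
        cache_diff(cache_file_path, diffset, None)  # refresh the cache for next time
    return diffset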
@@ -295,19 +295,10 @@ class RepoCompareView(RepoAppView):
             file_limit=file_limit, show_full_diff=c.fulldiff)
         _parsed = diff_processor.prepare()
 
-        def _node_getter(commit):
-            """ Returns a function that returns a node for a commit or None """
-            def get_node(fname):
-                try:
-                    return commit.get_node(fname)
-                except NodeDoesNotExistError:
-                    return None
-            return get_node
-
         diffset = codeblocks.DiffSet(
             repo_name=source_repo.repo_name,
-            source_node_getter=_node_getter(source_commit),
-            target_node_getter=_node_getter(target_commit),
+            source_node_getter=codeblocks.diffset_node_getter(source_commit),
+            target_node_getter=codeblocks.diffset_node_getter(target_commit),
             )
         c.diffset = self.path_filter.render_patchset_filtered(
             diffset, _parsed, source_ref, target_ref)
@@ -34,6 +34,7 @@ from rhodecode.apps._base import RepoApp
 
 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
 from rhodecode.lib.base import vcs_operation_context
+from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
 from rhodecode.lib.ext_json import json
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
@@ -41,7 +42,7 @@ from rhodecode.lib.auth import (
 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
-    RepositoryRequirementError,
+    RepositoryRequirementError, EmptyRepositoryError)
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.comment import CommentsModel
 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
@@ -201,10 +202,16 @@ class RepoPullRequestsView(RepoAppView, 
 
         return data
 
+    def _is_diff_cache_enabled(self, target_repo):
+        caching_enabled = self._get_general_setting(
+            target_repo, 'rhodecode_diff_cache')
+        log.debug('Diff caching enabled: %s', caching_enabled)
+        return caching_enabled
+
     def _get_diffset(self, source_repo_name, source_repo,
                      source_ref_id, target_ref_id,
-                     target_commit, source_commit, diff_limit, fulldiff,
-                     file_limit, display_inline_comments):
+                     target_commit, source_commit, diff_limit, file_limit,
+                     fulldiff):
 
         vcs_diff = PullRequestModel().get_diff(
             source_repo, source_ref_id, target_ref_id)
@@ -215,21 +222,11 @@ class RepoPullRequestsView(RepoAppView, 
 
         _parsed = diff_processor.prepare()
 
-        def _node_getter(commit):
-            def get_node(fname):
-                try:
-                    return commit.get_node(fname)
-                except NodeDoesNotExistError:
-                    return None
-
-            return get_node
-
         diffset = codeblocks.DiffSet(
             repo_name=self.db_repo_name,
             source_repo_name=source_repo_name,
-            source_node_getter=_node_getter(target_commit),
-            target_node_getter=_node_getter(source_commit),
-            comments=display_inline_comments
+            source_node_getter=codeblocks.diffset_node_getter(target_commit),
+            target_node_getter=codeblocks.diffset_node_getter(source_commit),
             )
         diffset = self.path_filter.render_patchset_filtered(
             diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
@@ -443,42 +440,54 @@ class RepoPullRequestsView(RepoAppView, 
             commits_source_repo = source_scm
 
         c.commits_source_repo = commits_source_repo
-        commit_cache = {}
-        try:
-            pre_load = ["author", "branch", "date", "message"]
-            show_revs = pull_request_at_ver.revisions
-            for rev in show_revs:
-                comm = commits_source_repo.get_commit(
-                    commit_id=rev, pre_load=pre_load)
-                c.commit_ranges.append(comm)
-                commit_cache[comm.raw_id] = comm
-
-            # Order here matters, we first need to get target, and then
-            # the source
-            target_commit = commits_source_repo.get_commit(
-                commit_id=safe_str(target_ref_id))
-
-            source_commit = commits_source_repo.get_commit(
-                commit_id=safe_str(source_ref_id))
-
-        except CommitDoesNotExistError:
-            log.warning(
-                'Failed to get commit from `{}` repo'.format(
-                    commits_source_repo), exc_info=True)
-        except RepositoryRequirementError:
-            log.warning(
-                'Failed to get all required data from repo', exc_info=True)
-            c.missing_requirements = True
-
         c.ancestor = None  # set it to None, to hide it from PR view
 
-        try:
-            ancestor_id = source_scm.get_common_ancestor(
-                source_commit.raw_id, target_commit.raw_id, target_scm)
-            c.ancestor_commit = source_scm.get_commit(ancestor_id)
-        except Exception:
-            c.ancestor_commit = None
+        # empty version means latest, so we keep this to prevent
+        # double caching
+        version_normalized = version or 'latest'
+        from_version_normalized = from_version or 'latest'
+
+        cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
+            target_repo)
+        cache_file_path = diff_cache_exist(
+            cache_path, 'pull_request', pull_request_id, version_normalized,
+            from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
+
+        caching_enabled = self._is_diff_cache_enabled(c.target_repo)
+        force_recache = str2bool(self.request.GET.get('force_recache'))
+
+        cached_diff = None
+        if caching_enabled:
+            cached_diff = load_cached_diff(cache_file_path)
 
+        has_proper_commit_cache = (
+            cached_diff and cached_diff.get('commits')
+            and len(cached_diff.get('commits', [])) == 5
+            and cached_diff.get('commits')[0]
+            and cached_diff.get('commits')[3])
+        if not force_recache and has_proper_commit_cache:
+            diff_commit_cache = \
+                (ancestor_commit, commit_cache, missing_requirements,
+                 source_commit, target_commit) = cached_diff['commits']
+        else:
+            diff_commit_cache = \
+                (ancestor_commit, commit_cache, missing_requirements,
+                 source_commit, target_commit) = self.get_commits(
+                    commits_source_repo,
+                    pull_request_at_ver,
+                    source_commit,
+                    source_ref_id,
+                    source_scm,
+                    target_commit,
+                    target_ref_id,
+                    target_scm)
+
+        # register our commit range
+        for comm in commit_cache.values():
+            c.commit_ranges.append(comm)
+
+        c.missing_requirements = missing_requirements
+        c.ancestor_commit = ancestor_commit
         c.statuses = source_repo.statuses(
             [x.raw_id for x in c.commit_ranges])
 
@@ -500,12 +509,23 @@ class RepoPullRequestsView(RepoAppView, 
 
             c.missing_commits = True
         else:
+            c.inline_comments = display_inline_comments
 
-            c.diffset = self._get_diffset(
-                c.source_repo.repo_name, commits_source_repo,
-                source_ref_id, target_ref_id,
-                target_commit, source_commit,
-                diff_limit, c.fulldiff, file_limit, display_inline_comments)
+            has_proper_diff_cache = cached_diff and cached_diff.get('commits')
+            if not force_recache and has_proper_diff_cache:
+                c.diffset = cached_diff['diff']
+                (ancestor_commit, commit_cache, missing_requirements,
+                 source_commit, target_commit) = cached_diff['commits']
+            else:
+                c.diffset = self._get_diffset(
+                    c.source_repo.repo_name, commits_source_repo,
+                    source_ref_id, target_ref_id,
+                    target_commit, source_commit,
+                    diff_limit, file_limit, c.fulldiff)
+
+                # save cached diff
+                if caching_enabled:
+                    cache_diff(cache_file_path, c.diffset, diff_commit_cache)
 
         c.limited_diff = c.diffset.limited_diff
 
@@ -568,7 +588,6 @@ class RepoPullRequestsView(RepoAppView, 
         if self._rhodecode_user.user_id in allowed_reviewers:
             for co in general_comments:
                 if co.author.user_id == self._rhodecode_user.user_id:
-                    # each comment has a status change
                     status = co.status_change
                     if status:
                         _ver_pr = status[0].comment.pull_request_version_id
@@ -576,6 +595,43 @@ class RepoPullRequestsView(RepoAppView, 
 
         return self._get_template_context(c)
 
+    def get_commits(
+            self, commits_source_repo, pull_request_at_ver, source_commit,
+            source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
+        commit_cache = collections.OrderedDict()
+        missing_requirements = False
+        try:
+            pre_load = ["author", "branch", "date", "message"]
+            show_revs = pull_request_at_ver.revisions
+            for rev in show_revs:
+                comm = commits_source_repo.get_commit(
+                    commit_id=rev, pre_load=pre_load)
+                commit_cache[comm.raw_id] = comm
+
+            # Order here matters, we first need to get target, and then
+            # the source
+            target_commit = commits_source_repo.get_commit(
+                commit_id=safe_str(target_ref_id))
+
+            source_commit = commits_source_repo.get_commit(
+                commit_id=safe_str(source_ref_id))
+        except CommitDoesNotExistError:
+            log.warning(
+                'Failed to get commit from `{}` repo'.format(
+                    commits_source_repo), exc_info=True)
+        except RepositoryRequirementError:
+            log.warning(
+                'Failed to get all required data from repo', exc_info=True)
+            missing_requirements = True
+        ancestor_commit = None
+        try:
+            ancestor_id = source_scm.get_common_ancestor(
+                source_commit.raw_id, target_commit.raw_id, target_scm)
+            ancestor_commit = source_scm.get_commit(ancestor_id)
+        except Exception:
+            ancestor_commit = None
+        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
+
     def assure_not_empty_repo(self):
         _ = self.request.translate
 
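get_commits() returns the 5-tuple that is stored verbatim under the 'commits' key of the cache file; the has_proper_commit_cache check above only trusts a cached entry when all five slots are present and both the ancestor commit (index 0) and the source commit (index 3) are set. A small sketch of that validation, assuming cached_diff is the dict produced by load_cached_diff():

def has_proper_commit_cache(cached_diff):
    # expected layout: (ancestor_commit, commit_cache, missing_requirements,
    #                   source_commit, target_commit)
    commits = cached_diff.get('commits') if cached_diff else None
    return bool(commits and len(commits) == 5 and commits[0] and commits[3])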
@@ -30,6 +30,7 @@ from rhodecode.lib.helpers import (
     get_lexer_for_filenode, html_escape, get_custom_lexer)
 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict
 from rhodecode.lib.vcs.nodes import FileNode
+from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
 from rhodecode.lib.diff_match_patch import diff_match_patch
 from rhodecode.lib.diffs import LimitedDiffContainer
 from pygments.lexers import get_lexer_by_name
@@ -351,6 +352,16 @@ def tokens_diff(old_tokens, new_tokens, 
     return old_tokens_result, new_tokens_result, similarity
 
 
+def diffset_node_getter(commit):
+    def get_node(fname):
+        try:
+            return commit.get_node(fname)
+        except NodeDoesNotExistError:
+            return None
+
+    return get_node
+
+
 class DiffSet(object):
     """
     An object for parsing the diff result from diffs.DiffProcessor and
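diffset_node_getter() replaces the three identical _node_getter closures removed from the views: it binds one commit and resolves file paths against it, turning a missing file into None instead of an exception. A hedged usage sketch (FakeCommit is a made-up stand-in, not a real vcs backend object):

from rhodecode.lib.codeblocks import diffset_node_getter
from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError

class FakeCommit(object):
    # minimal stand-in exposing the get_node() interface the getter expects
    def __init__(self, files):
        self.files = files

    def get_node(self, fname):
        try:
            return self.files[fname]
        except KeyError:
            raise NodeDoesNotExistError(fname)

get_node = diffset_node_getter(FakeCommit({'setup.py': 'FileNode(setup.py)'}))
assert get_node('setup.py') == 'FileNode(setup.py)'
assert get_node('missing.py') is None  # NodeDoesNotExistError swallowed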
@@ -515,6 +526,7 @@ class DiffSet(object):
         if target_file_path in self.comments_store:
             for lineno, comments in self.comments_store[target_file_path].items():
                 left_comments[lineno] = comments
+
         # left comments are one that we couldn't place in diff lines.
         # could be outdated, or the diff changed and this line is no
         # longer available
@@ -551,7 +563,7 @@ class DiffSet(object):
 
         result.lines.extend(
             self.parse_lines(before, after, source_file, target_file))
-        result.unified = self.as_unified(result.lines)
+        result.unified = list(self.as_unified(result.lines))
         result.sideside = result.lines
 
         return result
@@ -606,8 +618,9 @@ class DiffSet(object):
             original.lineno = before['old_lineno']
             original.content = before['line']
             original.action = self.action_to_op(before['action'])
-            original.comments = self.get_comments_for('old',
-                source_file, before['old_lineno'])
+
+            original.get_comment_args = (
+                source_file, 'o', before['old_lineno'])
 
         if after:
             if after['action'] == 'new-no-nl':
@@ -619,8 +632,9 @@ class DiffSet(object):
             modified.lineno = after['new_lineno']
             modified.content = after['line']
             modified.action = self.action_to_op(after['action'])
-            modified.comments = self.get_comments_for('new',
-                target_file, after['new_lineno'])
+
+            modified.get_comment_args = (
+                target_file, 'n', after['new_lineno'])
 
         # diff the lines
         if before_tokens and after_tokens:
@@ -649,23 +663,6 @@ class DiffSet(object):
 
         return lines
 
-    def get_comments_for(self, version, filename, line_number):
-        if hasattr(filename, 'unicode_path'):
-            filename = filename.unicode_path
-
-        if not isinstance(filename, basestring):
-            return None
-
-        line_key = {
-            'old': 'o',
-            'new': 'n',
-        }[version] + str(line_number)
-
-        if filename in self.comments_store:
-            file_comments = self.comments_store[filename]
-            if line_key in file_comments:
-                return file_comments.pop(line_key)
-
     def get_line_tokens(self, line_text, line_number, file=None):
         filenode = None
         filename = None
@@ -722,25 +719,25 @@ class DiffSet(object):
             if line.original.action == ' ':
                 yield (line.original.lineno, line.modified.lineno,
                        line.original.action, line.original.content,
-                       line.original.comments)
+                       line.original.get_comment_args)
                 continue
 
             if line.original.action == '-':
                 yield (line.original.lineno, None,
                        line.original.action, line.original.content,
-                       line.original.comments)
+                       line.original.get_comment_args)
 
             if line.modified.action == '+':
                 buf.append((
                     None, line.modified.lineno,
                     line.modified.action, line.modified.content,
-                    line.modified.comments))
+                    line.modified.get_comment_args))
                 continue
 
             if line.modified:
                 yield (None, line.modified.lineno,
                        line.modified.action, line.modified.content,
-                       line.modified.comments)
+                       line.modified.get_comment_args)
 
             for b in buf:
                 yield b
@@ -23,16 +23,18 @@
 Set of diffing helpers, previously part of vcs
 """
 
+import os
 import re
 import collections
 import difflib
 import logging
+import cPickle as pickle
 
 from itertools import tee, imap
 
 from rhodecode.lib.vcs.exceptions import VCSError
 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
-from rhodecode.lib.utils2 import safe_unicode
+from rhodecode.lib.utils2 import safe_unicode, safe_str
 
 log = logging.getLogger(__name__)
 
@@ -1129,3 +1131,82 @@ class LineNotInDiffException(Exception):
 
 class DiffLimitExceeded(Exception):
     pass
+
+
+def cache_diff(cached_diff_file, diff, commits):
+
+    struct = {
+        'version': 'v1',
+        'diff': diff,
+        'commits': commits
+    }
+
+    try:
+        with open(cached_diff_file, 'wb') as f:
+            pickle.dump(struct, f)
+        log.debug('Saved diff cache under %s', cached_diff_file)
+    except Exception:
+        log.warn('Failed to save cache', exc_info=True)
+        # cleanup file to not store it "damaged"
+        try:
+            os.remove(cached_diff_file)
+        except Exception:
+            log.exception('Failed to cleanup path %s', cached_diff_file)
+
+
+def load_cached_diff(cached_diff_file):
+
+    default_struct = {
+        'version': 'v1',
+        'diff': None,
+        'commits': None
+    }
+
+    has_cache = os.path.isfile(cached_diff_file)
+    if not has_cache:
+        return default_struct
+
+    data = None
+    try:
+        with open(cached_diff_file, 'rb') as f:
+            data = pickle.load(f)
+        log.debug('Loaded diff cache from %s', cached_diff_file)
+    except Exception:
+        log.warn('Failed to read diff cache file', exc_info=True)
+
+    if not data:
+        data = default_struct
+
+    if not isinstance(data, dict):
+        # old version of data ?
+        data = default_struct
+
+    return data
+
+
+def generate_diff_cache_key(*args):
+    """
+    Helper to generate a cache key using arguments
+    """
+    def arg_mapper(input_param):
+        input_param = safe_str(input_param)
+        # we cannot allow '/' in arguments since it would allow
+        # subdirectory usage
+        input_param.replace('/', '_')
+        return input_param or None  # prevent empty string arguments
+
+    return '_'.join([
+        '{}' for i in range(len(args))]).format(*map(arg_mapper, args))
+
+
+def diff_cache_exist(cache_storage, *args):
+    """
+    Based on all generated arguments check and return a cache path
+    """
+    cache_key = generate_diff_cache_key(*args)
+    cache_file_path = os.path.join(cache_storage, cache_key)
+    # prevent path traversal attacks using some param that have e.g '../../'
+    if not os.path.abspath(cache_file_path).startswith(cache_storage):
+        raise ValueError('Final path must be within {}'.format(cache_storage))
+
+    return cache_file_path
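The cache layout is deliberately simple: one pickle file per key inside the per-repo shadow cache directory, with the key derived from the request parameters and '/' kept out of it so a key can never name a subdirectory. A standalone sketch of the same key, path-guard, and round-trip idea (a simplified re-implementation for illustration, not the functions above; the sketch reassigns the result of str.replace(), which returns a new string rather than mutating):

import os
import pickle
import tempfile

def cache_key(*args):
    parts = []
    for arg in args:
        arg = str(arg).replace('/', '_')  # reassigned: replace() does not mutate in place
        parts.append(arg or 'None')
    return '_'.join(parts)

def cache_path(storage_dir, *args):
    path = os.path.join(storage_dir, cache_key(*args))
    # refuse any key that would resolve outside the storage directory
    if not os.path.abspath(path).startswith(os.path.abspath(storage_dir)):
        raise ValueError('Final path must be within %s' % storage_dir)
    return path

# round trip in the spirit of cache_diff()/load_cached_diff()
storage = tempfile.mkdtemp()
target = cache_path(storage, 'pull_request', 123, 'latest')
with open(target, 'wb') as f:
    pickle.dump({'version': 'v1', 'diff': 'DIFF', 'commits': None}, f)
with open(target, 'rb') as f:
    assert pickle.load(f)['diff'] == 'DIFF'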
@@ -709,7 +709,19 @@ def extract_mentioned_users(s):
     return sorted(list(usrs), key=lambda k: k.lower())
 
 
-class StrictAttributeDict(dict):
+class AttributeDictBase(dict):
+    def __getstate__(self):
+        odict = self.__dict__  # get attribute dictionary
+        return odict
+
+    def __setstate__(self, dict):
+        self.__dict__ = dict
+
+    __setattr__ = dict.__setitem__
+    __delattr__ = dict.__delitem__
+
+
+class StrictAttributeDict(AttributeDictBase):
     """
     Strict Version of Attribute dict which raises an Attribute error when
     requested attribute is not set
@@ -720,15 +732,12 @@ class StrictAttributeDict(dict):
         except KeyError:
             raise AttributeError('%s object has no attribute %s' % (
                 self.__class__, attr))
-    __setattr__ = dict.__setitem__
-    __delattr__ = dict.__delitem__
 
 
-class AttributeDict(dict):
+class AttributeDict(AttributeDictBase):
     def __getattr__(self, attr):
         return self.get(attr, None)
-    __setattr__ = dict.__setitem__
-    __delattr__ = dict.__delitem__
+
 
 
 def fix_PATH(os_=None):
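The new __getstate__/__setstate__ pair matters because these attribute dicts end up inside the pickled diff cache, and AttributeDict.__getattr__ answers every unknown attribute with None, which can confuse pickle's optional __getstate__ lookup. Defining the methods explicitly keeps instances round-tripping cleanly; a self-contained sketch of the same pattern:

import pickle

class AttrDict(dict):
    # route attribute access to dict items, as AttributeDict does
    def __getattr__(self, attr):
        return self.get(attr, None)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__

    # hand the instance __dict__ over explicitly so pickle never trips
    # over the permissive __getattr__ above
    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, state):
        self.__dict__ = state

line = AttrDict(action='+', content='print("hi")')
restored = pickle.loads(pickle.dumps(line))
assert restored.action == '+' and restored.content == 'print("hi")'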
@@ -206,6 +206,14 @@ class BaseRepository(object):
     def __ne__(self, other):
         return not self.__eq__(other)
 
+    def get_create_shadow_cache_pr_path(self, repo):
+        path = os.path.join(
+            os.path.dirname(self.path),
+            '.__shadow_diff_cache_repo_{}/'.format(repo.repo_id))
+        if not os.path.exists(path):
+            os.makedirs(path, 0755)
+        return path
+
     @classmethod
     def get_default_config(cls, default=None):
         config = Config()
@@ -745,6 +753,12 @@ class BaseCommit(object):
             'branch': self.branch
         }
 
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d.pop('_remote', None)
+        d.pop('repository', None)
+        return d
+
     def _get_refs(self):
         return {
             'branches': [self.branch] if self.branch else [],
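BaseCommit gets the same treatment for the commit objects that travel inside the cached 'commits' tuple: the _remote proxy and the back-reference to the repository are dropped from the pickled state, since they are connection-bound objects that should not (and generally cannot) be serialized. The general pattern, as a small self-contained sketch:

import pickle

class Record(object):
    def __init__(self, raw_id, remote):
        self.raw_id = raw_id
        self._remote = remote  # e.g. a live connection proxy: not picklable

    def __getstate__(self):
        state = self.__dict__.copy()
        state.pop('_remote', None)  # drop what cannot or should not be pickled
        return state

r = Record('abc123', remote=lambda: None)
restored = pickle.loads(pickle.dumps(r))
assert restored.raw_id == 'abc123' and not hasattr(restored, '_remote')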
@@ -430,6 +430,9 @@ class _BaseVcsSettingsForm(formencode.Sc
     vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
     vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
 
+    # cache
+    rhodecode_diff_cache = v.StringBoolean(if_missing=False)
+
 
 def ApplicationUiSettingsForm(localizer):
     _ = localizer
@@ -417,7 +417,9 @@ class VcsSettingsModel(object):
         'hg_use_rebase_for_merging',
         'hg_close_branch_before_merging',
         'git_use_rebase_for_merging',
-        'git_close_branch_before_merging')
+        'git_close_branch_before_merging',
+        'diff_cache',
+    )
 
     HOOKS_SETTINGS = (
         ('hooks', 'changegroup.repo_size'),
@@ -438,6 +440,7 @@ class VcsSettingsModel(object):
     GLOBAL_GIT_SETTINGS = (
         ('vcs_git_lfs', 'enabled'),
         ('vcs_git_lfs', 'store_location'))
+
     GLOBAL_SVN_SETTINGS = (
         ('vcs_svn_proxy', 'http_requests_enabled'),
         ('vcs_svn_proxy', 'http_server_url'))
@@ -571,6 +574,7 @@ class VcsSettingsModel(object):
         self._create_or_update_ui(
             self.repo_settings, *phases, value=safe_str(data[phases_key]))
 
+
     def create_or_update_global_hg_settings(self, data):
         largefiles, largefiles_store, phases, hgsubversion, evolve \
             = self.GLOBAL_HG_SETTINGS
@@ -312,6 +312,20 @@
     </div>
 % endif
 
+% if display_globals or repo_type in ['hg', 'git', 'svn']:
+    <div class="panel panel-default">
+        <div class="panel-heading" id="vcs-pull-requests-options">
+            <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
+        </div>
+        <div class="panel-body">
+            <div class="checkbox">
+                ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
+                <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
+            </div>
+        </div>
+    </div>
+% endif
+
 % if display_globals or repo_type in ['hg',]:
     <div class="panel panel-default">
         <div class="panel-heading" id="vcs-pull-requests-options">
@@ -213,7 +213,7 @@
 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
 ${cbdiffs.render_diffset_menu()}
 ${cbdiffs.render_diffset(
-    c.changes[c.commit.raw_id], commit=c.commit, use_comments=True)}
+    c.changes[c.commit.raw_id], commit=c.commit, use_comments=True, inline_comments=c.inline_comments)}
 </div>
 
 ## template for inline comment form
@@ -44,13 +44,15 @@ return '%s_%s_%i' % (h.safeid(filename),
 
 # special file-comments that were deleted in previous versions
 # it's used for showing outdated comments for deleted files in a PR
-deleted_files_comments=None
+deleted_files_comments=None,
+
+# for cache purpose
+inline_comments=None
 
 )">
-
 %if use_comments:
 <div id="cb-comments-inline-container-template" class="js-template">
-${inline_comments_container([])}
+${inline_comments_container([], inline_comments)}
 </div>
 <div class="js-template" id="cb-comment-inline-form-template">
 <div class="comment-inline-form ac">
@@ -211,9 +213,9 @@ collapse_all = len(diffset.files) > coll
     </td>
 </tr>
 %if c.diffmode == 'unified':
-    ${render_hunk_lines_unified(hunk, use_comments=use_comments)}
+    ${render_hunk_lines_unified(hunk, use_comments=use_comments, inline_comments=inline_comments)}
 %elif c.diffmode == 'sideside':
-    ${render_hunk_lines_sideside(hunk, use_comments=use_comments)}
+    ${render_hunk_lines_sideside(hunk, use_comments=use_comments, inline_comments=inline_comments)}
 %else:
 <tr class="cb-line">
     <td>unknown diff mode</td>
@@ -230,7 +232,7 @@ collapse_all = len(diffset.files) > coll
     <td class="cb-lineno cb-context"></td>
     <td class="cb-lineno cb-context"></td>
     <td class="cb-content cb-context">
-        ${inline_comments_container(comments)}
+        ${inline_comments_container(comments, inline_comments)}
     </td>
 </tr>
 %elif c.diffmode == 'sideside':
@@ -239,7 +241,7 @@ collapse_all = len(diffset.files) > coll
     <td class="cb-lineno cb-context"></td>
     <td class="cb-content cb-context">
         % if lineno.startswith('o'):
-            ${inline_comments_container(comments)}
+            ${inline_comments_container(comments, inline_comments)}
         % endif
     </td>
 
@@ -247,7 +249,7 @@ collapse_all = len(diffset.files) > coll
     <td class="cb-lineno cb-context"></td>
     <td class="cb-content cb-context">
         % if lineno.startswith('n'):
-            ${inline_comments_container(comments)}
+            ${inline_comments_container(comments, inline_comments)}
         % endif
     </td>
 </tr>
@@ -298,7 +300,7 @@ collapse_all = len(diffset.files) > coll
     <td class="cb-lineno cb-context"></td>
     <td class="cb-lineno cb-context"></td>
     <td class="cb-content cb-context">
-        ${inline_comments_container(comments_dict['comments'])}
+        ${inline_comments_container(comments_dict['comments'], inline_comments)}
     </td>
 </tr>
 %elif c.diffmode == 'sideside':
@@ -310,7 +312,7 @@ collapse_all = len(diffset.files) > coll
     <td class="cb-data cb-context"></td>
     <td class="cb-lineno cb-context"></td>
     <td class="cb-content cb-context">
-        ${inline_comments_container(comments_dict['comments'])}
+        ${inline_comments_container(comments_dict['comments'], inline_comments)}
     </td>
 </tr>
 %endif
@@ -484,12 +486,11 @@ from rhodecode.lib.diffs import NEW_FILE
 </%def>
 
 
-<%def name="inline_comments_container(comments)">
+<%def name="inline_comments_container(comments, inline_comments)">
 <div class="inline-comments">
     %for comment in comments:
-        ${commentblock.comment_block(comment, inline=True)}
+        ${commentblock.comment_block(comment, inline=True)}
     %endfor
-
     % if comments and comments[-1].outdated:
     <span class="btn btn-secondary cb-comment-add-button comment-outdated}"
           style="display: none;}">
@@ -505,8 +506,23 @@ from rhodecode.lib.diffs import NEW_FILE
 </div>
 </%def>
 
+<%!
+def get_comments_for(comments, filename, line_version, line_number):
+    if hasattr(filename, 'unicode_path'):
+        filename = filename.unicode_path
 
-<%def name="render_hunk_lines_sideside(hunk, use_comments=False)">
+    if not isinstance(filename, basestring):
+        return None
+
+    line_key = '{}{}'.format(line_version, line_number)
+    if comments and filename in comments:
+        file_comments = comments[filename]
+        if line_key in file_comments:
+            return file_comments[line_key]
+%>
+
+<%def name="render_hunk_lines_sideside(hunk, use_comments=False, inline_comments=None)">
+
 %for i, line in enumerate(hunk.sideside):
 <%
     old_line_anchor, new_line_anchor = None, None
@@ -521,12 +537,16 @@ from rhodecode.lib.diffs import NEW_FILE
     data-line-no="${line.original.lineno}"
     >
     <div>
-    %if line.original.comments:
-        <% has_outdated = any([x.outdated for x in line.original.comments]) %>
+    <% loc = None %>
+    %if line.original.get_comment_args:
+        <% loc = get_comments_for(inline_comments, *line.original.get_comment_args) %>
+    %endif
+    %if loc:
+        <% has_outdated = any([x.outdated for x in loc]) %>
     % if has_outdated:
-        <i title="${_('comments including outdated')}:${len(line.original.comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
+        <i title="${_('comments including outdated')}:${len(loc)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
     % else:
-        <i title="${_('comments')}: ${len(line.original.comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
+        <i title="${_('comments')}: ${len(loc)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
     % endif
     %endif
     </div>
@@ -548,20 +568,28 @@ from rhodecode.lib.diffs import NEW_FILE
         ${render_add_comment_button()}
     %endif
     <span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
-    %if use_comments and line.original.lineno and line.original.comments:
-        ${inline_comments_container(line.original.comments)}
+
+    %if use_comments and line.original.lineno and loc:
+        ${inline_comments_container(loc, inline_comments)}
     %endif
+
     </td>
     <td class="cb-data ${action_class(line.modified.action)}"
         data-line-no="${line.modified.lineno}"
         >
     <div>
-    %if line.modified.comments:
-        <% has_outdated = any([x.outdated for x in line.modified.comments]) %>
+
+    %if line.modified.get_comment_args:
+        <% lmc = get_comments_for(inline_comments, *line.modified.get_comment_args) %>
+    %else:
+        <% lmc = None%>
+    %endif
+    %if lmc:
+        <% has_outdated = any([x.outdated for x in lmc]) %>
     % if has_outdated:
-        <i title="${_('comments including outdated')}:${len(line.modified.comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
+        <i title="${_('comments including outdated')}:${len(lmc)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
     % else:
-        <i title="${_('comments')}: ${len(line.modified.comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
+        <i title="${_('comments')}: ${len(lmc)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
     % endif
     %endif
     </div>
@@ -583,8 +611,8 @@ from rhodecode.lib.diffs import NEW_FILE
         ${render_add_comment_button()}
     %endif
     <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
-    %if use_comments and line.modified.lineno and line.modified.comments:
-        ${inline_comments_container(line.modified.comments)}
+    %if use_comments and line.modified.lineno and lmc:
+        ${inline_comments_container(lmc, inline_comments)}
     %endif
     </td>
 </tr>
@@ -592,8 +620,8 @@ from rhodecode.lib.diffs import NEW_FILE
 </%def>
 
 
-<%def name="render_hunk_lines_unified(hunk, use_comments=False)">
-    %for old_line_no, new_line_no, action, content, comments in hunk.unified:
+<%def name="render_hunk_lines_unified(hunk, use_comments=False, inline_comments=None)">
+    %for old_line_no, new_line_no, action, content, comments_args in hunk.unified:
     <%
         old_line_anchor, new_line_anchor = None, None
         if old_line_no:
@@ -604,6 +632,13 @@ from rhodecode.lib.diffs import NEW_FILE
 <tr class="cb-line">
     <td class="cb-data ${action_class(action)}">
         <div>
+
+        %if comments_args:
+            <% comments = get_comments_for(inline_comments, *comments_args) %>
+        %else:
+            <% comments = None%>
+        %endif
+
         %if comments:
             <% has_outdated = any([x.outdated for x in comments]) %>
             % if has_outdated:
@@ -642,7 +677,7 @@ from rhodecode.lib.diffs import NEW_FILE
     %endif
     <span class="cb-code">${action} ${content or '' | n}</span>
     %if use_comments and comments:
-        ${inline_comments_container(comments)}
+        ${inline_comments_container(comments, inline_comments)}
     %endif
     </td>
 </tr>
@@ -570,7 +570,8 @@
     c.diffset, use_comments=True,
     collapse_when_files_over=30,
     disable_new_comments=not c.allowed_to_comment,
-    deleted_files_comments=c.deleted_files_comments)}
+    deleted_files_comments=c.deleted_files_comments,
+    inline_comments=c.inline_comments)}
 </div>
 % else:
 ## skipping commits we need to clear the view for missing commits