@@ -166,9 +166,15 b' startup.import_repos = false'
 ## change this to unique ID for security
 app_instance_uuid = rc-production
 
-## cut off limit for large diffs (size in bytes)
-cut_off_limit_diff = 1024000
-cut_off_limit_file = 256000
+## cut off limit for large diffs (size in bytes). If overall diff size on
+## commit, or pull request exceeds this limit this diff will be displayed
+## partially. E.g 512000 == 512Kb
+cut_off_limit_diff = 512000
+
+## cut off limit for large files inside diffs (size in bytes). Each individual
+## file inside diff which exceeds this limit will be displayed partially.
+## E.g 128000 == 128Kb
+cut_off_limit_file = 128000
 
 ## use cache version of scm repo everywhere
 vcs_full_cache = true
@@ -140,9 +140,15 b' startup.import_repos = false'
 ## change this to unique ID for security
 app_instance_uuid = rc-production
 
-## cut off limit for large diffs (size in bytes)
-cut_off_limit_diff = 1024000
-cut_off_limit_file = 256000
+## cut off limit for large diffs (size in bytes). If overall diff size on
+## commit, or pull request exceeds this limit this diff will be displayed
+## partially. E.g 512000 == 512Kb
+cut_off_limit_diff = 512000
+
+## cut off limit for large files inside diffs (size in bytes). Each individual
+## file inside diff which exceeds this limit will be displayed partially.
+## E.g 128000 == 128Kb
+cut_off_limit_file = 128000
 
 ## use cache version of scm repo everywhere
 vcs_full_cache = true
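
The two hunks above make the same change in both bundled .ini templates: the single cut-off setting is split into an overall-diff limit and a per-file limit, and the defaults drop to roughly 512 kB and 128 kB. As a rough illustration of how those byte values pair up with the diff_limit/file_limit attributes used by the DiffProcessor changes below (the setting names come from the hunks above; the helper itself is hypothetical, not RhodeCode code):

# Hypothetical helper, for illustration only: turns the .ini values from the
# hunks above into the (diff_limit, file_limit) byte counts used below.
def read_cut_off_limits(settings):
    """Return (diff_limit, file_limit) as integers of bytes."""
    diff_limit = int(settings.get('cut_off_limit_diff', 512000))   # ~512 kB per whole diff
    file_limit = int(settings.get('cut_off_limit_file', 128000))   # ~128 kB per single file
    return diff_limit, file_limit


# with the new defaults from the hunks above:
assert read_cut_off_limits({}) == (512000, 128000)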
@@ -227,6 +227,7 b' class DiffProcessor(object):'
         self.parsed = False
         self.parsed_diff = []
 
+        log.debug('Initialized DiffProcessor with %s mode', format)
         if format == 'gitdiff':
             self.differ = self._highlight_line_difflib
             self._parser = self._parse_gitdiff
@@ -496,36 +497,26 b' class DiffProcessor(object):'
 
         return diff_container(sorted(_files, key=sorter))
 
-
-    # FIXME: NEWDIFFS: dan: this replaces the old _escaper function
-    def _process_line(self, string):
-        """
-        Process a diff line, checks the diff limit
-
-        :param string:
-        """
-
-        self.cur_diff_size += len(string)
-
+    def _check_large_diff(self):
+        log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
         if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
-            raise DiffLimitExceeded('Diff Limit Exceeded')
-
-        return safe_unicode(string)
+            raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
 
     # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
     def _new_parse_gitdiff(self, inline_diff=True):
         _files = []
+
+        # this can be overriden later to a LimitedDiffContainer type
         diff_container = lambda arg: arg
+
         for chunk in self._diff.chunks():
             head = chunk.header
             log.debug('parsing diff %r' % head)
 
-            diff = imap(self._process_line, chunk.diff.splitlines(1))
             raw_diff = chunk.raw
             limited_diff = False
             exceeds_limit = False
-            # if 'empty_file_to_modify_and_rename' in head['a_path']:
-            #     1/0
+
             op = None
             stats = {
                 'added': 0,
@@ -542,19 +533,22 b' class DiffProcessor(object):'
             if head['b_mode']:
                 stats['new_mode'] = head['b_mode']
 
+            # delete file
             if head['deleted_file_mode']:
                 op = OPS.DEL
                 stats['binary'] = True
                 stats['ops'][DEL_FILENODE] = 'deleted file'
 
+            # new file
             elif head['new_file_mode']:
                 op = OPS.ADD
                 stats['binary'] = True
                 stats['old_mode'] = None
                 stats['new_mode'] = head['new_file_mode']
                 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
-            else:  # modify operation, can be copy, rename or chmod
 
+            # modify operation, can be copy, rename or chmod
+            else:
                 # CHMOD
                 if head['new_mode'] and head['old_mode']:
                     op = OPS.MOD
@@ -602,7 +596,27 b' class DiffProcessor(object):'
 
             # a real non-binary diff
             if head['a_file'] or head['b_file']:
+                diff = iter(chunk.diff.splitlines(1))
+
+                # append each file to the diff size
+                raw_chunk_size = len(raw_diff)
+
+                exceeds_limit = raw_chunk_size > self.file_limit
+                self.cur_diff_size += raw_chunk_size
+
                 try:
+                    # Check each file instead of the whole diff.
+                    # Diff will hide big files but still show small ones.
+                    # From the tests big files are fairly safe to be parsed
+                    # but the browser is the bottleneck.
+                    if not self.show_full_diff and exceeds_limit:
+                        log.debug('File `%s` exceeds current file_limit of %s',
+                                  safe_unicode(head['b_path']), self.file_limit)
+                        raise DiffLimitExceeded(
+                            'File Limit %s Exceeded', self.file_limit)
+
+                    self._check_large_diff()
+
                     raw_diff, chunks, _stats = self._new_parse_lines(diff)
                     stats['binary'] = False
                     stats['added'] = _stats[0]
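
The added lines above move the size checks to the top of the per-file parse: the raw chunk size is measured once, compared against file_limit, and only then is the overall diff total verified via _check_large_diff(). A worked example of the per-file rule with the new 128 kB default (the function is a hypothetical condensation of the added condition, not the actual method):

# Hypothetical condensation of the per-file cut-off added above; the numbers
# are illustrative.
def file_is_cut_off(raw_chunk_size, file_limit=128000, show_full_diff=False):
    """True when a single file's raw diff text exceeds the per-file limit."""
    exceeds_limit = raw_chunk_size > file_limit
    return not show_full_diff and exceeds_limit


assert file_is_cut_off(300000) is True    # ~300 kB file diff: shown partially
assert file_is_cut_off(50000) is False    # ~50 kB file diff: parsed in full
assert file_is_cut_off(300000, show_full_diff=True) is False  # full-diff view bypasses it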
@@ -610,22 +624,12 b' class DiffProcessor(object):'
                     # explicit mark that it's a modified file
                     if op == OPS.MOD:
                         stats['ops'][MOD_FILENODE] = 'modified file'
-                    exceeds_limit = len(raw_diff) > self.file_limit
-
-                    # changed from _escaper function so we validate size of
-                    # each file instead of the whole diff
-                    # diff will hide big files but still show small ones
-                    # from my tests, big files are fairly safe to be parsed
-                    # but the browser is the bottleneck
-                    if not self.show_full_diff and exceeds_limit:
-                        raise DiffLimitExceeded('File Limit Exceeded')
 
                 except DiffLimitExceeded:
                     diff_container = lambda _diff: \
                         LimitedDiffContainer(
                             self.diff_limit, self.cur_diff_size, _diff)
 
-                    exceeds_limit = len(raw_diff) > self.file_limit
                     limited_diff = True
                     chunks = []
 
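
The hunk above keeps the existing recovery path: when either limit raises DiffLimitExceeded, diff_container is rebound to a LimitedDiffContainer factory so the returned result carries the truncation information instead of the dropped chunks. A rough sketch of that swap with placeholder classes (only the constructor argument order comes from the hunk above; everything else is assumed):

# Placeholder sketch of the container swap; LimitedDiffContainer here is a
# simplified stand-in, not the real class.
class LimitedDiffContainer(list):
    def __init__(self, diff_limit, cur_diff_size, diff):
        super(LimitedDiffContainer, self).__init__(diff)
        self.diff_limit = diff_limit
        self.cur_diff_size = cur_diff_size
        self.limit_reached = True


diff_container = lambda arg: arg                      # default: plain pass-through
limit_hit = True                                      # pretend DiffLimitExceeded fired

if limit_hit:
    diff_container = lambda _diff: LimitedDiffContainer(512000, 600000, _diff)

result = diff_container(['file-entry-1', 'file-entry-2'])
assert getattr(result, 'limit_reached', False) is True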
@@ -636,19 +640,20 b' class DiffProcessor(object):'
                     stats['ops'][BIN_FILENODE] = 'binary diff hidden'
                     chunks = []
 
+            # Hide content of deleted node by setting empty chunks
             if chunks and not self.show_full_diff and op == OPS.DEL:
                 # if not full diff mode show deleted file contents
                 # TODO: anderson: if the view is not too big, there is no way
                 # to see the content of the file
                 chunks = []
 
-            chunks.insert(0, [{'old_lineno': '',
-                               'new_lineno': '',
-                               'action': Action.CONTEXT,
-                               'line': msg,
-                               } for _op, msg in stats['ops'].iteritems()
-                               if _op not in [MOD_FILENODE]])
-
+            chunks.insert(
+                0, [{'old_lineno': '',
+                     'new_lineno': '',
+                     'action': Action.CONTEXT,
+                     'line': msg,
+                     } for _op, msg in stats['ops'].iteritems()
+                     if _op not in [MOD_FILENODE]])
 
             original_filename = safe_unicode(head['a_path'])
             _files.append({
@@ -664,7 +669,6 b' class DiffProcessor(object):'
                 'is_limited_diff': limited_diff,
             })
 
-
         sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
                                OPS.DEL: 2}.get(info['operation'])
 
@@ -766,18 +770,19 b' class DiffProcessor(object):'
         return ''.join(raw_diff), chunks, stats
 
     # FIXME: NEWDIFFS: dan: this replaces _parse_lines
-    def _new_parse_lines(self, diff):
+    def _new_parse_lines(self, diff_iter):
         """
         Parse the diff an return data for the template.
         """
 
-        lineiter = iter(diff)
         stats = [0, 0]
         chunks = []
         raw_diff = []
 
+        diff_iter = imap(lambda s: safe_unicode(s), diff_iter)
+
         try:
-            line = lineiter.next()
+            line = diff_iter.next()
 
             while line:
                 raw_diff.append(line)
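
With this hunk _new_parse_lines takes the raw line iterator directly and wraps it in imap(safe_unicode, ...), so unicode conversion happens lazily, line by line, as the parser advances instead of up front for the whole diff. A tiny Python 2 sketch of the same idiom (itertools.imap and .next() match the codebase; to_unicode is a stand-in for safe_unicode):

# Python 2 sketch of the lazy decoding idiom used above; to_unicode stands in
# for safe_unicode.
from itertools import imap


def to_unicode(s):
    """Decode byte strings on demand, pass unicode through untouched."""
    return s.decode('utf-8', 'replace') if isinstance(s, str) else s


raw_lines = iter(['+line one\n', '-line two\n'])
diff_iter = imap(lambda s: to_unicode(s), raw_lines)

first = diff_iter.next()               # decoding happens only when a line is pulled
assert isinstance(first, unicode) and first.startswith(u'+')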
@@ -808,7 +813,7 b' class DiffProcessor(object):'
                 old_end += old_line
                 new_end += new_line
 
-                line = lineiter.next()
+                line = diff_iter.next()
 
                 while old_line < old_end or new_line < new_end:
                     command = ' '
@@ -843,7 +848,7 b' class DiffProcessor(object):'
                         })
                         raw_diff.append(line)
 
-                        line = lineiter.next()
+                        line = diff_iter.next()
 
                         if self._newline_marker.match(line):
                             # we need to append to lines, since this is not
@@ -864,6 +869,7 b' class DiffProcessor(object):'
 
         except StopIteration:
             pass
+
         return ''.join(raw_diff), chunks, stats
 
     def _safe_id(self, idstring):