code: fixes to escape characters improperly used
super-admin
r5149:ef80e1c5 default
@@ -816,7 +816,7 b' class DiffProcessor(object):'
816 816 return b''.join(raw_diff), chunks, stats
817 817
818 818 def _safe_id(self, idstring):
819 """Make a string safe for including in an id attribute.
819 r"""Make a string safe for including in an id attribute.
820 820
821 821 The HTML spec says that id attributes 'must begin with
822 822 a letter ([A-Za-z]) and may be followed by any number
@@ -828,8 +828,8 b' class DiffProcessor(object):'
828 828 Whitespace is transformed into underscores, and then
829 829 anything which is not a hyphen or a character that
830 830 matches \w (alphanumerics and underscore) is removed.
831 """
831 832
832 """
833 833 # Transform all whitespace to underscore
834 834 idstring = re.sub(r'\s', "_", f'{idstring}')
835 835 # Remove everything that is not a hyphen or a member of \w
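As a standalone illustration of the sanitization the docstring describes: the whitespace step below is copied from the code shown in this hunk, while the removal step is assumed from the comment and may differ from the real method.

```python
import re

def safe_id(idstring):
    """Minimal sketch of the id sanitization described above (not the class method itself)."""
    # Transform all whitespace to underscore (as in the code shown)
    idstring = re.sub(r'\s', "_", f'{idstring}')
    # Remove everything that is not a hyphen or a member of \w
    # (assumed from the comment above; the real second step may differ)
    idstring = re.sub(r'[^\w-]', "", idstring)
    return idstring

print(safe_id("chunk 1: file.py"))  # -> "chunk_1_filepy"
```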
@@ -1038,33 +1038,71 b' def gravatar_with_user(request, author, '
1038 1038 return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip)
1039 1039
1040 1040
1041 tags_paterns = OrderedDict((
1042 ('lang', (re.compile(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+\.]*)\]'),
1043 '<div class="metatag" tag="lang">\\2</div>')),
1044
1045 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
1046 '<div class="metatag" tag="see">see: \\1 </div>')),
1047
1048 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
1049 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),
1050
1051 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
1052 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),
1053
1054 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
1055 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
1056
1057 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
1058 '<div class="metatag" tag="state \\1">\\1</div>')),
1059
1041 tags_patterns = OrderedDict(
1042 (
1043 (
1044 "lang",
1045 (
1046 re.compile(r"\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]"),
1047 '<div class="metatag" tag="lang">\\2</div>',
1048 ),
1049 ),
1050 (
1051 "see",
1052 (
1053 re.compile(r"\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]"),
1054 '<div class="metatag" tag="see">see: \\1 </div>',
1055 ),
1056 ),
1057 (
1058 "url",
1059 (
1060 re.compile(
1061 r"\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]"
1062 ),
1063 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>',
1064 ),
1065 ),
1066 (
1067 "license",
1068 (
1069 re.compile(
1070 r"\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]"
1071 ),
1072 r'<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>',
1073 ),
1074 ),
1075 (
1076 "ref",
1077 (
1078 re.compile(
1079 r"\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]"
1080 ),
1081 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>',
1082 ),
1083 ),
1084 (
1085 "state",
1086 (
1087 re.compile(r"\[(stable|featured|stale|dead|dev|deprecated)\]"),
1088 '<div class="metatag" tag="state \\1">\\1</div>',
1089 ),
1090 ),
1060 1091 # label in grey
1061 ('label', (re.compile(r'\[([a-z]+)\]'),
1062 '<div class="metatag" tag="label">\\1</div>')),
1063
1092 (
1093 "label",
1094 (re.compile(r"\[([a-z]+)\]"), '<div class="metatag" tag="label">\\1</div>'),
1095 ),
1064 1096 # generic catch all in grey
1065 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
1066 '<div class="metatag" tag="generic">\\1</div>')),
1067 ))
1097 (
1098 "generic",
1099 (
1100 re.compile(r"\[([a-zA-Z0-9\.\-\_]+)\]"),
1101 '<div class="metatag" tag="generic">\\1</div>',
1102 ),
1103 ),
1104 )
1105 )
1068 1106
1069 1107
1070 1108 def extract_metatags(value):
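For readers of this hunk, a quick standalone illustration of how one of these pattern/replacement pairs behaves; only the 'state' entry from the table above is copied here, and nothing else from the module is reproduced.

```python
import re

# One entry from tags_patterns: the 'state' metatag
state_pat = re.compile(r"\[(stable|featured|stale|dead|dev|deprecated)\]")
state_html = '<div class="metatag" tag="state \\1">\\1</div>'

text = "core utilities [stable]"
print(state_pat.sub(state_html, text))
# -> 'core utilities <div class="metatag" tag="state stable">stable</div>'
```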
@@ -1075,7 +1113,7 b' def extract_metatags(value):'
1075 1113 if not value:
1076 1114 return tags, ''
1077 1115
1078 for key, val in list(tags_paterns.items()):
1116 for key, val in list(tags_patterns.items()):
1079 1117 pat, replace_html = val
1080 1118 tags.extend([(key, x.group()) for x in pat.finditer(value)])
1081 1119 value = pat.sub('', value)
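A minimal sketch of what the loop above does with the `tags_patterns` table, assuming the function finishes by returning the collected tags alongside the stripped text (as the earlier `return tags, ''` short-circuit suggests). Note that the patterns match the HTML-escaped form of the arrow (`=&gt;`).

```python
# Standalone re-run of the loop body, using the tags_patterns table defined above
value = "Utility scripts [stable] [lang =&gt; python]"
tags = []
for key, (pat, replace_html) in tags_patterns.items():
    tags.extend([(key, m.group()) for m in pat.finditer(value)])
    value = pat.sub('', value)

# tags  -> [('lang', '[lang =&gt; python]'), ('state', '[stable]')]
# value -> the input text with both metatags stripped out
```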
@@ -1091,7 +1129,7 b' def style_metatag(tag_type, value):'
1091 1129 return ''
1092 1130
1093 1131 html_value = value
1094 tag_data = tags_paterns.get(tag_type)
1132 tag_data = tags_patterns.get(tag_type)
1095 1133 if tag_data:
1096 1134 pat, replace_html = tag_data
1097 1135 # convert to plain `str` instead of a markup tag to be used in
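The substitution step itself falls outside this hunk; the following is only a guess at the likely flow, sketched under the assumption that the function renders the raw tag text into HTML via `pat.sub`.

```python
# Hypothetical sketch -- the actual substitution step is not shown in this hunk
def style_metatag_sketch(tag_type, value):
    if not value:
        return ''
    html_value = value
    tag_data = tags_patterns.get(tag_type)
    if tag_data:
        pat, replace_html = tag_data
        # assumed: render the raw tag text into its HTML form
        html_value = pat.sub(replace_html, str(value))
    return html_value

# style_metatag_sketch('state', '[stable]')
# -> '<div class="metatag" tag="state stable">stable</div>'  (assumed output)
```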
@@ -1530,7 +1568,7 b' def urlify_text(text_, safe=True, **href'
1530 1568 """
1531 1569
1532 1570 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1533 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1571 r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1534 1572
1535 1573 def url_func(match_obj):
1536 1574 url_full = match_obj.groups()[0]
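A self-contained sketch of how this pattern drives the URL substitution. The anchor markup mirrors the expected values in the tests below; the real `url_func` appears to also handle HTML escaping (see the `&amp;` expectations in the tests) and extra href keyword arguments, which are omitted here.

```python
import re

url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                     r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

def url_func(match_obj):
    url_full = match_obj.groups()[0]
    # simplified: no escaping or extra href handling
    return f'<a href="{url_full}">{url_full}</a>'

print(url_pat.sub(url_func, "see https://rc.com/test for details"))
# -> 'see <a href="https://rc.com/test">https://rc.com/test</a> for details'
```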
@@ -148,7 +148,7 b' def normalize_text_for_matching(x):'
148 148 Replaces all non-alphanumeric characters with spaces and lowercases the string,
149 149 useful for comparing two text strings without punctuation
150 150 """
151 return re.sub(r'[^\w]', ' ', x.lower())
151 return re.sub(r'\W', ' ', x.lower())
152 152
153 153
154 154 def get_matching_line_offsets(lines, terms=None, markers=None):
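The change from `[^\w]` to `\W` is purely notational; the two character classes are equivalent. A small usage sketch:

```python
import re

def normalize_text_for_matching(x):
    # \W is equivalent to [^\w]: anything that is not alphanumeric or underscore
    return re.sub(r'\W', ' ', x.lower())

print(normalize_text_for_matching("Hello, World! v1.2"))
# -> 'hello  world  v1 2'
```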
@@ -28,14 +28,14 b' from rhodecode.tests import no_newline_i'
28 28
29 29
30 30 @pytest.mark.parametrize('url, expected_url', [
31 ('http://rc.com', '<a href="http://rc.com">http://rc.com</a>'),
32 ('http://rc.com/test', '<a href="http://rc.com/test">http://rc.com/test</a>'),
33 ('http://rc.com/!foo', '<a href="http://rc.com/!foo">http://rc.com/!foo</a>'),
34 ('http://rc.com/&foo', '<a href="http://rc.com/&amp;foo">http://rc.com/&amp;foo</a>'),
35 ('http://rc.com/?foo-1&bar=1', '<a href="http://rc.com/?foo-1&amp;bar=1">http://rc.com/?foo-1&amp;bar=1</a>'),
36 ('http://rc.com?foo-1&bar=1', '<a href="http://rc.com?foo-1&amp;bar=1">http://rc.com?foo-1&amp;bar=1</a>'),
37 ('http://rc.com/#foo', '<a href="http://rc.com/#foo">http://rc.com/#foo</a>'),
38 ('http://rc.com/@foo', '<a href="http://rc.com/@foo">http://rc.com/@foo</a>'),
31 (r'https://rc.com', '<a href="https://rc.com">https://rc.com</a>'),
32 (r'https://rc.com/test', '<a href="https://rc.com/test">https://rc.com/test</a>'),
33 (r'https://rc.com/!foo', '<a href="https://rc.com/!foo">https://rc.com/!foo</a>'),
34 (r'https://rc.com/&foo', '<a href="https://rc.com/&amp;foo">https://rc.com/&amp;foo</a>'),
35 (r'https://rc.com/?foo-1&bar=1', '<a href="https://rc.com/?foo-1&amp;bar=1">https://rc.com/?foo-1&amp;bar=1</a>'),
36 (r'https://rc.com?foo-1&bar=1', '<a href="https://rc.com?foo-1&amp;bar=1">https://rc.com?foo-1&amp;bar=1</a>'),
37 (r'https://rc.com/#foo', '<a href="https://rc.com/#foo">https://rc.com/#foo</a>'),
38 (r'https://rc.com/@foo', '<a href="https://rc.com/@foo">https://rc.com/@foo</a>'),
39 39 ])
40 40 def test_urlify_text(url, expected_url):
41 41 assert helpers.urlify_text(url) == expected_url
@@ -95,12 +95,12 b' def test_format_binary():'
95 95
96 96
97 97 @pytest.mark.parametrize('text_string, pattern, expected', [
98 ('No issue here', '(?:#)(?P<issue_id>\d+)', []),
98 ('No issue here', r'(?:#)(?P<issue_id>\d+)', []),
99 99 ('Fix #42', '(?:#)(?P<issue_id>\d+)',
100 [{'url': 'http://r.io/{repo}/i/42', 'id': '42'}]),
100 [{'url': 'https://r.io/{repo}/i/42', 'id': '42'}]),
101 101 ('Fix #42, #53', '(?:#)(?P<issue_id>\d+)', [
102 {'url': 'http://r.io/{repo}/i/42', 'id': '42'},
103 {'url': 'http://r.io/{repo}/i/53', 'id': '53'}]),
102 {'url': 'https://r.io/{repo}/i/42', 'id': '42'},
103 {'url': 'https://r.io/{repo}/i/53', 'id': '53'}]),
104 104 ('Fix #42', '(?:#)?<issue_id>\d+)', []), # Broken regex
105 105 ])
106 106 def test_extract_issues(backend, text_string, pattern, expected):
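A standalone sketch of how the `(?:#)(?P<issue_id>\d+)` pattern and the `${issue_id}` placeholder from these fixtures fit together. `string.Template` and the repo name `myrepo` are used purely for illustration and are not necessarily what the real helper does.

```python
import re
from string import Template

pattern = r'(?:#)(?P<issue_id>\d+)'          # raw string: no \d escape warning
url_tmpl = Template('https://r.io/${repo}/i/${issue_id}')

for match in re.finditer(pattern, 'Fix #42, #53'):
    issue_id = match.group('issue_id')
    # illustrative expansion of the placeholders; the real helper may differ
    print({'id': issue_id,
           'url': url_tmpl.substitute(repo='myrepo', issue_id=issue_id)})
# {'id': '42', 'url': 'https://r.io/myrepo/i/42'}
# {'id': '53', 'url': 'https://r.io/myrepo/i/53'}
```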
@@ -109,7 +109,7 b' def test_extract_issues(backend, text_st'
109 109 '123': {
110 110 'uid': '123',
111 111 'pat': pattern,
112 'url': 'http://r.io/${repo}/i/${issue_id}',
112 'url': r'https://r.io/${repo}/i/${issue_id}',
113 113 'pref': '#',
114 114 'desc': 'Test Pattern'
115 115 }
@@ -36,29 +36,29 b' class TestGroupNameType(object):'
36 36 assert result == expected
37 37
38 38 @pytest.mark.parametrize('given, expected', [
39 ('//group1/group2//', 'group1/group2'),
40 ('//group1///group2//', 'group1/group2'),
41 ('group1/group2///group3', 'group1/group2/group3'),
42 ('v1.2', 'v1.2'),
43 ('/v1.2', 'v1.2'),
44 ('.dirs', '.dirs'),
45 ('..dirs', '.dirs'),
46 ('./..dirs', '.dirs'),
47 ('dir/;name;/;[];/sub', 'dir/name/sub'),
48 (',/,/,d,,,', 'd'),
49 ('/;/#/,d,,,', 'd'),
50 ('long../../..name', 'long./.name'),
51 ('long../..name', 'long./.name'),
52 ('../', ''),
53 ('\'../"../', ''),
54 ('c,/,/..//./,c,,,/.d/../.........c', 'c/c/.d/.c'),
55 ('c,/,/..//./,c,,,', 'c/c'),
56 ('d../..d', 'd./.d'),
57 ('d../../d', 'd./d'),
39 (r'//group1/group2//', 'group1/group2'),
40 (r'//group1///group2//', 'group1/group2'),
41 (r'group1/group2///group3', 'group1/group2/group3'),
42 (r'v1.2', 'v1.2'),
43 (r'/v1.2', 'v1.2'),
44 (r'.dirs', '.dirs'),
45 (r'..dirs', '.dirs'),
46 (r'./..dirs', '.dirs'),
47 (r'dir/;name;/;[];/sub', 'dir/name/sub'),
48 (r',/,/,d,,,', 'd'),
49 (r'/;/#/,d,,,', 'd'),
50 (r'long../../..name', 'long./.name'),
51 (r'long../..name', 'long./.name'),
52 (r'../', ''),
53 (r'\'../"../', ''),
54 (r'c,/,/..//./,c,,,/.d/../.........c', 'c/c/.d/.c'),
55 (r'c,/,/..//./,c,,,', 'c/c'),
56 (r'd../..d', 'd./.d'),
57 (r'd../../d', 'd./d'),
58 58
59 ('d\;\./\,\./d', 'd./d'),
60 ('d\.\./\.\./d', 'd./d'),
61 ('d\.\./\..\../d', 'd./d'),
59 (r'd\;\./\,\./d', 'd./d'),
60 (r'd\.\./\.\./d', 'd./d'),
61 (r'd\.\./\..\../d', 'd./d'),
62 62 ])
63 63 def test_deserialize_clean_up_name(self, given, expected):
64 64 class TestSchema(colander.Schema):
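The raw-string prefixes added throughout these parametrize blocks are meant to silence invalid-escape warnings without changing the test data. A quick check of that equivalence, with one caveat about quotes:

```python
# Unrecognized escapes such as \; \, and \. keep their backslash in a normal
# string literal, but newer Python versions emit a DeprecationWarning or
# SyntaxWarning for them; a raw string expresses the same value silently.
plain = 'd\;\./\,\./d'   # warns on recent Python versions
raw = r'd\;\./\,\./d'    # identical value, no warning
assert plain == raw

# Caveat: a backslash before a quote behaves differently. r"\'" is two
# characters (backslash + quote) while "\'" is just the quote, so literals
# that escape quotes deserve a second look when converted to raw strings.
```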