# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import pytest
from pygments.lexers import get_lexer_by_name

from rhodecode.tests import no_newline_id_generator
from rhodecode.lib.codeblocks import (
tokenize_string, split_token_stream, rollup_tokenstream,
render_tokenstream)
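

# How these helpers compose, as exercised by the tests below (sketch only;
# the names are the real imports, the composition is inferred from usage):
#
#     lexer = get_lexer_by_name('python')
#     tokens = tokenize_string(content, lexer)     # [(css_class, text), ...]
#     lines = split_token_stream(tokens, content)  # one token list per line
#     html = render_tokenstream(tokens)            # '<span class=...>' markup
#
# rollup_tokenstream() merges adjacent same-class tokens and appears to back
# the span grouping seen in render_tokenstream()'s output.
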
class TestTokenizeString(object):
python_code = '''
import this
var = 6
    print("this")

'''
def test_tokenize_as_python(self):
lexer = get_lexer_by_name('python')
tokens = list(tokenize_string(self.python_code, lexer))
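        # The expected classes are Pygments' short HTML/CSS class names, e.g.
        # 'w' Whitespace, 'kn' Keyword.Namespace, 'nn' Name.Namespace,
        # 'n' Name, 'o' Operator, 'mi' Number.Integer, 'nb' Name.Builtin,
        # 'p' Punctuation, 's2' String.Double.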
        expected_tokens = [
('w', '\n'),
            ('', '    '),
('kn', 'import'),
('', ' '),
('nn', 'this'),
('w', '\n'),
('w', '\n'),
            ('', '    '),
('n', 'var'),
('', ' '),
('o', '='),
('', ' '),
('mi', '6'),
('w', '\n'),
            ('', '    '),
('nb', 'print'),
('p', '('),
('s2', '"'),
('s2', 'this'),
('s2', '"'),
('p', ')'),
('w', '\n'),
('w', '\n'),
            ('', '    ')
]

        assert tokens == expected_tokens
def test_tokenize_as_text(self):
lexer = get_lexer_by_name('text')
tokens = list(tokenize_string(self.python_code, lexer))
assert tokens == [
('',
             '\n    import this\n\n    var = 6\n    print("this")\n\n    ')
        ]
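

# split_token_stream() splits a flat (class, text) token stream on newlines
# into one token list per rendered line. A '\n' terminates the current line,
# so a trailing newline always yields one extra, empty line (see the
# *_single and *_repeat tests below).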
class TestSplitTokenStream(object):
def test_split_token_stream(self):
        tokens = [('type1', 'some\ntext'), ('type2', 'more\n')]
content = [x + y for x, y in tokens]
lines = list(split_token_stream(tokens, content))
assert lines == [
            [('type1', 'some')],
[('type1', 'text'), ('type2', 'more')],
[('type2', '')],
        ]
def test_split_token_stream_single(self):
        tokens = [('type1', '\n')]
content = [x + y for x, y in tokens]
lines = list(split_token_stream(tokens, content))

        assert lines == [
[('type1', '')],
[('type1', '')],
]
def test_split_token_stream_single_repeat(self):
        tokens = [('type1', '\n\n\n')]
content = [x + y for x, y in tokens]
lines = list(split_token_stream(tokens, content))

        assert lines == [
[('type1', '')],
[('type1', '')],
[('type1', '')],
[('type1', '')],
]
def test_split_token_stream_multiple_repeat(self):
        tokens = [('type1', '\n\n'), ('type2', '\n\n')]
content = [x + y for x, y in tokens]
        lines = list(split_token_stream(tokens, content))
        assert lines == [
[('type1', '')],
[('type1', '')],
[('type1', ''), ('type2', '')],
[('type2', '')],
[('type2', '')],
]
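
    # With an empty token stream, split_token_stream still emits the raw
    # content as a single untyped token rather than losing it.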
    def test_no_tokens_by_content(self):
tokens = []
        content = '\ufeff'
        lines = list(split_token_stream(tokens, content))
assert lines == [
[('', content)],
]
def test_no_tokens_by_valid_content(self):
from pygments.lexers.css import CssLexer
        content = '\ufeff table.dataTable'
        tokens = tokenize_string(content, CssLexer())
lines = list(split_token_stream(tokens, content))
assert lines == [
            [('w', ' '),
('nt', 'table'),
('p', '.'),
('nc', 'dataTable')],
        ]
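

# rollup_tokenstream() groups runs of adjacent tokens sharing a token class
# into a single (class, [(op, text), ...]) entry; within one class, adjacent
# segments carrying the same op tag are merged into one segment.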
class TestRollupTokens(object):
@pytest.mark.parametrize('tokenstream,output', [
([],
[]),
([('A', 'hell'), ('A', 'o')], [
('A', [
('', 'hello')]),
]),
([('A', 'hell'), ('B', 'o')], [
('A', [
('', 'hell')]),
('B', [
('', 'o')]),
]),
([('A', 'hel'), ('A', 'lo'), ('B', ' '), ('A', 'there')], [
('A', [
('', 'hello')]),
('B', [
('', ' ')]),
('A', [
('', 'there')]),
]),
])
def test_rollup_tokenstream_without_ops(self, tokenstream, output):
assert list(rollup_tokenstream(tokenstream)) == output
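
    # The 3-tuple form carries an op tag per token: (class, op, text),
    # where op is '', 'ins' or 'del'.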
@pytest.mark.parametrize('tokenstream,output', [
([],
[]),
([('A', '', 'hell'), ('A', '', 'o')], [
('A', [
('', 'hello')]),
]),
([('A', '', 'hell'), ('B', '', 'o')], [
('A', [
('', 'hell')]),
('B', [
('', 'o')]),
]),
([('A', '', 'h'), ('B', '', 'e'), ('C', '', 'y')], [
('A', [
('', 'h')]),
('B', [
('', 'e')]),
('C', [
('', 'y')]),
]),
([('A', '', 'h'), ('A', '', 'e'), ('C', '', 'y')], [
('A', [
('', 'he')]),
('C', [
('', 'y')]),
]),
([('A', 'ins', 'h'), ('A', 'ins', 'e')], [
('A', [
('ins', 'he')
]),
]),
([('A', 'ins', 'h'), ('A', 'del', 'e')], [
('A', [
('ins', 'h'),
('del', 'e')
]),
]),
([('A', 'ins', 'h'), ('B', 'del', 'e'), ('B', 'del', 'y')], [
('A', [
('ins', 'h'),
]),
('B', [
('del', 'ey'),
]),
]),
([('A', 'ins', 'h'), ('A', 'del', 'e'), ('B', 'del', 'y')], [
('A', [
('ins', 'h'),
('del', 'e'),
]),
('B', [
('del', 'y'),
]),
]),
([('A', '', 'some'), ('A', 'ins', 'new'), ('A', '', 'name')], [
('A', [
('', 'some'),
('ins', 'new'),
('', 'name'),
]),
]),
])
def test_rollup_tokenstream_with_ops(self, tokenstream, output):
assert list(rollup_tokenstream(tokenstream)) == output
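

# render_tokenstream() renders a token stream to HTML: each rolled-up token
# class becomes a <span class="...">, and 'ins'/'del' op segments become
# nested <ins>/<del> elements inside that span.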
class TestRenderTokenStream(object):
@pytest.mark.parametrize('tokenstream,output', [
(
[],
'',
),
(
            [('', '', '')],
            '<span></span>',
),
(
            [('', '', 'text')],
            '<span>text</span>',
),
(
            [('A', '', '')],
            '<span class="A"></span>',
),
(
            [('A', '', 'hello')],
            '<span class="A">hello</span>',
),
(
            [('A', '', 'hel'), ('A', '', 'lo')],
            '<span class="A">hello</span>',
),
(
            [('A', '', 'two\n'), ('A', '', 'lines')],
            '<span class="A">two\nlines</span>',
        ),
(
            [('A', '', '\nthree\n'), ('A', '', 'lines')],
            '<span class="A">\nthree\nlines</span>',
        ),
(
            [('', '', '\n'), ('A', '', 'line')],
            '<span>\n</span><span class="A">line</span>',
        ),
(
            [('', 'ins', '\n'), ('A', '', 'line')],
            '<span><ins>\n</ins></span><span class="A">line</span>',
        ),
(
            [('A', '', 'hel'), ('A', 'ins', 'lo')],
            '<span class="A">hel<ins>lo</ins></span>',
),
(
            [('A', '', 'hel'), ('A', 'ins', 'l'), ('A', 'ins', 'o')],
            '<span class="A">hel<ins>lo</ins></span>',
),
(
            [('A', '', 'hel'), ('A', 'ins', 'l'), ('A', 'del', 'o')],
            '<span class="A">hel<ins>l</ins><del>o</del></span>',
),
(
            [('A', '', 'hel'), ('B', '', 'lo')],
            '<span class="A">hel</span><span class="B">lo</span>',
),
(
            [('A', '', 'hel'), ('B', 'ins', 'lo')],
            '<span class="A">hel</span><span class="B"><ins>lo</ins></span>',
),
    ], ids=no_newline_id_generator)
    def test_render_tokenstream_with_ops(self, tokenstream, output):
html = render_tokenstream(tokenstream)
assert html == output
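
    # The without-ops variant feeds plain (class, text) 2-tuples;
    # render_tokenstream accepts both tuple shapes.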
@pytest.mark.parametrize('tokenstream,output', [
(
            [('A', 'hel'), ('A', 'lo')],
            '<span class="A">hello</span>',
),
(
            [('A', 'hel'), ('A', 'l'), ('A', 'o')],
            '<span class="A">hello</span>',
),
(
            [('A', 'hel'), ('A', 'l'), ('A', 'o')],
            '<span class="A">hello</span>',
),
(
            [('A', 'hel'), ('B', 'lo')],
            '<span class="A">hel</span><span class="B">lo</span>',
),
(
            [('A', 'hel'), ('B', 'lo')],
            '<span class="A">hel</span><span class="B">lo</span>',
),
])
def test_render_tokenstream_without_ops(self, tokenstream, output):
html = render_tokenstream(tokenstream)
assert html == output
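

# Illustrative end-to-end sketch (not part of the original suite): it only
# exercises the pipeline shape shown in the tests above, with deliberately
# loose assertions since exact token output depends on the Pygments version.
def test_pipeline_smoke_sketch():
    lexer = get_lexer_by_name('python')
    content = 'a = 1\n'
    tokens = list(tokenize_string(content, lexer))
    # A trailing newline yields one extra, empty final line.
    lines = list(split_token_stream(tokens, content))
    assert len(lines) == 2
    # render_tokenstream accepts the plain 2-tuple token shape directly.
    html = render_tokenstream(tokens)
    assert html.startswith('<span') and 'a' in html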