"""Test lexers module"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase

from pygments import __version__ as pygments_version
from pygments.token import Token
from pygments.lexers import BashLexer

from .. import lexers
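
# Pygments 2.14 started emitting some whitespace as Token.Text.Whitespace
# instead of Token.Text; this flag lets the expected tokens below adjust.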
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)


class TestLexers(TestCase):
    """Collection of lexers tests"""
def setUp(self):
self.lexer = lexers.IPythonLexer()
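        # BashLexer output is used as the reference for shell-escape fragments.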
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
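        # A shell escape: '!' becomes an Operator token and the rest of the
        # line is lexed like bash.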
fragment = '!echo $HOME\n'
        bash_tokens = [
            (Token.Operator, '!'),
]
        bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
ipylex_token = list(self.lexer.get_tokens(fragment))
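        # The final token (the trailing newline) can differ between Pygments
        # versions, so comparisons drop it with [:-1].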
assert bash_tokens[:-1] == ipylex_token[:-1]

        fragment_2 = "!" + fragment
        tokens_2 = [
(Token.Operator, '!!'),
        ] + bash_tokens[1:]
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment_2 = '\t %%!\n' + fragment[1:]
tokens_2 = [
(Token.Text, '\t '),
(Token.Operator, '%%!'),
(Token.Text, '\n'),
        ] + bash_tokens[1:]
assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

        fragment_2 = 'x = ' + fragment
tokens_2 = [
(Token.Name, 'x'),
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
        ] + bash_tokens
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment_2 = 'x, = ' + fragment
tokens_2 = [
(Token.Name, 'x'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
        ] + bash_tokens
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment_2 = 'x, = %sx ' + fragment[1:]
tokens_2 = [
(Token.Name, 'x'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
(Token.Operator, '%'),
(Token.Keyword, 'sx'),
(Token.Text, ' '),
        ] + bash_tokens[1:]
if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
tokens_2[7] = (Token.Text.Whitespace, " ")
assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment_2 = 'f = %R function () {}\n'
tokens_2 = [
(Token.Name, 'f'),
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
(Token.Operator, '%'),
(Token.Keyword, 'R'),
(Token.Text, ' function () {}\n'),
]
        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

        fragment_2 = '\t%%xyz\n$foo\n'
tokens_2 = [
(Token.Text, '\t'),
(Token.Operator, '%%'),
(Token.Keyword, 'xyz'),
(Token.Text, '\n$foo\n'),
]
        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))

        fragment_2 = '%system?\n'
tokens_2 = [
(Token.Operator, '%'),
(Token.Keyword, 'system'),
(Token.Operator, '?'),
(Token.Text, '\n'),
]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
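
        # '!=' here is the comparison operator, not a shell escape.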
fragment_2 = 'x != y\n'
tokens_2 = [
(Token.Name, 'x'),
(Token.Text, ' '),
(Token.Operator, '!='),
(Token.Text, ' '),
(Token.Name, 'y'),
(Token.Text, '\n'),
]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment_2 = ' ?math.sin\n'
tokens_2 = [
(Token.Text, ' '),
(Token.Operator, '?'),
(Token.Text, 'math.sin'),
(Token.Text, '\n'),
]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]

        fragment = ' *int*?\n'
tokens = [
(Token.Text, ' *int*'),
(Token.Operator, '?'),
(Token.Text, '\n'),
]
        assert tokens == list(self.lexer.get_tokens(fragment))
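
        # Cell magics: the cell body is highlighted by a sub-lexer
        # (Python for %%writefile and %%timeit, HTML for %%HTML).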
fragment = '%%writefile -a foo.py\nif a == b:\n pass'
tokens = [
(Token.Operator, '%%writefile'),
(Token.Text, ' -a foo.py\n'),
(Token.Keyword, 'if'),
(Token.Text, ' '),
(Token.Name, 'a'),
(Token.Text, ' '),
(Token.Operator, '=='),
(Token.Text, ' '),
(Token.Name, 'b'),
(Token.Punctuation, ':'),
(Token.Text, '\n'),
(Token.Text, ' '),
(Token.Keyword, 'pass'),
(Token.Text, '\n'),
]
        if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
tokens[10] = (Token.Text.Whitespace, "\n")
assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]

        fragment = '%%timeit\nmath.sin(0)'
tokens = [
(Token.Operator, '%%timeit\n'),
(Token.Name, 'math'),
(Token.Operator, '.'),
(Token.Name, 'sin'),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '0'),
(Token.Punctuation, ')'),
(Token.Text, '\n'),
]

        fragment = '%%HTML\n<div>foo</div>'
tokens = [
(Token.Operator, '%%HTML'),
(Token.Text, '\n'),
(Token.Punctuation, '<'),
(Token.Name.Tag, 'div'),
(Token.Punctuation, '>'),
(Token.Text, 'foo'),
(Token.Punctuation, '<'),
(Token.Punctuation, '/'),
(Token.Name.Tag, 'div'),
(Token.Punctuation, '>'),
(Token.Text, '\n'),
]
        assert tokens == list(self.lexer.get_tokens(fragment))