r21232 | """Test lexers module""" | ||
Min RK
|
r21234 | # Copyright (c) IPython Development Team. | ||
# Distributed under the terms of the Modified BSD License. | ||||
from unittest import TestCase | ||||
Matthias Bussonnier
|
r27977 | from pygments import __version__ as pygments_version | ||
Min RK
|
r21232 | from pygments.token import Token | ||
Min RK
|
r22047 | from pygments.lexers import BashLexer | ||
Min RK
|
r21232 | |||
from .. import lexers | ||||
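
# Pygments 2.14 started emitting Token.Text.Whitespace where older versions
# used plain Token.Text; this flag lets the assertions below adjust their
# expected tokens accordingly.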
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)


class TestLexers(TestCase):
    """Collection of lexers tests"""

    def setUp(self):
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
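        # A leading '!' is a shell escape: the '!' lexes as an Operator and
        # the rest of the line should tokenize exactly as BashLexer does.
        # The trailing newline token is sliced off ([:-1]) here and in later
        # checks because its token type differs across Pygments versions.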
        fragment = '!echo $HOME\n'
        bash_tokens = [
            (Token.Operator, '!'),
        ]
        bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        ipylex_token = list(self.lexer.get_tokens(fragment))
        assert bash_tokens[:-1] == ipylex_token[:-1]
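
        # '!!' (a shell escape that also returns the output) lexes as a
        # single '!!' operator followed by the same bash tokens.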
        fragment_2 = "!" + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + bash_tokens[1:]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
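
        # The '%%!' cell magic form: the cell body lexes as shell code, and
        # leading whitespace before the magic is preserved as a Text token.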
        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + bash_tokens[1:]
        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
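
        # Capturing shell output: in 'x = !cmd' the left-hand side lexes as
        # ordinary Python before switching to bash tokens at the '!'.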
        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + bash_tokens
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
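
        # The same, with tuple unpacking on the left-hand side.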
        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + bash_tokens
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
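
        # '%sx' (run a shell command and capture its output) combined with
        # unpacking; on Pygments 2.14+ the space after the magic comes back
        # as a Whitespace token, hence the fixup below.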
        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + bash_tokens[1:]
        if tokens_2[7] == (Token.Text, " ") and pyg214:  # pygments 2.14+
            tokens_2[7] = (Token.Text.Whitespace, " ")
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
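
        # A line magic such as '%R' lexes as '%' + keyword, with the rest of
        # the line passed through as plain Text.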
        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
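
        # An unrecognized cell magic ('%%xyz') still lexes as '%%' + keyword,
        # and the cell body is left as Text.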
        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
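
        # A trailing '?' asks for help on the magic.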
        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
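
        # '!=' is an ordinary comparison, not a shell escape.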
        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
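
        # A leading '?' asks for help on the object that follows.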
        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
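
        # '*int*?' is a wildcard search over names matching the pattern.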
        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        assert tokens == list(self.lexer.get_tokens(fragment))
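
        # A cell magic with arguments: the magic name and its argument line
        # are split off, and the cell body is lexed as Python.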
        fragment = '%%writefile -a foo.py\nif a == b:\n pass'
        tokens = [
            (Token.Operator, '%%writefile'),
            (Token.Text, ' -a foo.py\n'),
            (Token.Keyword, 'if'),
            (Token.Text, ' '),
            (Token.Name, 'a'),
            (Token.Text, ' '),
            (Token.Operator, '=='),
            (Token.Text, ' '),
            (Token.Name, 'b'),
            (Token.Punctuation, ':'),
            (Token.Text, '\n'),
            (Token.Text, ' '),
            (Token.Keyword, 'pass'),
            (Token.Text, '\n'),
        ]
        if tokens[10] == (Token.Text, "\n") and pyg214:  # pygments 2.14+
            tokens[10] = (Token.Text.Whitespace, "\n")
        assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
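
        # '%%timeit' keeps the magic and its newline as one Operator token
        # and lexes the body as Python.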
        fragment = '%%timeit\nmath.sin(0)'
        tokens = [
            (Token.Operator, '%%timeit\n'),
            (Token.Name, 'math'),
            (Token.Operator, '.'),
            (Token.Name, 'sin'),
            (Token.Punctuation, '('),
            (Token.Literal.Number.Integer, '0'),
            (Token.Punctuation, ')'),
            (Token.Text, '\n'),
        ]
        assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
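
        # '%%HTML' hands the cell body to the HTML lexer.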
        fragment = '%%HTML\n<div>foo</div>'
        tokens = [
            (Token.Operator, '%%HTML'),
            (Token.Text, '\n'),
            (Token.Punctuation, '<'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, 'foo'),
            (Token.Punctuation, '<'),
            (Token.Punctuation, '/'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, '\n'),
        ]
        assert tokens == list(self.lexer.get_tokens(fragment))