test_lexers.py
"""Test lexers module"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from unittest import TestCase

from pygments.token import Token
from pygments.lexers import BashLexer

from .. import lexers


class TestLexers(TestCase):
    """Collection of lexers tests"""

    def setUp(self):
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

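    # The IPython lexer delegates the body of a '!' shell escape to Pygments'
    # BashLexer, so the expected tokens below are built from BashLexer output.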
    def testIPythonLexer(self):
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
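
        # '!!cmd' and the '%%!' cell magic are shell escapes as well; the rest
        # of the line is still tokenized by the Bash lexer.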
        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
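
        # Assigning the output of a shell escape to a Python name (including a
        # tuple-unpacking target) keeps the left-hand side lexed as Python.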
        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
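
        # The %sx line magic also runs a shell command, so its argument is
        # tokenized the same way as a '!' escape.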
        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
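
        # Arguments to other line magics (here %R) are emitted as plain text.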
        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
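
        # The body of a generic cell magic is passed through as plain text.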
        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
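
        # A trailing '?' after a magic is the help operator.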
        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
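
        # '!=' is an ordinary Python comparison, not a shell escape.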
        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
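
        # A leading '?' requests help; the object name stays plain text.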
        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
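
        # Wildcard help lookups such as ' *int*?' are also supported; the
        # pattern itself is kept as plain text.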
        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
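
# A minimal way to run just this module from an IPython source checkout,
# assuming the file lives at IPython/lib/tests/test_lexers.py, would be:
#
#     pytest IPython/lib/tests/test_lexers.py
#
# or, with the standard library runner (requires IPython to be importable):
#
#     python -m unittest IPython.lib.tests.test_lexers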