Fix tests for pygments > 2.14...
Matthias Bussonnier
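pygments 2.14 started yielding `Token.Text.Whitespace` where earlier releases yielded plain `Token.Text` for some whitespace, including the trailing newline token that `get_tokens()` appends, so exact token-list comparisons no longer pass on both sides of that release. The patch below gates the affected expectations on the installed version and compares all but the final token. As a sketch of the same idea in helper form (a hypothetical `normalize` function, not part of this patch), the new subtype could instead be folded back into `Token.Text`:

    from pygments.token import Token

    def normalize(tokens):
        # Hypothetical helper, not part of the patch: fold the
        # Token.Text.Whitespace tokens that pygments 2.14+ emits back into
        # Token.Text, so expectations written for older pygments still match.
        return [
            (Token.Text, value) if ttype is Token.Text.Whitespace else (ttype, value)
            for ttype, value in tokens
        ]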
@@ -1,176 +1,184 @@
1 """Test lexers module"""
1 """Test lexers module"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from unittest import TestCase
6 from unittest import TestCase
7 from pygments import __version__ as pygments_version
7 from pygments.token import Token
8 from pygments.token import Token
8 from pygments.lexers import BashLexer
9 from pygments.lexers import BashLexer
9
10
10 from .. import lexers
11 from .. import lexers
11
12
13 pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
14
12
15
13 class TestLexers(TestCase):
16 class TestLexers(TestCase):
14 """Collection of lexers tests"""
17 """Collection of lexers tests"""
15 def setUp(self):
18 def setUp(self):
16 self.lexer = lexers.IPythonLexer()
19 self.lexer = lexers.IPythonLexer()
17 self.bash_lexer = BashLexer()
20 self.bash_lexer = BashLexer()
18
21
19 def testIPythonLexer(self):
22 def testIPythonLexer(self):
20 fragment = '!echo $HOME\n'
23 fragment = '!echo $HOME\n'
21 tokens = [
24 bash_tokens = [
22 (Token.Operator, '!'),
25 (Token.Operator, '!'),
23 ]
26 ]
24 tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
27 bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
25 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
28 ipylex_token = list(self.lexer.get_tokens(fragment))
29 assert bash_tokens[:-1] == ipylex_token[:-1]
26
30
27 fragment_2 = '!' + fragment
31 fragment_2 = "!" + fragment
28 tokens_2 = [
32 tokens_2 = [
29 (Token.Operator, '!!'),
33 (Token.Operator, '!!'),
30 ] + tokens[1:]
34 ] + bash_tokens[1:]
31 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
35 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
32
36
33 fragment_2 = '\t %%!\n' + fragment[1:]
37 fragment_2 = '\t %%!\n' + fragment[1:]
34 tokens_2 = [
38 tokens_2 = [
35 (Token.Text, '\t '),
39 (Token.Text, '\t '),
36 (Token.Operator, '%%!'),
40 (Token.Operator, '%%!'),
37 (Token.Text, '\n'),
41 (Token.Text, '\n'),
38 ] + tokens[1:]
42 ] + bash_tokens[1:]
39 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
43 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
40
44
41 fragment_2 = 'x = ' + fragment
45 fragment_2 = 'x = ' + fragment
42 tokens_2 = [
46 tokens_2 = [
43 (Token.Name, 'x'),
47 (Token.Name, 'x'),
44 (Token.Text, ' '),
48 (Token.Text, ' '),
45 (Token.Operator, '='),
49 (Token.Operator, '='),
46 (Token.Text, ' '),
50 (Token.Text, ' '),
47 ] + tokens
51 ] + bash_tokens
48 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
52 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
49
53
50 fragment_2 = 'x, = ' + fragment
54 fragment_2 = 'x, = ' + fragment
51 tokens_2 = [
55 tokens_2 = [
52 (Token.Name, 'x'),
56 (Token.Name, 'x'),
53 (Token.Punctuation, ','),
57 (Token.Punctuation, ','),
54 (Token.Text, ' '),
58 (Token.Text, ' '),
55 (Token.Operator, '='),
59 (Token.Operator, '='),
56 (Token.Text, ' '),
60 (Token.Text, ' '),
57 ] + tokens
61 ] + bash_tokens
58 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
62 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
59
63
60 fragment_2 = 'x, = %sx ' + fragment[1:]
64 fragment_2 = 'x, = %sx ' + fragment[1:]
61 tokens_2 = [
65 tokens_2 = [
62 (Token.Name, 'x'),
66 (Token.Name, 'x'),
63 (Token.Punctuation, ','),
67 (Token.Punctuation, ','),
64 (Token.Text, ' '),
68 (Token.Text, ' '),
65 (Token.Operator, '='),
69 (Token.Operator, '='),
66 (Token.Text, ' '),
70 (Token.Text, ' '),
67 (Token.Operator, '%'),
71 (Token.Operator, '%'),
68 (Token.Keyword, 'sx'),
72 (Token.Keyword, 'sx'),
69 (Token.Text, ' '),
73 (Token.Text, ' '),
70 ] + tokens[1:]
74 ] + bash_tokens[1:]
71 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
75 if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
76 tokens_2[7] = (Token.Text.Whitespace, " ")
77 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
72
78
73 fragment_2 = 'f = %R function () {}\n'
79 fragment_2 = 'f = %R function () {}\n'
74 tokens_2 = [
80 tokens_2 = [
75 (Token.Name, 'f'),
81 (Token.Name, 'f'),
76 (Token.Text, ' '),
82 (Token.Text, ' '),
77 (Token.Operator, '='),
83 (Token.Operator, '='),
78 (Token.Text, ' '),
84 (Token.Text, ' '),
79 (Token.Operator, '%'),
85 (Token.Operator, '%'),
80 (Token.Keyword, 'R'),
86 (Token.Keyword, 'R'),
81 (Token.Text, ' function () {}\n'),
87 (Token.Text, ' function () {}\n'),
82 ]
88 ]
83 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
89 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
84
90
85 fragment_2 = '\t%%xyz\n$foo\n'
91 fragment_2 = '\t%%xyz\n$foo\n'
86 tokens_2 = [
92 tokens_2 = [
87 (Token.Text, '\t'),
93 (Token.Text, '\t'),
88 (Token.Operator, '%%'),
94 (Token.Operator, '%%'),
89 (Token.Keyword, 'xyz'),
95 (Token.Keyword, 'xyz'),
90 (Token.Text, '\n$foo\n'),
96 (Token.Text, '\n$foo\n'),
91 ]
97 ]
92 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
98 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
93
99
94 fragment_2 = '%system?\n'
100 fragment_2 = '%system?\n'
95 tokens_2 = [
101 tokens_2 = [
96 (Token.Operator, '%'),
102 (Token.Operator, '%'),
97 (Token.Keyword, 'system'),
103 (Token.Keyword, 'system'),
98 (Token.Operator, '?'),
104 (Token.Operator, '?'),
99 (Token.Text, '\n'),
105 (Token.Text, '\n'),
100 ]
106 ]
101 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
107 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
102
108
103 fragment_2 = 'x != y\n'
109 fragment_2 = 'x != y\n'
104 tokens_2 = [
110 tokens_2 = [
105 (Token.Name, 'x'),
111 (Token.Name, 'x'),
106 (Token.Text, ' '),
112 (Token.Text, ' '),
107 (Token.Operator, '!='),
113 (Token.Operator, '!='),
108 (Token.Text, ' '),
114 (Token.Text, ' '),
109 (Token.Name, 'y'),
115 (Token.Name, 'y'),
110 (Token.Text, '\n'),
116 (Token.Text, '\n'),
111 ]
117 ]
112 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
118 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
113
119
114 fragment_2 = ' ?math.sin\n'
120 fragment_2 = ' ?math.sin\n'
115 tokens_2 = [
121 tokens_2 = [
116 (Token.Text, ' '),
122 (Token.Text, ' '),
117 (Token.Operator, '?'),
123 (Token.Operator, '?'),
118 (Token.Text, 'math.sin'),
124 (Token.Text, 'math.sin'),
119 (Token.Text, '\n'),
125 (Token.Text, '\n'),
120 ]
126 ]
121 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
127 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
122
128
123 fragment = ' *int*?\n'
129 fragment = ' *int*?\n'
124 tokens = [
130 tokens = [
125 (Token.Text, ' *int*'),
131 (Token.Text, ' *int*'),
126 (Token.Operator, '?'),
132 (Token.Operator, '?'),
127 (Token.Text, '\n'),
133 (Token.Text, '\n'),
128 ]
134 ]
129 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
135 assert tokens == list(self.lexer.get_tokens(fragment))
130
136
131 fragment = '%%writefile -a foo.py\nif a == b:\n pass'
137 fragment = '%%writefile -a foo.py\nif a == b:\n pass'
132 tokens = [
138 tokens = [
133 (Token.Operator, '%%writefile'),
139 (Token.Operator, '%%writefile'),
134 (Token.Text, ' -a foo.py\n'),
140 (Token.Text, ' -a foo.py\n'),
135 (Token.Keyword, 'if'),
141 (Token.Keyword, 'if'),
136 (Token.Text, ' '),
142 (Token.Text, ' '),
137 (Token.Name, 'a'),
143 (Token.Name, 'a'),
138 (Token.Text, ' '),
144 (Token.Text, ' '),
139 (Token.Operator, '=='),
145 (Token.Operator, '=='),
140 (Token.Text, ' '),
146 (Token.Text, ' '),
141 (Token.Name, 'b'),
147 (Token.Name, 'b'),
142 (Token.Punctuation, ':'),
148 (Token.Punctuation, ':'),
143 (Token.Text, '\n'),
149 (Token.Text, '\n'),
144 (Token.Text, ' '),
150 (Token.Text, ' '),
145 (Token.Keyword, 'pass'),
151 (Token.Keyword, 'pass'),
146 (Token.Text, '\n'),
152 (Token.Text, '\n'),
147 ]
153 ]
148 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
154 if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
155 tokens[10] = (Token.Text.Whitespace, "\n")
156 assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
149
157
150 fragment = '%%timeit\nmath.sin(0)'
158 fragment = '%%timeit\nmath.sin(0)'
151 tokens = [
159 tokens = [
152 (Token.Operator, '%%timeit\n'),
160 (Token.Operator, '%%timeit\n'),
153 (Token.Name, 'math'),
161 (Token.Name, 'math'),
154 (Token.Operator, '.'),
162 (Token.Operator, '.'),
155 (Token.Name, 'sin'),
163 (Token.Name, 'sin'),
156 (Token.Punctuation, '('),
164 (Token.Punctuation, '('),
157 (Token.Literal.Number.Integer, '0'),
165 (Token.Literal.Number.Integer, '0'),
158 (Token.Punctuation, ')'),
166 (Token.Punctuation, ')'),
159 (Token.Text, '\n'),
167 (Token.Text, '\n'),
160 ]
168 ]
161
169
162 fragment = '%%HTML\n<div>foo</div>'
170 fragment = '%%HTML\n<div>foo</div>'
163 tokens = [
171 tokens = [
164 (Token.Operator, '%%HTML'),
172 (Token.Operator, '%%HTML'),
165 (Token.Text, '\n'),
173 (Token.Text, '\n'),
166 (Token.Punctuation, '<'),
174 (Token.Punctuation, '<'),
167 (Token.Name.Tag, 'div'),
175 (Token.Name.Tag, 'div'),
168 (Token.Punctuation, '>'),
176 (Token.Punctuation, '>'),
169 (Token.Text, 'foo'),
177 (Token.Text, 'foo'),
170 (Token.Punctuation, '<'),
178 (Token.Punctuation, '<'),
171 (Token.Punctuation, '/'),
179 (Token.Punctuation, '/'),
172 (Token.Name.Tag, 'div'),
180 (Token.Name.Tag, 'div'),
173 (Token.Punctuation, '>'),
181 (Token.Punctuation, '>'),
174 (Token.Text, '\n'),
182 (Token.Text, '\n'),
175 ]
183 ]
176 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
184 assert tokens == list(self.lexer.get_tokens(fragment))
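For context on the `[:-1]` slices above: `get_tokens()` always terminates its output with a newline token, and that trailing token is one of the values whose type changed in 2.14, so assertions that must pass on both old and new pygments simply drop it. A minimal illustration (the fragment is arbitrary):

    from pygments.lexers import BashLexer

    tokens = list(BashLexer().get_tokens("echo $HOME"))
    # get_tokens() guarantees a trailing newline token; its type is
    # typically Token.Text before pygments 2.14 and Token.Text.Whitespace
    # from 2.14 on, so version-agnostic comparisons slice it off.
    assert tokens[-1][1] == "\n"
    stable = tokens[:-1]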