Merge pull request #13882 from Carreau/pyg-14...
Matthias Bussonnier
r27978:f1eee2c9 merge
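
This merge adapts the lexer tests to Pygments 2.14, which reclassified many whitespace runs from Token.Text to Token.Text.Whitespace. A quick probe of the difference, as a sketch assuming only that pygments is installed (the exact tokens vary by release):

from pygments.lexers import PythonLexer

# On Pygments 2.14+ the spaces typically come back as Token.Text.Whitespace;
# older releases report them as plain Token.Text.
for ttype, value in PythonLexer().get_tokens("x = 1\n"):
    print(ttype, repr(value))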
@@ -1,176 +1,184 @@
1 1 """Test lexers module"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from unittest import TestCase
7 from pygments import __version__ as pygments_version
7 8 from pygments.token import Token
8 9 from pygments.lexers import BashLexer
9 10
10 11 from .. import lexers
11 12
13 pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
14
12 15
13 16 class TestLexers(TestCase):
14 17 """Collection of lexers tests"""
15 18 def setUp(self):
16 19 self.lexer = lexers.IPythonLexer()
17 20 self.bash_lexer = BashLexer()
18 21
19 22 def testIPythonLexer(self):
20 23 fragment = '!echo $HOME\n'
21 tokens = [
24 bash_tokens = [
22 25 (Token.Operator, '!'),
23 26 ]
24 tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
25 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
27 bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
28 ipylex_token = list(self.lexer.get_tokens(fragment))
29 assert bash_tokens[:-1] == ipylex_token[:-1]
26 30
27 fragment_2 = '!' + fragment
31 fragment_2 = "!" + fragment
28 32 tokens_2 = [
29 33 (Token.Operator, '!!'),
30 ] + tokens[1:]
31 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
34 ] + bash_tokens[1:]
35 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
32 36
33 37 fragment_2 = '\t %%!\n' + fragment[1:]
34 38 tokens_2 = [
35 39 (Token.Text, '\t '),
36 40 (Token.Operator, '%%!'),
37 41 (Token.Text, '\n'),
38 ] + tokens[1:]
39 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
42 ] + bash_tokens[1:]
43 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
40 44
41 45 fragment_2 = 'x = ' + fragment
42 46 tokens_2 = [
43 47 (Token.Name, 'x'),
44 48 (Token.Text, ' '),
45 49 (Token.Operator, '='),
46 50 (Token.Text, ' '),
47 ] + tokens
48 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
51 ] + bash_tokens
52 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
49 53
50 54 fragment_2 = 'x, = ' + fragment
51 55 tokens_2 = [
52 56 (Token.Name, 'x'),
53 57 (Token.Punctuation, ','),
54 58 (Token.Text, ' '),
55 59 (Token.Operator, '='),
56 60 (Token.Text, ' '),
57 ] + tokens
58 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
61 ] + bash_tokens
62 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
59 63
60 64 fragment_2 = 'x, = %sx ' + fragment[1:]
61 65 tokens_2 = [
62 66 (Token.Name, 'x'),
63 67 (Token.Punctuation, ','),
64 68 (Token.Text, ' '),
65 69 (Token.Operator, '='),
66 70 (Token.Text, ' '),
67 71 (Token.Operator, '%'),
68 72 (Token.Keyword, 'sx'),
69 73 (Token.Text, ' '),
70 ] + tokens[1:]
71 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
74 ] + bash_tokens[1:]
75 if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
76 tokens_2[7] = (Token.Text.Whitespace, " ")
77 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
72 78
73 79 fragment_2 = 'f = %R function () {}\n'
74 80 tokens_2 = [
75 81 (Token.Name, 'f'),
76 82 (Token.Text, ' '),
77 83 (Token.Operator, '='),
78 84 (Token.Text, ' '),
79 85 (Token.Operator, '%'),
80 86 (Token.Keyword, 'R'),
81 87 (Token.Text, ' function () {}\n'),
82 88 ]
83 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
89 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
84 90
85 91 fragment_2 = '\t%%xyz\n$foo\n'
86 92 tokens_2 = [
87 93 (Token.Text, '\t'),
88 94 (Token.Operator, '%%'),
89 95 (Token.Keyword, 'xyz'),
90 96 (Token.Text, '\n$foo\n'),
91 97 ]
92 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
98 assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
93 99
94 100 fragment_2 = '%system?\n'
95 101 tokens_2 = [
96 102 (Token.Operator, '%'),
97 103 (Token.Keyword, 'system'),
98 104 (Token.Operator, '?'),
99 105 (Token.Text, '\n'),
100 106 ]
101 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
107 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
102 108
103 109 fragment_2 = 'x != y\n'
104 110 tokens_2 = [
105 111 (Token.Name, 'x'),
106 112 (Token.Text, ' '),
107 113 (Token.Operator, '!='),
108 114 (Token.Text, ' '),
109 115 (Token.Name, 'y'),
110 116 (Token.Text, '\n'),
111 117 ]
112 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
118 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
113 119
114 120 fragment_2 = ' ?math.sin\n'
115 121 tokens_2 = [
116 122 (Token.Text, ' '),
117 123 (Token.Operator, '?'),
118 124 (Token.Text, 'math.sin'),
119 125 (Token.Text, '\n'),
120 126 ]
121 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
127 assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
122 128
123 129 fragment = ' *int*?\n'
124 130 tokens = [
125 131 (Token.Text, ' *int*'),
126 132 (Token.Operator, '?'),
127 133 (Token.Text, '\n'),
128 134 ]
129 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
135 assert tokens == list(self.lexer.get_tokens(fragment))
130 136
131 137 fragment = '%%writefile -a foo.py\nif a == b:\n pass'
132 138 tokens = [
133 139 (Token.Operator, '%%writefile'),
134 140 (Token.Text, ' -a foo.py\n'),
135 141 (Token.Keyword, 'if'),
136 142 (Token.Text, ' '),
137 143 (Token.Name, 'a'),
138 144 (Token.Text, ' '),
139 145 (Token.Operator, '=='),
140 146 (Token.Text, ' '),
141 147 (Token.Name, 'b'),
142 148 (Token.Punctuation, ':'),
143 149 (Token.Text, '\n'),
144 150 (Token.Text, ' '),
145 151 (Token.Keyword, 'pass'),
146 152 (Token.Text, '\n'),
147 153 ]
148 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
154 if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
155 tokens[10] = (Token.Text.Whitespace, "\n")
156 assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
149 157
150 158 fragment = '%%timeit\nmath.sin(0)'
151 159 tokens = [
152 160 (Token.Operator, '%%timeit\n'),
153 161 (Token.Name, 'math'),
154 162 (Token.Operator, '.'),
155 163 (Token.Name, 'sin'),
156 164 (Token.Punctuation, '('),
157 165 (Token.Literal.Number.Integer, '0'),
158 166 (Token.Punctuation, ')'),
159 167 (Token.Text, '\n'),
160 168 ]
161 169
162 170 fragment = '%%HTML\n<div>foo</div>'
163 171 tokens = [
164 172 (Token.Operator, '%%HTML'),
165 173 (Token.Text, '\n'),
166 174 (Token.Punctuation, '<'),
167 175 (Token.Name.Tag, 'div'),
168 176 (Token.Punctuation, '>'),
169 177 (Token.Text, 'foo'),
170 178 (Token.Punctuation, '<'),
171 179 (Token.Punctuation, '/'),
172 180 (Token.Name.Tag, 'div'),
173 181 (Token.Punctuation, '>'),
174 182 (Token.Text, '\n'),
175 183 ]
176 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
184 assert tokens == list(self.lexer.get_tokens(fragment))
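
Two patterns recur throughout the updated assertions above. First, expectations are gated on the feature release via the pyg214 flag; second, both sides of most comparisons are trimmed with [:-1]. A minimal sketch of the gate, assuming only that pygments is importable (the token list is illustrative, not copied from the suite):

from pygments import __version__ as pygments_version
from pygments.token import Token

# Pygments 2.14 reclassified some whitespace runs from Token.Text to
# Token.Text.Whitespace, so expected fixtures are rewritten conditionally.
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)

tokens = [
    (Token.Operator, "%"),
    (Token.Keyword, "sx"),
    (Token.Text, " "),
]
# Rewrite only when the old-style token is actually present, so the fixture
# stays valid if the list is edited later.
if tokens[2] == (Token.Text, " ") and pyg214:
    tokens[2] = (Token.Text.Whitespace, " ")

The [:-1] trim exists because get_tokens() always terminates its stream with a trailing newline token, and that token's type also differs across Pygments releases; comparing everything but the tail keeps the tests version-agnostic.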