Merge pull request #13882 from Carreau/pyg-14...
Matthias Bussonnier
r27978:f1eee2c9 merge
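This merge adapts the lexer tests to Pygments 2.14, which started emitting Token.Text.Whitespace where older releases emitted plain Token.Text for some whitespace runs. A minimal sketch of the version-gating pattern the diff applies (assuming only that Pygments is installed; `expected` is an illustrative name, not from the diff):

    from pygments import __version__ as pygments_version
    from pygments.token import Token

    # True when the installed Pygments is 2.14 or newer.
    pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)

    # Older Pygments lexes a lone space as Token.Text ...
    expected = [(Token.Text, " ")]
    if pyg214:
        # ... while 2.14+ reports it as Token.Text.Whitespace.
        expected = [(Token.Text.Whitespace, " ")]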
@@ -4,11 +4,14 @@
 # Distributed under the terms of the Modified BSD License.
 
 from unittest import TestCase
+from pygments import __version__ as pygments_version
 from pygments.token import Token
 from pygments.lexers import BashLexer
 
 from .. import lexers
 
+pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
+
 
 class TestLexers(TestCase):
     """Collection of lexers tests"""
@@ -18,25 +21,26 @@ class TestLexers(TestCase):
 
     def testIPythonLexer(self):
         fragment = '!echo $HOME\n'
-        tokens = [
+        bash_tokens = [
             (Token.Operator, '!'),
         ]
-        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
+        ipylex_token = list(self.lexer.get_tokens(fragment))
+        assert bash_tokens[:-1] == ipylex_token[:-1]
 
-        fragment_2 = '!' + fragment
+        fragment_2 = "!" + fragment
         tokens_2 = [
             (Token.Operator, '!!'),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = '\t %%!\n' + fragment[1:]
         tokens_2 = [
             (Token.Text, '\t '),
             (Token.Operator, '%%!'),
             (Token.Text, '\n'),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = 'x = ' + fragment
         tokens_2 = [
@@ -44,8 +48,8 @@ class TestLexers(TestCase):
             (Token.Text, ' '),
             (Token.Operator, '='),
             (Token.Text, ' '),
-        ] + tokens
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x, = ' + fragment
         tokens_2 = [
@@ -54,8 +58,8 @@ class TestLexers(TestCase):
             (Token.Text, ' '),
             (Token.Operator, '='),
             (Token.Text, ' '),
-        ] + tokens
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x, = %sx ' + fragment[1:]
         tokens_2 = [
@@ -67,8 +71,10 @@ class TestLexers(TestCase):
             (Token.Operator, '%'),
             (Token.Keyword, 'sx'),
             (Token.Text, ' '),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        if tokens_2[7] == (Token.Text, " ") and pyg214:  # pygments 2.14+
+            tokens_2[7] = (Token.Text.Whitespace, " ")
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'f = %R function () {}\n'
         tokens_2 = [
@@ -80,7 +86,7 @@ class TestLexers(TestCase):
             (Token.Keyword, 'R'),
             (Token.Text, ' function () {}\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = '\t%%xyz\n$foo\n'
         tokens_2 = [
@@ -89,7 +95,7 @@ class TestLexers(TestCase):
             (Token.Keyword, 'xyz'),
             (Token.Text, '\n$foo\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = '%system?\n'
         tokens_2 = [
@@ -98,7 +104,7 @@ class TestLexers(TestCase):
             (Token.Operator, '?'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x != y\n'
         tokens_2 = [
@@ -109,7 +115,7 @@ class TestLexers(TestCase):
             (Token.Name, 'y'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = ' ?math.sin\n'
         tokens_2 = [
@@ -118,7 +124,7 @@ class TestLexers(TestCase):
             (Token.Text, 'math.sin'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment = ' *int*?\n'
         tokens = [
@@ -126,7 +132,7 @@ class TestLexers(TestCase):
             (Token.Operator, '?'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert tokens == list(self.lexer.get_tokens(fragment))
 
         fragment = '%%writefile -a foo.py\nif a == b:\n    pass'
         tokens = [
@@ -145,7 +151,9 @@ class TestLexers(TestCase):
             (Token.Keyword, 'pass'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        if tokens[10] == (Token.Text, "\n") and pyg214:  # pygments 2.14+
+            tokens[10] = (Token.Text.Whitespace, "\n")
+        assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
 
         fragment = '%%timeit\nmath.sin(0)'
         tokens = [
@@ -173,4 +181,4 @@ class TestLexers(TestCase):
             (Token.Punctuation, '>'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert tokens == list(self.lexer.get_tokens(fragment))
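Note that most of the rewritten assertions compare tokens[:-1] rather than the full lists: Pygments guarantees that lexer output ends with a newline, and the type of that trailing token (Token.Text versus Token.Text.Whitespace) depends on the installed Pygments release, so the tests ignore it. A hypothetical helper, not part of this change, that captures the same comparison pattern:

    def assert_tokens_match(expected, lexer, fragment):
        # Compare everything except the trailing newline token, whose
        # token type varies across Pygments versions.
        actual = list(lexer.get_tokens(fragment))
        assert expected[:-1] == actual[:-1]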