@@ -4,11 +4,14 @@
 # Distributed under the terms of the Modified BSD License.
 
 from unittest import TestCase
+from pygments import __version__ as pygments_version
 from pygments.token import Token
 from pygments.lexers import BashLexer
 
 from .. import lexers
 
+pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
+
 
 class TestLexers(TestCase):
     """Collection of lexers tests"""
@@ -18,25 +21,26 @@ class TestLexers(TestCase):
 
     def testIPythonLexer(self):
         fragment = '!echo $HOME\n'
-        tokens = [
+        bash_tokens = [
             (Token.Operator, '!'),
         ]
-        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
+        ipylex_token = list(self.lexer.get_tokens(fragment))
+        assert bash_tokens[:-1] == ipylex_token[:-1]
 
-        fragment_2 = '!' + fragment
+        fragment_2 = "!" + fragment
         tokens_2 = [
             (Token.Operator, '!!'),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = '\t %%!\n' + fragment[1:]
         tokens_2 = [
             (Token.Text, '\t '),
             (Token.Operator, '%%!'),
             (Token.Text, '\n'),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = 'x = ' + fragment
         tokens_2 = [
@@ -44,8 +48,8 @@ class TestLexers(TestCase):
             (Token.Text, ' '),
             (Token.Operator, '='),
             (Token.Text, ' '),
-        ] + tokens
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x, = ' + fragment
         tokens_2 = [
@@ -54,8 +58,8 @@ class TestLexers(TestCase):
             (Token.Text, ' '),
             (Token.Operator, '='),
             (Token.Text, ' '),
-        ] + tokens
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x, = %sx ' + fragment[1:]
         tokens_2 = [
@@ -67,8 +71,10 @@ class TestLexers(TestCase):
             (Token.Operator, '%'),
             (Token.Keyword, 'sx'),
             (Token.Text, ' '),
-        ] + tokens[1:]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        ] + bash_tokens[1:]
+        if tokens_2[7] == (Token.Text, " ") and pyg214:  # pygments 2.14+
+            tokens_2[7] = (Token.Text.Whitespace, " ")
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'f = %R function () {}\n'
         tokens_2 = [
@@ -80,7 +86,7 @@ class TestLexers(TestCase):
             (Token.Keyword, 'R'),
             (Token.Text, ' function () {}\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = '\t%%xyz\n$foo\n'
         tokens_2 = [
@@ -89,7 +95,7 @@ class TestLexers(TestCase):
             (Token.Keyword, 'xyz'),
             (Token.Text, '\n$foo\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
 
         fragment_2 = '%system?\n'
         tokens_2 = [
@@ -98,7 +104,7 @@ class TestLexers(TestCase):
             (Token.Operator, '?'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = 'x != y\n'
         tokens_2 = [
@@ -109,7 +115,7 @@ class TestLexers(TestCase):
             (Token.Name, 'y'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment_2 = ' ?math.sin\n'
         tokens_2 = [
@@ -118,7 +124,7 @@ class TestLexers(TestCase):
             (Token.Text, 'math.sin'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
 
         fragment = ' *int*?\n'
         tokens = [
@@ -126,7 +132,7 @@ class TestLexers(TestCase):
             (Token.Operator, '?'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert tokens == list(self.lexer.get_tokens(fragment))
 
         fragment = '%%writefile -a foo.py\nif a == b:\n pass'
         tokens = [
@@ -145,7 +151,9 @@ class TestLexers(TestCase):
             (Token.Keyword, 'pass'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        if tokens[10] == (Token.Text, "\n") and pyg214:  # pygments 2.14+
+            tokens[10] = (Token.Text.Whitespace, "\n")
+        assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
 
         fragment = '%%timeit\nmath.sin(0)'
         tokens = [
@@ -173,4 +181,4 @@ class TestLexers(TestCase):
             (Token.Punctuation, '>'),
             (Token.Text, '\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert tokens == list(self.lexer.get_tokens(fragment))
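
Note: the pattern this patch repeats is a version gate. Pygments 2.14 began emitting Token.Text.Whitespace for some whitespace that earlier releases tokenized as plain Token.Text, so the expected token lists are fixed up at run time, and comparisons drop the trailing token (where versions disagree most) via [:-1]. Below is a small standalone sketch of the same gating idea; the expect_whitespace helper is hypothetical, written for illustration only, and is not part of the change itself.

from pygments import __version__ as pygments_version
from pygments.token import Token

# Same module-level gate the patch adds: True on pygments 2.14 and later.
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)


def expect_whitespace(expected, index):
    """Hypothetical helper: upgrade one expected (Token.Text, ws) pair to
    Token.Text.Whitespace when running under pygments 2.14+, mirroring the
    in-test fixups in the diff above."""
    tok, value = expected[index]
    if pyg214 and tok is Token.Text and value.isspace():
        expected[index] = (Token.Text.Whitespace, value)
    return expected


# Usage: build the old-style expectation, then normalize it per version.
expected = [(Token.Name, "x"), (Token.Text, " "), (Token.Operator, "=")]
expect_whitespace(expected, 1)  # index 1 becomes Token.Text.Whitespace on 2.14+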