add three tests for cell magic syntax highlighting
Jörg Dietrich
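As a quick illustration of what these tests check, the lexer can be run by hand on a cell-magic fragment and the token stream printed. This is only a sketch: it assumes IPython and Pygments are installed, and that the lexer under test is importable as IPython.lib.lexers (the test file itself only uses the relative import from .. import lexers, so the absolute path is an assumption):

    # Minimal sketch: tokenize a cell magic and inspect the result.
    # The module path IPython.lib.lexers is an assumption based on the
    # relative import "from .. import lexers" in the test file.
    from IPython.lib.lexers import IPythonLexer

    lexer = IPythonLexer()
    for token_type, text in lexer.get_tokens('%%timeit\nmath.sin(0)'):
        print(token_type, repr(text))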
1 1 """Test lexers module"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from unittest import TestCase
7 7 from pygments.token import Token
8 8 from pygments.lexers import BashLexer
9 9
10 10 from .. import lexers
11 11
12 12
13 13 class TestLexers(TestCase):
14 14 """Collection of lexers tests"""
15 15 def setUp(self):
16 16 self.lexer = lexers.IPythonLexer()
17 17 self.bash_lexer = BashLexer()
18 18
19 19 def testIPythonLexer(self):
20 20 fragment = '!echo $HOME\n'
21 21 tokens = [
22 22 (Token.Operator, '!'),
23 23 ]
24 24 tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
25 25 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
26 26
27 27 fragment_2 = '!' + fragment
28 28 tokens_2 = [
29 29 (Token.Operator, '!!'),
30 30 ] + tokens[1:]
31 31 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
32 32
33 33 fragment_2 = '\t %%!\n' + fragment[1:]
34 34 tokens_2 = [
35 35 (Token.Text, '\t '),
36 36 (Token.Operator, '%%!'),
37 37 (Token.Text, '\n'),
38 38 ] + tokens[1:]
39 39 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
40 40
41 41 fragment_2 = 'x = ' + fragment
42 42 tokens_2 = [
43 43 (Token.Name, 'x'),
44 44 (Token.Text, ' '),
45 45 (Token.Operator, '='),
46 46 (Token.Text, ' '),
47 47 ] + tokens
48 48 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
49 49
50 50 fragment_2 = 'x, = ' + fragment
51 51 tokens_2 = [
52 52 (Token.Name, 'x'),
53 53 (Token.Punctuation, ','),
54 54 (Token.Text, ' '),
55 55 (Token.Operator, '='),
56 56 (Token.Text, ' '),
57 57 ] + tokens
58 58 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
59 59
60 60 fragment_2 = 'x, = %sx ' + fragment[1:]
61 61 tokens_2 = [
62 62 (Token.Name, 'x'),
63 63 (Token.Punctuation, ','),
64 64 (Token.Text, ' '),
65 65 (Token.Operator, '='),
66 66 (Token.Text, ' '),
67 67 (Token.Operator, '%'),
68 68 (Token.Keyword, 'sx'),
69 69 (Token.Text, ' '),
70 70 ] + tokens[1:]
71 71 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
72 72
73 73 fragment_2 = 'f = %R function () {}\n'
74 74 tokens_2 = [
75 75 (Token.Name, 'f'),
76 76 (Token.Text, ' '),
77 77 (Token.Operator, '='),
78 78 (Token.Text, ' '),
79 79 (Token.Operator, '%'),
80 80 (Token.Keyword, 'R'),
81 81 (Token.Text, ' function () {}\n'),
82 82 ]
83 83 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
84 84
85 85 fragment_2 = '\t%%xyz\n$foo\n'
86 86 tokens_2 = [
87 87 (Token.Text, '\t'),
88 88 (Token.Operator, '%%'),
89 89 (Token.Keyword, 'xyz'),
90 90 (Token.Text, '\n$foo\n'),
91 91 ]
92 92 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
93 93
94 94 fragment_2 = '%system?\n'
95 95 tokens_2 = [
96 96 (Token.Operator, '%'),
97 97 (Token.Keyword, 'system'),
98 98 (Token.Operator, '?'),
99 99 (Token.Text, '\n'),
100 100 ]
101 101 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
102 102
103 103 fragment_2 = 'x != y\n'
104 104 tokens_2 = [
105 105 (Token.Name, 'x'),
106 106 (Token.Text, ' '),
107 107 (Token.Operator, '!='),
108 108 (Token.Text, ' '),
109 109 (Token.Name, 'y'),
110 110 (Token.Text, '\n'),
111 111 ]
112 112 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
113 113
114 114 fragment_2 = ' ?math.sin\n'
115 115 tokens_2 = [
116 116 (Token.Text, ' '),
117 117 (Token.Operator, '?'),
118 118 (Token.Text, 'math.sin'),
119 119 (Token.Text, '\n'),
120 120 ]
121 121 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
122 122
123 123 fragment = ' *int*?\n'
124 124 tokens = [
125 125 (Token.Text, ' *int*'),
126 126 (Token.Operator, '?'),
127 127 (Token.Text, '\n'),
128 128 ]
129 129 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # Cell magic with arguments: the expected tokens keep '%%writefile'
        # and its argument line as-is, and show the cell body highlighted
        # as Python.
        fragment = '%%writefile -a foo.py\nif a == b:\n pass'
        tokens = [
            (Token.Operator, '%%writefile'),
            (Token.Text, ' -a foo.py\n'),
            (Token.Keyword, 'if'),
            (Token.Text, ' '),
            (Token.Name, 'a'),
            (Token.Text, ' '),
            (Token.Operator, '=='),
            (Token.Text, ' '),
            (Token.Name, 'b'),
            (Token.Punctuation, ':'),
            (Token.Text, '\n'),
            (Token.Text, ' '),
            (Token.Keyword, 'pass'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # Cell magic without arguments: the expected Operator token includes
        # the trailing newline, and the body is again highlighted as Python.
        fragment = '%%timeit\nmath.sin(0)'
        tokens = [
            (Token.Operator, '%%timeit\n'),
            (Token.Name, 'math'),
            (Token.Operator, '.'),
            (Token.Name, 'sin'),
            (Token.Punctuation, '('),
            (Token.Literal.Number.Integer, '0'),
            (Token.Punctuation, ')'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # Cell magic with a non-Python body: the expected tokens show the
        # %%HTML cell body highlighted as HTML.
        fragment = '%%HTML\n<div>foo</div>'
        tokens = [
            (Token.Operator, '%%HTML'),
            (Token.Text, '\n'),
            (Token.Punctuation, '<'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, 'foo'),
            (Token.Punctuation, '<'),
            (Token.Punctuation, '/'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
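
The new tests can be exercised on their own with the standard unittest loader. A minimal sketch, with the caveat that the dotted module path is an assumption based on the relative import in the test file rather than something stated in the diff:

    # Run only TestLexers with the stdlib unittest runner.
    # The path IPython.lib.tests.test_lexers is assumed.
    import unittest

    suite = unittest.defaultTestLoader.loadTestsFromName(
        'IPython.lib.tests.test_lexers.TestLexers'
    )
    unittest.TextTestRunner(verbosity=2).run(suite)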