Show More
"""Test lexers module"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from unittest import TestCase

from pygments.token import Token
from pygments.lexers import BashLexer

from .. import lexers
class TestLexers(TestCase):
    """Collection of lexers tests"""

    def setUp(self):
        # Lexer under test, plus a reference Bash lexer used to build the
        # expected token stream for shell-escape fragments instead of
        # hand-coding bash tokens (keeps the test robust across pygments
        # versions).
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        """IPythonLexer tokenization of shell escapes, magics and help syntax."""
        # A leading '!' is a shell escape: '!' is an Operator, the rest of
        # the line is delegated to the Bash lexer.
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # '!!' (system call, output captured) keeps the doubled operator;
        # the bash tokens after it are unchanged.
        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '%%!' cell-magic shell escape, allowed after leading whitespace.
        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assigning the result of a shell escape to a name.
        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Tuple-unpacking assignment from a shell escape.
        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assignment from the %sx line magic; the magic name is a Keyword.
        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Line magic whose argument is passed through as plain text.
        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Unknown cell magic: its body is left as raw text.
        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Trailing '?' help request on a magic.
        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '!=' must lex as a comparison operator, not a shell escape.
        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Leading '?' help request on a dotted name.
        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Wildcard object search followed by '?'.
        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
General Comments 0
You need to be logged in to leave comments.
Login now