##// END OF EJS Templates
Compare shell tokens with BashLexer output.
Min RK -
Show More
@@ -1,130 +1,129 b''
1 1 """Test lexers module"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from unittest import TestCase
7 7 from pygments.token import Token
8 from pygments.lexers import BashLexer
8 9
9 10 from .. import lexers
10 11
11 12
12 13 class TestLexers(TestCase):
13 14 """Collection of lexers tests"""
14 15 def setUp(self):
15 16 self.lexer = lexers.IPythonLexer()
17 self.bash_lexer = BashLexer()
16 18
17 19 def testIPythonLexer(self):
18 20 fragment = '!echo $HOME\n'
19 21 tokens = [
20 22 (Token.Operator, '!'),
21 (Token.Name.Builtin, 'echo'),
22 (Token.Text, ' '),
23 (Token.Name.Variable, '$HOME'),
24 (Token.Text, '\n'),
25 23 ]
24 tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
26 25 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
27 26
28 27 fragment_2 = '!' + fragment
29 28 tokens_2 = [
30 29 (Token.Operator, '!!'),
31 30 ] + tokens[1:]
32 31 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
33 32
34 33 fragment_2 = '\t %%!\n' + fragment[1:]
35 34 tokens_2 = [
36 35 (Token.Text, '\t '),
37 36 (Token.Operator, '%%!'),
38 37 (Token.Text, '\n'),
39 38 ] + tokens[1:]
40 39 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
41 40
42 41 fragment_2 = 'x = ' + fragment
43 42 tokens_2 = [
44 43 (Token.Name, 'x'),
45 44 (Token.Text, ' '),
46 45 (Token.Operator, '='),
47 46 (Token.Text, ' '),
48 47 ] + tokens
49 48 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
50 49
51 50 fragment_2 = 'x, = ' + fragment
52 51 tokens_2 = [
53 52 (Token.Name, 'x'),
54 53 (Token.Punctuation, ','),
55 54 (Token.Text, ' '),
56 55 (Token.Operator, '='),
57 56 (Token.Text, ' '),
58 57 ] + tokens
59 58 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
60 59
61 60 fragment_2 = 'x, = %sx ' + fragment[1:]
62 61 tokens_2 = [
63 62 (Token.Name, 'x'),
64 63 (Token.Punctuation, ','),
65 64 (Token.Text, ' '),
66 65 (Token.Operator, '='),
67 66 (Token.Text, ' '),
68 67 (Token.Operator, '%'),
69 68 (Token.Keyword, 'sx'),
70 69 (Token.Text, ' '),
71 70 ] + tokens[1:]
72 71 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
73 72
74 73 fragment_2 = 'f = %R function () {}\n'
75 74 tokens_2 = [
76 75 (Token.Name, 'f'),
77 76 (Token.Text, ' '),
78 77 (Token.Operator, '='),
79 78 (Token.Text, ' '),
80 79 (Token.Operator, '%'),
81 80 (Token.Keyword, 'R'),
82 81 (Token.Text, ' function () {}\n'),
83 82 ]
84 83 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
85 84
86 85 fragment_2 = '\t%%xyz\n$foo\n'
87 86 tokens_2 = [
88 87 (Token.Text, '\t'),
89 88 (Token.Operator, '%%'),
90 89 (Token.Keyword, 'xyz'),
91 90 (Token.Text, '\n$foo\n'),
92 91 ]
93 92 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
94 93
95 94 fragment_2 = '%system?\n'
96 95 tokens_2 = [
97 96 (Token.Operator, '%'),
98 97 (Token.Keyword, 'system'),
99 98 (Token.Operator, '?'),
100 99 (Token.Text, '\n'),
101 100 ]
102 101 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
103 102
104 103 fragment_2 = 'x != y\n'
105 104 tokens_2 = [
106 105 (Token.Name, 'x'),
107 106 (Token.Text, ' '),
108 107 (Token.Operator, '!='),
109 108 (Token.Text, ' '),
110 109 (Token.Name, 'y'),
111 110 (Token.Text, '\n'),
112 111 ]
113 112 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
114 113
115 114 fragment_2 = ' ?math.sin\n'
116 115 tokens_2 = [
117 116 (Token.Text, ' '),
118 117 (Token.Operator, '?'),
119 118 (Token.Text, 'math.sin'),
120 119 (Token.Text, '\n'),
121 120 ]
122 121 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
123 122
124 123 fragment = ' *int*?\n'
125 124 tokens = [
126 125 (Token.Text, ' *int*'),
127 126 (Token.Operator, '?'),
128 127 (Token.Text, '\n'),
129 128 ]
130 129 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
General Comments 0
You need to be logged in to leave comments. Login now