move test_lexers to lib/tests...
Min RK
@@ -0,0 +1,131 @@
1 """Test lexers module"""
2 #-----------------------------------------------------------------------------
3 # Copyright (C) 2014 The IPython Development Team
4 #
5 # Distributed under the terms of the BSD License. The full license is in
6 # the file COPYING, distributed as part of this software.
7 #-----------------------------------------------------------------------------
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12 from pygments.token import Token
13
14 from jupyter_nbconvert.tests.base import TestsBase
15 from .. import lexers
16
17
18 #-----------------------------------------------------------------------------
19 # Classes and functions
20 #-----------------------------------------------------------------------------
21 class TestLexers(TestsBase):
22 """Collection of lexers tests"""
23 def setUp(self):
24 self.lexer = lexers.IPythonLexer()
25
26 def testIPythonLexer(self):
27 fragment = '!echo $HOME\n'
28 tokens = [
29 (Token.Operator, '!'),
30 (Token.Name.Builtin, 'echo'),
31 (Token.Text, ' '),
32 (Token.Name.Variable, '$HOME'),
33 (Token.Text, '\n'),
34 ]
35 self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
36
37 fragment_2 = '!' + fragment
38 tokens_2 = [
39 (Token.Operator, '!!'),
40 ] + tokens[1:]
41 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
42
43 fragment_2 = '\t %%!\n' + fragment[1:]
44 tokens_2 = [
45 (Token.Text, '\t '),
46 (Token.Operator, '%%!'),
47 (Token.Text, '\n'),
48 ] + tokens[1:]
49 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
50
51 fragment_2 = 'x = ' + fragment
52 tokens_2 = [
53 (Token.Name, 'x'),
54 (Token.Text, ' '),
55 (Token.Operator, '='),
56 (Token.Text, ' '),
57 ] + tokens
58 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
59
60 fragment_2 = 'x, = ' + fragment
61 tokens_2 = [
62 (Token.Name, 'x'),
63 (Token.Punctuation, ','),
64 (Token.Text, ' '),
65 (Token.Operator, '='),
66 (Token.Text, ' '),
67 ] + tokens
68 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
69
70 fragment_2 = 'x, = %sx ' + fragment[1:]
71 tokens_2 = [
72 (Token.Name, 'x'),
73 (Token.Punctuation, ','),
74 (Token.Text, ' '),
75 (Token.Operator, '='),
76 (Token.Text, ' '),
77 (Token.Operator, '%'),
78 (Token.Keyword, 'sx'),
79 (Token.Text, ' '),
80 ] + tokens[1:]
81 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
82
83 fragment_2 = 'f = %R function () {}\n'
84 tokens_2 = [
85 (Token.Name, 'f'),
86 (Token.Text, ' '),
87 (Token.Operator, '='),
88 (Token.Text, ' '),
89 (Token.Operator, '%'),
90 (Token.Keyword, 'R'),
91 (Token.Text, ' function () {}\n'),
92 ]
93 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
94
95 fragment_2 = '\t%%xyz\n$foo\n'
96 tokens_2 = [
97 (Token.Text, '\t'),
98 (Token.Operator, '%%'),
99 (Token.Keyword, 'xyz'),
100 (Token.Text, '\n$foo\n'),
101 ]
102 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
103
104 fragment_2 = '%system?\n'
105 tokens_2 = [
106 (Token.Operator, '%'),
107 (Token.Keyword, 'system'),
108 (Token.Operator, '?'),
109 (Token.Text, '\n'),
110 ]
111 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
112
113 fragment_2 = 'x != y\n'
114 tokens_2 = [
115 (Token.Name, 'x'),
116 (Token.Text, ' '),
117 (Token.Operator, '!='),
118 (Token.Text, ' '),
119 (Token.Name, 'y'),
120 (Token.Text, '\n'),
121 ]
122 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
123
124 fragment_2 = ' ?math.sin\n'
125 tokens_2 = [
126 (Token.Text, ' '),
127 (Token.Operator, '?'),
128 (Token.Text, 'math.sin'),
129 (Token.Text, '\n'),
130 ]
131 self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))