##// END OF EJS Templates
Merge pull request #7556 from abalkin/issue-7548-2...
Matthias Bussonnier -
r20139:b2c6426f merge
parent child Browse files
Show More
@@ -0,0 +1,111 b''
1 """Test lexers module"""
2 #-----------------------------------------------------------------------------
3 # Copyright (C) 2014 The IPython Development Team
4 #
5 # Distributed under the terms of the BSD License. The full license is in
6 # the file COPYING, distributed as part of this software.
7 #-----------------------------------------------------------------------------
8
9 #-----------------------------------------------------------------------------
10 # Imports
11 #-----------------------------------------------------------------------------
12 from pygments.token import Token
13
14 from IPython.nbconvert.tests.base import TestsBase
15 from .. import lexers
16
17
18 #-----------------------------------------------------------------------------
19 # Classes and functions
20 #-----------------------------------------------------------------------------
class TestLexers(TestsBase):
    """Test the IPython-specific pygments lexer.

    Each case feeds a code fragment to ``IPythonLexer`` and compares the
    complete token stream against an explicit expectation.
    """

    def setUp(self):
        # Fresh lexer per test so no lexer state leaks between cases.
        self.lexer = lexers.IPythonLexer()

    def _assert_tokens(self, fragment, expected):
        """Lex *fragment* and assert its full token stream equals *expected*."""
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))

    def testIPythonLexer(self):
        """Check tokenization of shell escapes, magics, and help queries."""
        # Baseline: a '!' shell escape; later cases reuse its token tail.
        fragment = '!echo $HOME\n'
        bash_tokens = [
            (Token.Operator, '!'),
            (Token.Name.Builtin, 'echo'),
            (Token.Text, ' '),
            (Token.Name.Variable, '$HOME'),
            (Token.Text, '\n'),
        ]
        self._assert_tokens(fragment, bash_tokens)

        # '!!' double shell escape collapses into a single operator token.
        self._assert_tokens(
            '!' + fragment,
            [(Token.Operator, '!!')] + bash_tokens[1:])

        # '%%!' cell-level shell escape, preceded by whitespace.
        self._assert_tokens(
            '\t %%!\n' + fragment[1:],
            [
                (Token.Text, '\t '),
                (Token.Operator, '%%!'),
                (Token.Text, '\n'),
            ] + bash_tokens[1:])

        # Assignment of a shell-escape result.
        self._assert_tokens(
            'x = ' + fragment,
            [
                (Token.Name, 'x'),
                (Token.Text, ' '),
                (Token.Operator, '='),
                (Token.Text, ' '),
            ] + bash_tokens)

        # Tuple-unpacking assignment of a shell-escape result.
        self._assert_tokens(
            'x, = ' + fragment,
            [
                (Token.Name, 'x'),
                (Token.Punctuation, ','),
                (Token.Text, ' '),
                (Token.Operator, '='),
                (Token.Text, ' '),
            ] + bash_tokens)

        # Assignment from the %sx magic; argument is lexed as shell code.
        self._assert_tokens(
            'x, = %sx ' + fragment[1:],
            [
                (Token.Name, 'x'),
                (Token.Punctuation, ','),
                (Token.Text, ' '),
                (Token.Operator, '='),
                (Token.Text, ' '),
                (Token.Operator, '%'),
                (Token.Keyword, 'sx'),
                (Token.Text, ' '),
            ] + bash_tokens[1:])

        # Generic line magic (%R): the rest of the line is plain text.
        self._assert_tokens(
            'f = %R function () {}\n',
            [
                (Token.Name, 'f'),
                (Token.Text, ' '),
                (Token.Operator, '='),
                (Token.Text, ' '),
                (Token.Operator, '%'),
                (Token.Keyword, 'R'),
                (Token.Text, ' function () {}\n'),
            ])

        # Cell magic: everything after the first line is opaque text,
        # so '$foo' is NOT tokenized as a variable.
        self._assert_tokens(
            '\t%%xyz\n$foo\n',
            [
                (Token.Text, '\t'),
                (Token.Operator, '%%'),
                (Token.Keyword, 'xyz'),
                (Token.Text, '\n$foo\n'),
            ])

        # Help query on a magic ('?').
        self._assert_tokens(
            '%system?\n',
            [
                (Token.Operator, '%'),
                (Token.Keyword, 'system'),
                (Token.Operator, '?'),
                (Token.Text, '\n'),
            ])
@@ -49,10 +49,15 b' from IPython.testing.skipdoctest import skip_doctest'
# Matches one line at a time, including its trailing newline.
line_re = re.compile('.*?\n')

# Extra token rules layered on top of the base Python lexer to recognize
# IPython-specific syntax.  Rules are listed with the more specific forms
# (cell magics, help queries, shell-capture magics) before the generic
# '%magic' and '!cmd' rules.
ipython_tokens = [
    # '%%name ...' cell magic: the remainder of the cell is plain text.
    (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
    # '%%!' cell-level shell escape: the body is delegated to the bash lexer.
    (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
    # '%magic?' / '%%magic??' help queries at end of line.
    (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
    # Bare trailing '?' / '??' help operator.
    (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
    # Shell-capture magics: the argument is delegated to the bash lexer.
    (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
                                            using(BashLexer), Text)),
    # Generic line magic: rest of the line is plain text.
    (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
    # '!!cmd' (line start) and '!cmd' shell escapes, lexed as bash.
    (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
    (r'(!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
]
57
62
58 def build_ipy_lexer(python3):
63 def build_ipy_lexer(python3):
General Comments 0
You need to be logged in to leave comments. Login now