@@ -53,6 +53,10 @@ ipython_tokens = [
                                             using(BashLexer), Text)),
     (r'(\%+)(\w+)\b', bygroups(Operator, Keyword)),
     (r'^(!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+    (r'^(.+)(=)(\s*)(!)(.+)(\n)', bygroups(
+        # With the limited syntax allowed on the l.h.s. of a shell capture,
+        # we don't need to differentiate between Python 2 and 3.
+        using(Python3Lexer), Operator, Text, Operator, using(BashLexer), Text)),
 ]
 
 def build_ipy_lexer(python3):
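
A minimal standalone sketch (not part of the patch) of what the new rule does: a toy lexer containing only the added regex, fed a shell-capture assignment. The ShellCaptureDemoLexer name and the sample input are made up for illustration; only the regex and the bygroups() callbacks mirror the rule above.

# Demo only: a toy lexer with just the new shell-capture rule.
from pygments.lexer import RegexLexer, bygroups, using
from pygments.lexers import Python3Lexer, BashLexer
from pygments.token import Operator, Text

class ShellCaptureDemoLexer(RegexLexer):
    tokens = {
        'root': [
            # l.h.s. is lexed as Python 3, '=' as Operator, the command
            # after '!' is handed to BashLexer, trailing newline is Text.
            (r'^(.+)(=)(\s*)(!)(.+)(\n)', bygroups(
                using(Python3Lexer), Operator, Text, Operator,
                using(BashLexer), Text)),
        ],
    }

for tok, val in ShellCaptureDemoLexer().get_tokens('x = !ls -la\n'):
    print(tok, repr(val))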
@@ -33,3 +33,21 @@ class TestLexers(TestsBase):
             (Token.Text, '\n'),
         ]
         self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+        fragment_2 = 'x = ' + fragment
+        tokens_2 = [
+            (Token.Name, 'x'),
+            (Token.Text, ' '),
+            (Token.Operator, '='),
+            (Token.Text, ' '),
+        ] + tokens
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        fragment_2 = 'x, = ' + fragment
+        tokens_2 = [
+            (Token.Name, 'x'),
+            (Token.Punctuation, ','),
+            (Token.Text, ' '),
+            (Token.Operator, '='),
+            (Token.Text, ' '),
+        ] + tokens
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
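
The two added cases cover a plain assignment target (x = !cmd) and a tuple target with a trailing comma (x, = !cmd). For a quick interactive spot-check of the same cases outside the test class, something along these lines works; the fragments are hypothetical and the module path and class name are assumptions about where build_ipy_lexer's Python 3 lexer ends up being exported.

# Interactive spot-check (not part of the patch); IPython.lib.lexers and
# IPython3Lexer are assumptions, not confirmed by this diff.
from IPython.lib.lexers import IPython3Lexer

lexer = IPython3Lexer()
for fragment in ('x = !ls\n', 'x, = !ls\n'):  # hypothetical inputs
    print(fragment.rstrip())
    for tok, val in lexer.get_tokens(fragment):
        print('   ', tok, repr(val))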