@@ -50,6 +50,7 @@ line_re = re.compile('.*?\n')
 
 ipython_tokens = [
     (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+    (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
     (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
     (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
     (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
@@ -40,6 +40,14 @@ class TestLexers(TestsBase):
         ] + tokens[1:]
         self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
 
+        fragment_2 = '\t %%!\n' + fragment[1:]
+        tokens_2 = [
+            (Token.Text, '\t '),
+            (Token.Operator, '%%!'),
+            (Token.Text, '\n'),
+        ] + tokens[1:]
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+
         fragment_2 = 'x = ' + fragment
         tokens_2 = [
             (Token.Name, 'x'),
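
For reference, here is a minimal sketch (not part of the patch) that checks the new rule's regular expression directly with the standard re module, on the same kind of input the added test exercises; the shell command used as input is illustrative, and the real lexer is assumed to hand the final group to BashLexer via using(BashLexer) as written in the rule above.

# Standalone check of the new %%! pattern; the leading whitespace, the
# %%! operator, and the rest of that line are captured as separate groups,
# with everything after the newline left for the Bash lexer.
import re

pattern = re.compile(r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)')
match = pattern.match('\t %%!\nls -la\n')
print(match.groups())
# ('\t ', '%%!', '\n', 'ls -la\n')  -> Text, Operator, Text, then Bash input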