From 681b9d6fbfc39e2af519c15cfc1f7c2353b0a58d 2015-01-24 02:37:34
From: Lev Abalkin
Date: 2015-01-24 02:37:34
Subject: [PATCH] Closes #7558: Added a rule for cell magics.

---
diff --git a/IPython/nbconvert/utils/lexers.py b/IPython/nbconvert/utils/lexers.py
index 0926df3..e8e9290 100644
--- a/IPython/nbconvert/utils/lexers.py
+++ b/IPython/nbconvert/utils/lexers.py
@@ -49,6 +49,7 @@ from IPython.testing.skipdoctest import skip_doctest
 line_re = re.compile('.*?\n')
 
 ipython_tokens = [
+    (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
     (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
                                             using(BashLexer), Text)),
     (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
diff --git a/IPython/nbconvert/utils/tests/test_lexers.py b/IPython/nbconvert/utils/tests/test_lexers.py
index be5fbe4..2cbc1cc 100644
--- a/IPython/nbconvert/utils/tests/test_lexers.py
+++ b/IPython/nbconvert/utils/tests/test_lexers.py
@@ -77,3 +77,12 @@ class TestLexers(TestsBase):
             (Token.Text, ' function () {}\n'),
         ]
         self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+
+        fragment_2 = '\t%%xyz\n$foo\n'
+        tokens_2 = [
+            (Token.Text, '\t'),
+            (Token.Operator, '%%'),
+            (Token.Keyword, 'xyz'),
+            (Token.Text, '\n$foo\n'),
+        ]
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
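
The new cell-magic rule can be exercised in isolation with plain Pygments. The sketch below is illustrative only: CellMagicDemoLexer is a hypothetical standalone lexer containing nothing but the rule added by this patch (it is not part of IPython), and the script prints the tokens produced for the same fragment used in the new test case.

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Text, Operator, Keyword


    class CellMagicDemoLexer(RegexLexer):
        """Hypothetical lexer holding only the cell-magic rule from this patch."""
        name = 'cell-magic-demo'
        tokens = {
            'root': [
                # Optional leading whitespace, the '%%' operator, the magic name,
                # then the rest of the cell. (?s) lets '.' match newlines, so the
                # entire cell body is emitted as a single Text token.
                (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
            ],
        }


    if __name__ == '__main__':
        # Mirrors the new test fragment: tab, '%%xyz', then the cell body.
        for token, value in CellMagicDemoLexer().get_tokens('\t%%xyz\n$foo\n'):
            print(token, repr(value))
        # Expected: Token.Text '\t', Token.Operator '%%',
        #           Token.Keyword 'xyz', Token.Text '\n$foo\n'

Because of the (?s) flag, everything after the magic name, including the remaining lines of the cell, becomes one Text token, which is exactly the '\n$foo\n' token the added test asserts.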