diff --git a/IPython/nbconvert/utils/lexers.py b/IPython/nbconvert/utils/lexers.py
index 8fe0ff9..ee0b892 100644
--- a/IPython/nbconvert/utils/lexers.py
+++ b/IPython/nbconvert/utils/lexers.py
@@ -50,6 +50,7 @@ line_re = re.compile('.*?\n')
 
 ipython_tokens = [
     (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+    (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
     (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
     (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
     (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
diff --git a/IPython/nbconvert/utils/tests/test_lexers.py b/IPython/nbconvert/utils/tests/test_lexers.py
index 351a60c..a18f892 100644
--- a/IPython/nbconvert/utils/tests/test_lexers.py
+++ b/IPython/nbconvert/utils/tests/test_lexers.py
@@ -40,6 +40,14 @@ class TestLexers(TestsBase):
         ] + tokens[1:]
         self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
 
+        fragment_2 = '\t %%!\n' + fragment[1:]
+        tokens_2 = [
+            (Token.Text, '\t '),
+            (Token.Operator, '%%!'),
+            (Token.Text, '\n'),
+        ] + tokens[1:]
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+
         fragment_2 = 'x = ' + fragment
         tokens_2 = [
             (Token.Name, 'x'),
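
For context (not part of the patch): the new rule recognizes a leading `%%!` cell magic, emits it as an Operator token, and hands the remainder of the cell to pygments' BashLexer via `using(BashLexer)`. A minimal sketch of exercising the change, assuming `IPythonLexer` is importable from `IPython.nbconvert.utils.lexers` (as the accompanying test module suggests) and that pygments is installed:

# Hedged usage sketch; IPythonLexer and the module path are assumptions
# based on this package, not asserted by the diff itself.
from IPython.nbconvert.utils.lexers import IPythonLexer

lexer = IPythonLexer()
cell = '\t %%!\necho hello\n'
for token_type, value in lexer.get_tokens(cell):
    print(token_type, repr(value))
# With the new rule, '%%!' comes back as an Operator token and the cell body
# ('echo hello') is tokenized by BashLexer rather than falling through to the
# generic '%%<magic>' rule, which would treat the body as plain Text.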