diff --git a/IPython/nbconvert/utils/lexers.py b/IPython/nbconvert/utils/lexers.py
index 99bd770..55aa070 100644
--- a/IPython/nbconvert/utils/lexers.py
+++ b/IPython/nbconvert/utils/lexers.py
@@ -53,6 +53,10 @@ ipython_tokens = [
                                 using(BashLexer), Text)),
     (r'(\%+)(\w+)\b', bygroups(Operator, Keyword)),
     (r'^(!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+    (r'^(.+)(=)(\s*)(!)(.+)(\n)', bygroups(
+        # With the limited syntax allowed on the l.h.s. of a shell capture,
+        # we don't need to differentiate between Python 2 and 3.
+        using(Python3Lexer), Operator, Text, Operator, using(BashLexer), Text)),
 ]

 def build_ipy_lexer(python3):
diff --git a/IPython/nbconvert/utils/tests/test_lexers.py b/IPython/nbconvert/utils/tests/test_lexers.py
index d2371c1..b7a7ea4 100644
--- a/IPython/nbconvert/utils/tests/test_lexers.py
+++ b/IPython/nbconvert/utils/tests/test_lexers.py
@@ -33,3 +33,21 @@ class TestLexers(TestsBase):
             (Token.Text, '\n'),
         ]
         self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+        fragment_2 = 'x = ' + fragment
+        tokens_2 = [
+            (Token.Name, 'x'),
+            (Token.Text, ' '),
+            (Token.Operator, '='),
+            (Token.Text, ' '),
+        ] + tokens
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+        fragment_2 = 'x, = ' + fragment
+        tokens_2 = [
+            (Token.Name, 'x'),
+            (Token.Punctuation, ','),
+            (Token.Text, ' '),
+            (Token.Operator, '='),
+            (Token.Text, ' '),
+        ] + tokens
+        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
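
The sketch below (not part of the patch) shows how the new shell-capture rule is expected to behave when the lexer is exercised directly. It assumes the IPythonLexer class exported by IPython/nbconvert/utils/lexers.py; the get_tokens() call is the standard pygments Lexer API, which is what the tests above use via self.lexer.get_tokens().

# Minimal usage sketch, assuming IPythonLexer is exported by the module
# touched in this patch.
from IPython.nbconvert.utils.lexers import IPythonLexer

lexer = IPythonLexer()
fragment = 'x = !ls\n'

for token_type, value in lexer.get_tokens(fragment):
    # Expected, per the new rule and the tests above: Name 'x', Text ' ',
    # Operator '=', Text ' ', Operator '!', then the remainder of the line
    # tokenized by the embedded BashLexer.
    print(token_type, repr(value))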