Silence tokenization errors in ultratb....
Craig Citro
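This change downgrades ultratb's "unexpected error occurred while tokenizing input" message from an error-level log to a debug-level one, since the tokenizer failures it reports are usually harmless. A minimal reproduction of the old symptom, assuming the gh-6864 scenario exercised by the new test, is to run a cell whose last traceback line is the close of a triple-quoted string:

# Hypothetical reproduction, run inside an IPython session (the cell spans
# two lines because the triple-quoted string contains a newline):
raise ValueError("""a
b""")

# Before this change, the ValueError traceback was preceded by an error log:
#   An unexpected error occurred while tokenizing input
#   The following traceback may be corrupted or invalid
#   The error message is: ('EOF in multi-line string', ...)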
@@ -2,6 +2,7 @@
 """Tests for IPython.core.ultratb
 """
 import io
+import logging
 import sys
 import os.path
 from textwrap import dedent
@@ -372,3 +373,28 @@ def test_handlers():
         handler(*sys.exc_info())
     buff.write('')
 
+
+class TokenizeFailureTest(unittest.TestCase):
+    """Tests related to https://github.com/ipython/ipython/issues/6864."""
+
+    def testLogging(self):
+        message = "An unexpected error occurred while tokenizing input"
+        cell = 'raise ValueError("""a\nb""")'
+
+        stream = io.StringIO()
+        handler = logging.StreamHandler(stream)
+        logger = logging.getLogger()
+        loglevel = logger.level
+        logger.addHandler(handler)
+        self.addCleanup(lambda: logger.removeHandler(handler))
+        self.addCleanup(lambda: logger.setLevel(loglevel))
+
+        logger.setLevel(logging.INFO)
+        with tt.AssertNotPrints(message):
+            ip.run_cell(cell)
+        self.assertNotIn(message, stream.getvalue())
+
+        logger.setLevel(logging.DEBUG)
+        with tt.AssertNotPrints(message):
+            ip.run_cell(cell)
+        self.assertIn(message, stream.getvalue())
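The test drives IPython through ip.run_cell and tt.AssertNotPrints, but the logging behaviour it asserts can be seen in isolation: a message emitted through the module-level logging.debug() only reaches a handler attached to the root logger once the root level is lowered to DEBUG. A standalone sketch of that pattern (illustrative only, not part of the test suite):

import io
import logging

stream = io.StringIO()
handler = logging.StreamHandler(stream)
root = logging.getLogger()
root.addHandler(handler)

root.setLevel(logging.INFO)
logging.debug("tokenizer noise")        # filtered out at INFO
assert "tokenizer noise" not in stream.getvalue()

root.setLevel(logging.DEBUG)
logging.debug("tokenizer noise")        # now reaches the handler
assert "tokenizer noise" in stream.getvalue()

root.removeHandler(handler)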
@@ -121,7 +121,7 @@ from IPython.utils import path as util_path
 from IPython.utils import py3compat
 from IPython.utils.data import uniq_stable
 from IPython.utils.terminal import get_terminal_size
-from logging import info, error
+from logging import info, error, debug
 
 import IPython.utils.colorable as colorable
 
@@ -952,10 +952,15 @@ class VerboseTB(TBTools):
             # - see gh-6300
             pass
         except tokenize.TokenError as msg:
+            # Tokenizing may fail for various reasons, many of which are
+            # harmless. (A good example is when the line in question is the
+            # close of a triple-quoted string, cf gh-6864). We don't want to
+            # show this to users, but want to make it available for debugging
+            # purposes.
             _m = ("An unexpected error occurred while tokenizing input\n"
                   "The following traceback may be corrupted or invalid\n"
                   "The error message is: %s\n" % msg)
-            error(_m)
+            debug(_m)
 
         # Join composite names (e.g. "dict.fromkeys")
         names = ['.'.join(n) for n in names]
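The new comment's example is easy to reproduce with the standard tokenize module: when VerboseTB tokenizes the single source line that closes a triple-quoted string, that line on its own looks like an unterminated string and tokenize raises TokenError. A rough sketch of that failure mode, assuming a line matching the cell from the gh-6864 report:

import io
import tokenize

# The second line of  raise ValueError("""a\nb""")  as it appears in the
# traceback frame; seen in isolation, b""" opens a string that never closes.
line = 'b""")\n'

try:
    for _ in tokenize.generate_tokens(io.StringIO(line).readline):
        pass
except tokenize.TokenError as err:
    print("harmless tokenizer failure:", err)   # ('EOF in multi-line string', ...)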