@@ -83,6 +83,11 @@ import tokenize
 import traceback
 import types
 
+try:                           # Python 2
+    generate_tokens = tokenize.generate_tokens
+except AttributeError:         # Python 3
+    generate_tokens = tokenize.tokenize
+
 # For purposes of monkeypatching inspect to fix a bug in it.
 from inspect import getsourcefile, getfile, getmodule,\
      ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
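The try/except above gives both Python lines a single name, generate_tokens, for the generator-style tokenizer: Python 2 exposes it as tokenize.generate_tokens, while on Python 3 tokenize.tokenize is itself a generator (the old callback-taking tokenize.tokenize(readline, tokeneater) is gone). A minimal standalone sketch of the generator style this shim standardizes on; the print_names helper is illustrative only, not part of the patch:

import io
import tokenize

def print_names(tok_type, tok_string, start, end, line):
    # Toy "tokeneater": report NAME tokens, roughly what the traceback code
    # does when it collects variable names from the failing source lines.
    if tok_type == tokenize.NAME:
        print(tok_string)

source = io.StringIO(u"result = compute(data)\n")
for tok in tokenize.generate_tokens(source.readline):
    print_names(*tok)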
@@ -94,6 +99,7 @@ from IPython.core.display_trap import DisplayTrap
 from IPython.core.excolors import exception_colors
 from IPython.utils import PyColorize
 from IPython.utils import io
+from IPython.utils import py3compat
 from IPython.utils.data import uniq_stable
 from IPython.utils.warn import info, error
 
@@ -278,8 +284,7 @@ def _format_traceback_lines(lnum, index, lines, Colors, lvals=None,scheme=None):
         # serious refactoring, so that all of the ultratb and PyColorize code
         # is unicode-safe. So for now this is rather an ugly hack, but
         # necessary to at least have readable tracebacks. Improvements welcome!
-        if type(line)==unicode:
-            line = line.encode('utf-8', 'replace')
+        line = py3compat.cast_bytes_py2(line, 'utf-8')
 
         new_line, err = _line_format(line, 'str', scheme)
         if not err: line = new_line
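The two removed lines and their replacement do the same work on Python 2: cast_bytes_py2 encodes unicode input to UTF-8 bytes there and is a no-op on Python 3 (see the py3compat hunks further down, where it is bound to cast_bytes and no_code respectively). A rough standalone sketch of that behaviour; the helper bodies here are assumptions, not copies of py3compat:

import sys

def _no_code(s, encoding=None):
    # Python 3 binding: tracebacks are already text, leave them alone.
    return s

def _cast_bytes(s, encoding='utf-8'):
    # Python 2 binding: the colorizer wants byte strings, so encode unicode.
    if not isinstance(s, bytes):
        return s.encode(encoding, 'replace')
    return s

cast_bytes_py2 = _no_code if sys.version_info[0] >= 3 else _cast_bytes

line = cast_bytes_py2(u"x = caf\xe9", 'utf-8')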
@@ -872,7 +877,8 @@ class VerboseTB(TBTools):
             try:
                 # This builds the names list in-place by capturing it from the
                 # enclosing scope.
-                tokenize.tokenize(linereader, tokeneater)
+                for token in generate_tokens(linereader):
+                    tokeneater(*token)
             except IndexError:
                 # signals exit of tokenizer
                 pass
@@ -933,7 +939,7 @@ class VerboseTB(TBTools):
         # ... and format it
         exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
                                      ColorsNormal, evalue_str)]
-        if type(evalue) is types.InstanceType:
+        if (not py3compat.PY3) and type(evalue) is types.InstanceType:
             try:
                 names = [w for w in dir(evalue) if isinstance(w, basestring)]
             except:
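The added `not py3compat.PY3` guard matters because types.InstanceType (the type of old-style class instances) only exists on Python 2; on Python 3 the attribute lookup itself would raise AttributeError. Since `and` short-circuits, the right-hand operand is never evaluated there. A tiny sketch of the same pattern, assuming py3compat.PY3 is the usual version flag:

import sys
import types

PY3 = sys.version_info[0] >= 3   # stand-in for py3compat.PY3

class Old:          # old-style class on Python 2, ordinary class on Python 3
    pass

evalue = Old()
# Safe on both versions: on Python 3 the left operand is False, so
# types.InstanceType is never looked up.
is_old_style_instance = (not PY3) and type(evalue) is types.InstanceType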
@@ -42,6 +42,13 @@ import sys
 import token
 import tokenize
 
+try:
+    generate_tokens = tokenize.generate_tokens
+except AttributeError:
+    # Python 3. Note that we use the undocumented _tokenize because it expects
+    # strings, not bytes. See also Python issue #9969.
+    generate_tokens = tokenize._tokenize
+
 from IPython.utils.coloransi import *
 
 #############################################################################
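The comment in the hunk spells out why this file reaches for the undocumented _tokenize: on Python 3 the public tokenize.tokenize() wants a bytes readline (it sniffs the encoding cookie itself), whereas PyColorize already holds decoded text. A Python 3 sketch of that bytes/str split using only public APIs; it does not reproduce the _tokenize call itself:

import io
import tokenize

# Public entry point: expects *bytes*, because it detects the source encoding.
for tok in tokenize.tokenize(io.BytesIO(b"x = 1\n").readline):
    pass

# Str-based entry point: works on already-decoded lines, which is the
# situation PyColorize is in when it colorizes source it has read as text.
for tok in tokenize.generate_tokens(io.StringIO("x = 1\n").readline):
    pass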
@@ -177,7 +184,8 @@ class Parser:
 
         error = False
         try:
-            tokenize.tokenize(text.readline, self)
+            for token in generate_tokens(text.readline):
+                self(*token)
         except tokenize.TokenError, ex:
             msg = ex[0]
             line = ex[1][0]
@@ -35,6 +35,7 @@ if sys.version_info[0] >= 3:
     unicode_to_str = no_code
     str_to_bytes = encode
     bytes_to_str = decode
+    cast_bytes_py2 = no_code
 
     def isidentifier(s, dotted=False):
         if dotted:
@@ -53,6 +54,7 @@ else:
     unicode_to_str = encode
     str_to_bytes = no_code
     bytes_to_str = no_code
+    cast_bytes_py2 = cast_bytes
 
     import re
     _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")