Simplify tokenizing code for VerboseTB.
Thomas Kluyver
@@ -832,33 +832,29 @@ class VerboseTB(TBTools):
                     # disabled.
                     call = tpl_call_fail % func
 
-            # Initialize a list of names on the current line, which the
-            # tokenizer below will populate.
-            names = []
-
-            def tokeneater(token_type, token, start, end, line):
-                """Stateful tokeneater which builds dotted names.
-
-                The list of names it appends to (from the enclosing scope) can
-                contain repeated composite names. This is unavoidable, since
-                there is no way to disambguate partial dotted structures until
-                the full list is known. The caller is responsible for pruning
-                the final list of duplicates before using it."""
-
-                # build composite names
-                if token == '.':
-                    try:
-                        names[-1] += '.'
-                        # store state so the next token is added for x.y.z names
-                        tokeneater.name_cont = True
-                        return
-                    except IndexError:
-                        pass
-                if token_type == tokenize.NAME and token not in keyword.kwlist:
-                    if tokeneater.name_cont:
-                        # Dotted names
-                        names[-1] += token
-                        tokeneater.name_cont = False
-                    else:
-                        # Regular new names. We append everything, the caller
-                        # will be responsible for pruning the list later. It's
+            def linereader(file=file, lnum=[lnum], getline=linecache.getline):
+                if file.endswith(('.pyc','.pyo')):
+                    file = pyfile.source_from_cache(file)
+                line = getline(file, lnum[0])
+                lnum[0] += 1
+                return line
+
+            # Build the list of names on this line of code where the exception
+            # occurred.
+            try:
+                names = []
+                name_cont = False
+
+                for token_type, token, start, end, line in generate_tokens(linereader):
+                    # build composite names
+                    if token_type == tokenize.NAME and token not in keyword.kwlist:
+                        if name_cont:
+                            # Continuation of a dotted name
+                            try:
+                                names[-1].append(token)
+                            except IndexError:
+                                names.append([token])
+                            name_cont = False
+                        else:
+                            # Regular new names. We append everything, the caller
+                            # will be responsible for pruning the list later. It's
@@ -866,27 +862,12 @@ class VerboseTB(TBTools):
-                        # names can fool us. The pruning at the end is easy
-                        # to do (or the caller can print a list with repeated
-                        # names if so desired.
-                        names.append(token)
-                elif token_type == tokenize.NEWLINE:
-                    raise IndexError
-            # we need to store a bit of state in the tokenizer to build
-            # dotted names
-            tokeneater.name_cont = False
-
-            def linereader(file=file, lnum=[lnum], getline=linecache.getline):
-                if file.endswith(('.pyc','.pyo')):
-                    file = pyfile.source_from_cache(file)
-                line = getline(file, lnum[0])
-                lnum[0] += 1
-                return line
-
-            # Build the list of names on this line of code where the exception
-            # occurred.
-            try:
-                # This builds the names list in-place by capturing it from the
-                # enclosing scope.
-                for token in generate_tokens(linereader):
-                    tokeneater(*token)
+                            # names can fool us. The pruning at the end is easy
+                            # to do (or the caller can print a list with repeated
+                            # names if so desired.
+                            names.append([token])
+                    elif token == '.':
+                        name_cont = True
+                    elif token_type == tokenize.NEWLINE:
+                        break
+
             except (IndexError, UnicodeDecodeError):
                 # signals exit of tokenizer
                 pass
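
The rewritten hunks above walk the tokens of the offending source line directly instead of routing them through a stateful tokeneater callback. Below is a minimal, self-contained sketch of that token-walking idea; collect_names is a hypothetical helper written only for illustration (it is not part of IPython), and it reads from a string via io.StringIO rather than from linecache the way linereader does.

import keyword
import tokenize
from io import StringIO

def collect_names(source_line):
    """Hypothetical helper: gather identifiers referenced on one source line,
    grouping dotted names (e.g. 'dict.fromkeys') the way the loop above does."""
    names = []         # each entry is a list of dotted-name parts
    name_cont = False  # True right after a '.' token
    readline = StringIO(source_line.rstrip('\n') + '\n').readline
    try:
        for tok_type, tok, start, end, line in tokenize.generate_tokens(readline):
            if tok_type == tokenize.NAME and tok not in keyword.kwlist:
                if name_cont:
                    # continuation of a dotted name
                    try:
                        names[-1].append(tok)
                    except IndexError:
                        names.append([tok])
                    name_cont = False
                else:
                    names.append([tok])
            elif tok == '.':
                name_cont = True
            elif tok_type == tokenize.NEWLINE:
                break
    except tokenize.TokenError:
        # a syntactically incomplete line (e.g. an unclosed bracket) is fine here
        pass
    return names

print(collect_names("value = dict.fromkeys(keys, 0)"))
# [['value'], ['dict', 'fromkeys'], ['keys']]

Keeping each name as a list of parts postpones the join until the whole line has been scanned, which is what lets the continuation branch simply append to names[-1].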
@@ -896,6 +877,8 @@ class VerboseTB(TBTools):
                   "The error message is: %s\n" % msg)
             error(_m)
 
+            # Join composite names (e.g. "dict.fromkeys")
+            names = ['.'.join(n) for n in names]
             # prune names list of duplicates, but keep the right order
             unique_names = uniq_stable(names)
 
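
The last hunk adds the join step that turns each list of parts back into a dotted string before the existing duplicate pruning. A rough sketch of that post-processing follows, with uniq_stable_sketch as a stand-in for IPython's uniq_stable, assumed here to keep the first occurrence of each name in order.

def uniq_stable_sketch(seq):
    # Stand-in for IPython's uniq_stable: drop duplicates while preserving
    # the order in which items first appeared.
    seen = set()
    out = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out

parts = [['self'], ['self', 'data'], ['np', 'mean'], ['self']]
names = ['.'.join(n) for n in parts]   # ['self', 'self.data', 'np.mean', 'self']
unique_names = uniq_stable_sketch(names)
print(unique_names)                    # ['self', 'self.data', 'np.mean']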