@@ -832,47 +832,6 @@ class VerboseTB(TBTools):
             # disabled.
             call = tpl_call_fail % func
 
-            # Initialize a list of names on the current line, which the
-            # tokenizer below will populate.
-            names = []
-
-            def tokeneater(token_type, token, start, end, line):
-                """Stateful tokeneater which builds dotted names.
-
-                The list of names it appends to (from the enclosing scope) can
-                contain repeated composite names.  This is unavoidable, since
-                there is no way to disambiguate partial dotted structures until
-                the full list is known.  The caller is responsible for pruning
-                the final list of duplicates before using it."""
-
-                # build composite names
-                if token == '.':
-                    try:
-                        names[-1] += '.'
-                        # store state so the next token is added for x.y.z names
-                        tokeneater.name_cont = True
-                        return
-                    except IndexError:
-                        pass
-                if token_type == tokenize.NAME and token not in keyword.kwlist:
-                    if tokeneater.name_cont:
-                        # Dotted names
-                        names[-1] += token
-                        tokeneater.name_cont = False
-                    else:
-                        # Regular new names.  We append everything; the caller
-                        # will be responsible for pruning the list later.  It's
-                        # very tricky to try to prune as we go, b/c composite
-                        # names can fool us.  The pruning at the end is easy
-                        # to do (or the caller can print a list with repeated
-                        # names if so desired).
-                        names.append(token)
-                elif token_type == tokenize.NEWLINE:
-                    raise IndexError
-            # we need to store a bit of state in the tokenizer to build
-            # dotted names
-            tokeneater.name_cont = False
-
             def linereader(file=file, lnum=[lnum], getline=linecache.getline):
                 if file.endswith(('.pyc','.pyo')):
                     file = pyfile.source_from_cache(file)
@@ -883,10 +842,32 @@ class VerboseTB(TBTools):
             # Build the list of names on this line of code where the exception
             # occurred.
             try:
-                # This builds the names list in-place by capturing it from the
-                # enclosing scope.
-                for token in generate_tokens(linereader):
-                    tokeneater(*token)
+                names = []
+                name_cont = False
+
+                for token_type, token, start, end, line in generate_tokens(linereader):
+                    # build composite names
+                    if token_type == tokenize.NAME and token not in keyword.kwlist:
+                        if name_cont:
+                            # Continuation of a dotted name
+                            try:
+                                names[-1].append(token)
+                            except IndexError:
+                                names.append([token])
+                            name_cont = False
+                        else:
+                            # Regular new names.  We append everything; the caller
+                            # will be responsible for pruning the list later.  It's
+                            # very tricky to try to prune as we go, b/c composite
+                            # names can fool us.  The pruning at the end is easy
+                            # to do (or the caller can print a list with repeated
+                            # names if so desired).
+                            names.append([token])
+                    elif token == '.':
+                        name_cont = True
+                    elif token_type == tokenize.NEWLINE:
+                        break
+
             except (IndexError, UnicodeDecodeError):
                 # signals exit of tokenizer
                 pass
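In place of the stateful tokeneater closure, this hunk scans the token stream in-line and accumulates each candidate name as a list of its dotted parts, exiting with a plain break on NEWLINE where the old closure raised IndexError to stop the tokenizer. The sketch below is a minimal standalone reconstruction of that loop, assuming only the standard library: it reads a single source line instead of IPython's linereader, and the helper name collect_name_parts is invented here for illustration.

import keyword
import tokenize
from io import StringIO

def collect_name_parts(source_line):
    # Hypothetical helper, not part of the patch: mirrors the loop added
    # above, returning each candidate name as a list of dotted parts.
    names = []
    name_cont = False
    readline = StringIO(source_line).readline
    try:
        for token_type, token, start, end, line in tokenize.generate_tokens(readline):
            if token_type == tokenize.NAME and token not in keyword.kwlist:
                if name_cont:
                    # Continuation of a dotted name, e.g. the 'y' in 'x.y'.
                    try:
                        names[-1].append(token)
                    except IndexError:
                        names.append([token])
                    name_cont = False
                else:
                    # New name; duplicates are pruned later by the caller.
                    names.append([token])
            elif token == '.':
                name_cont = True
            elif token_type == tokenize.NEWLINE:
                break
    except tokenize.TokenError:
        pass  # incomplete source line; keep whatever was collected
    return names

print(collect_name_parts("value = dict.fromkeys(keys, obj.attr)\n"))
# [['value'], ['dict', 'fromkeys'], ['keys'], ['obj', 'attr']]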
@@ -896,6 +877,8 @@ class VerboseTB(TBTools):
                       "The error message is: %s\n" % msg)
                 error(_m)
 
+            # Join composite names (e.g. "dict.fromkeys")
+            names = ['.'.join(n) for n in names]
             # prune names list of duplicates, but keep the right order
             unique_names = uniq_stable(names)
 
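The last hunk then flattens each part list into a dotted string before pruning. uniq_stable is IPython's existing order-preserving dedupe helper; the stand-in below is an assumption, written only to keep the example self-contained.

def uniq_stable(elems):
    # Assumed stand-in for IPython's uniq_stable: drop duplicates while
    # keeping the first occurrence of each element in order.
    seen = set()
    return [e for e in elems if not (e in seen or seen.add(e))]

parts = [['obj', 'attr'], ['dict', 'fromkeys'], ['obj', 'attr']]
names = ['.'.join(n) for n in parts]   # join composite names
unique_names = uniq_stable(names)      # prune, keeping original order
print(unique_names)
# ['obj.attr', 'dict.fromkeys']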