Some more optional typing to make mypy happy
Matthias Bussonnier
@@ -13,7 +13,7 @@ deprecated in 7.0.
 from codeop import compile_command
 import re
 import tokenize
-from typing import List, Tuple, Union
+from typing import List, Tuple, Optional, Any
 import warnings
 
 _indent_re = re.compile(r'^[ \t]+')
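An aside on the import swap, not part of the commit itself: typing.Optional[X] is just shorthand for typing.Union[X, None], so rewriting the annotations below in terms of Optional changes nothing at runtime. A minimal sketch, using a hypothetical helper name:

    from typing import Optional, Union

    def head_or_none(xs: list) -> Optional[int]:
        # Identical to -> Union[int, None]; mypy treats the two spellings the same.
        return xs[0] if xs else None

    assert Optional[int] == Union[int, None]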
@@ -100,7 +100,7 @@ def cell_magic(lines):
             % (magic_name, first_line, body)]
 
 
-def _find_assign_op(token_line) -> Union[int, None]:
+def _find_assign_op(token_line) -> Optional[int]:
     """Get the index of the first assignment in the line ('=' not inside brackets)
 
     Note: We don't try to support multiple special assignment (a = b = %foo)
@@ -115,6 +115,7 @@ def _find_assign_op(token_line) -> Union[int, None]:
         elif s in {')', ']', '}'}:
             if paren_level > 0:
                 paren_level -= 1
+    return None
 
 def find_end_of_continued_line(lines, start_line: int):
     """Find the last line of a line explicitly extended using backslashes.
@@ -475,8 +476,10 @@ def make_tokens_by_line(lines:List[str]):
     # lines or comments. This is intentional - see https://bugs.python.org/issue17061
     # We want to group the former case together but split the latter, so we
     # track parentheses level, similar to the internals of tokenize.
-    NEWLINE, NL = tokenize.NEWLINE, tokenize.NL
-    tokens_by_line = [[]]
+
+    # reexported from token on 3.7+
+    NEWLINE, NL = tokenize.NEWLINE, tokenize.NL  # type: ignore
+    tokens_by_line:List[List[Any]] = [[]]
     if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')):
         warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified")
     parenlev = 0
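Two distinct fixes sit in this hunk. The # type: ignore covers NEWLINE and NL, which exist on the tokenize module at runtime but presumably were not declared by the stubs mypy consulted at the time (the code comment points at their re-export from the token module on 3.7+). The List[List[Any]] annotation is needed because mypy generally cannot infer an element type for a nested empty list. A minimal sketch of the latter, assuming plain mypy defaults and hypothetical names:

    from typing import Any, List

    rows: List[List[Any]] = [[]]    # without the annotation, mypy typically reports
                                    # 'Need type annotation for "rows"'
    rows[-1].append(("NAME", "x"))  # fine: the inner lists accept Any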
@@ -661,7 +664,7 @@ class TransformerManager:
             # We're in a multiline string or expression
             return 'incomplete', find_last_indent(lines)
 
-        newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER}
+        newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER}  # type: ignore
 
         # Pop the last line which only contains DEDENTs and ENDMARKER
         last_token_line = None
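As in the previous hunk, the # type: ignore silences only this line; the rest of the module is still checked. Newer mypy releases also accept error-code-scoped ignores, which are narrower. A hedged sketch of that mechanism (attr-defined is only an example code, not necessarily the one mypy reports here):

    import tokenize

    newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER}  # type: ignore[attr-defined]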
@@ -2,4 +2,4 @@
 This package contains all third-party modules bundled with IPython.
 """
 
-__all__ = []
+__all__: List[str] = []
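Annotating the empty __all__ gives mypy a concrete element type for the list; this assumes List is imported from typing in that module, which the hunk shown does not include. A minimal sketch with a hypothetical entry name:

    from typing import List

    __all__: List[str] = []
    __all__.append("bundled_module")  # checks as str; appending a non-str would be flagged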