@@ -13,7 +13,7 deprecated in 7.0. | |||
|
13 | 13 | from codeop import compile_command |
|
14 | 14 | import re |
|
15 | 15 | import tokenize |
|
16 | from typing import List, Tuple | |
|
16 | from typing import List, Tuple, Union | |
|
17 | 17 | import warnings |
|
18 | 18 | |
|
19 | 19 | _indent_re = re.compile(r'^[ \t]+') |
@@ -87,7 +87,7 def cell_magic(lines): | |||
|
87 | 87 | % (magic_name, first_line, body)] |
|
88 | 88 | |
|
89 | 89 | |
|
90 | def _find_assign_op(token_line): | |
|
90 | def _find_assign_op(token_line) -> Union[int, None]: | |
|
91 | 91 | """Get the index of the first assignment in the line ('=' not inside brackets) |
|
92 | 92 | |
|
93 | 93 | Note: We don't try to support multiple special assignment (a = b = %foo) |
@@ -97,9 +97,9 def _find_assign_op(token_line): | |||
|
97 | 97 | s = ti.string |
|
98 | 98 | if s == '=' and paren_level == 0: |
|
99 | 99 | return i |
|
100 | if s in '([{': | |
|
100 | if s in {'(','[','{'}: | |
|
101 | 101 | paren_level += 1 |
|
102 | elif s in ')]}': | |
|
102 | elif s in {')', ']', '}'}: | |
|
103 | 103 | if paren_level > 0: |
|
104 | 104 | paren_level -= 1 |
|
105 | 105 | |
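The change on lines 100 and 102 swaps string membership for set membership. The reason (made explicit by the new test_find_assign_op_dedent below) is that tokenize emits tokens such as DEDENT whose string is empty, and the empty string is a substring of every string, so the old check wrongly bumped the paren level. A minimal Python sketch of the difference:

    s = ''                        # e.g. the .string of a DEDENT token
    print(s in '([{')             # True  - substring containment matches ''
    print(s in {'(', '[', '{'})   # False - set membership ignores empty tokens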
@@ -449,11 +449,14 class HelpEnd(TokenTransformBase): | |||
|
449 | 449 | |
|
450 | 450 | return lines_before + [new_line] + lines_after |
|
451 | 451 | |
|
452 | def make_tokens_by_line(lines): | |
|
452 | def make_tokens_by_line(lines:List[str]): | |
|
453 | 453 | """Tokenize a series of lines and group tokens by line. |
|
454 | 454 | |
|
455 | The tokens for a multiline Python string or expression are | |
|
456 | grouped as one line. | |
|
455 | The tokens for a multiline Python string or expression are grouped as one | |
|
456 | line. All lines except the last line should keep their line endings ('\\n', | |
|
457 | '\\r\\n') for this to work properly. Use `.splitlines(keepends=True)`, for | |
|
458 | example, when passing a block of text to this function. | |
|
459 | ||
|
457 | 460 | """ |
|
458 | 461 | # NL tokens are used inside multiline expressions, but also after blank |
|
459 | 462 | # lines or comments. This is intentional - see https://bugs.python.org/issue17061 |
@@ -461,6 +464,8 def make_tokens_by_line(lines): | |||
|
461 | 464 | # track parentheses level, similar to the internals of tokenize. |
|
462 | 465 | NEWLINE, NL = tokenize.NEWLINE, tokenize.NL |
|
463 | 466 | tokens_by_line = [[]] |
|
467 | if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')): | |
|
468 | warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified") | |
|
464 | 469 | parenlev = 0 |
|
465 | 470 | try: |
|
466 | 471 | for token in tokenize.generate_tokens(iter(lines).__next__): |
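For callers, the new docstring and warning mean the input should be split with keepends=True so every line keeps its terminator. A hedged usage sketch (the cell text below is illustrative, not taken from the patch):

    from IPython.core.inputtransformer2 import make_tokens_by_line

    cell = "a = (1 +\n     2)\nb = 3\n"   # illustrative multiline cell
    tokens_by_line = make_tokens_by_line(cell.splitlines(keepends=True))
    # The expression spanning the first two physical lines is grouped into a
    # single entry of tokens_by_line, as the docstring describes.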
@@ -8,7 +8,7 import nose.tools as nt | |||
|
8 | 8 | import string |
|
9 | 9 | |
|
10 | 10 | from IPython.core import inputtransformer2 as ipt2 |
|
11 | from IPython.core.inputtransformer2 import make_tokens_by_line | |
|
11 | from IPython.core.inputtransformer2 import make_tokens_by_line, _find_assign_op | |
|
12 | 12 | |
|
13 | 13 | from textwrap import dedent |
|
14 | 14 | |
@@ -53,6 +53,22 b = get_ipython().getoutput('foo bar') | |||
|
53 | 53 | g() |
|
54 | 54 | """.splitlines(keepends=True)) |
|
55 | 55 | |
|
56 | ##### | |
|
57 | ||
|
58 | MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT = ("""\ | |
|
59 | def test(): | |
|
60 | for i in range(1): | |
|
61 | print(i) | |
|
62 | res =! ls | |
|
63 | """.splitlines(keepends=True), (4, 7), '''\ | |
|
64 | def test(): | |
|
65 | for i in range(1): | |
|
66 | print(i) | |
|
67 | res =get_ipython().getoutput(\' ls\') | |
|
68 | '''.splitlines(keepends=True)) | |
|
69 | ||
|
70 | ###### | |
|
71 | ||
|
56 | 72 | AUTOCALL_QUOTE = ( |
|
57 | 73 | [",f 1 2 3\n"], (1, 0), |
|
58 | 74 | ['f("1", "2", "3")\n'] |
@@ -103,6 +119,7 b) = zip? | |||
|
103 | 119 | [r"get_ipython().set_next_input('(a,\nb) = zip');get_ipython().run_line_magic('pinfo', 'zip')" + "\n"] |
|
104 | 120 | ) |
|
105 | 121 | |
|
122 | ||
|
106 | 123 | def null_cleanup_transformer(lines): |
|
107 | 124 | """ |
|
108 | 125 | A cleanup transform that returns an empty list. |
@@ -144,18 +161,21 def test_continued_line(): | |||
|
144 | 161 | def test_find_assign_magic(): |
|
145 | 162 | check_find(ipt2.MagicAssign, MULTILINE_MAGIC_ASSIGN) |
|
146 | 163 | check_find(ipt2.MagicAssign, MULTILINE_SYSTEM_ASSIGN, match=False) |
|
164 | check_find(ipt2.MagicAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT, match=False) | |
|
147 | 165 | |
|
148 | 166 | def test_transform_assign_magic(): |
|
149 | 167 | check_transform(ipt2.MagicAssign, MULTILINE_MAGIC_ASSIGN) |
|
150 | 168 | |
|
151 | 169 | def test_find_assign_system(): |
|
152 | 170 | check_find(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN) |
|
171 | check_find(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT) | |
|
153 | 172 | check_find(ipt2.SystemAssign, (["a = !ls\n"], (1, 5), None)) |
|
154 | 173 | check_find(ipt2.SystemAssign, (["a=!ls\n"], (1, 2), None)) |
|
155 | 174 | check_find(ipt2.SystemAssign, MULTILINE_MAGIC_ASSIGN, match=False) |
|
156 | 175 | |
|
157 | 176 | def test_transform_assign_system(): |
|
158 | 177 | check_transform(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN) |
|
178 | check_transform(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT) | |
|
159 | 179 | |
|
160 | 180 | def test_find_magic_escape(): |
|
161 | 181 | check_find(ipt2.EscapedCommand, MULTILINE_MAGIC) |
@@ -203,6 +223,17 def test_transform_help(): | |||
|
203 | 223 | tf = ipt2.HelpEnd((1, 0), (2, 8)) |
|
204 | 224 | nt.assert_equal(tf.transform(HELP_MULTILINE[0]), HELP_MULTILINE[2]) |
|
205 | 225 | |
|
226 | def test_find_assign_op_dedent(): | |
|
227 | """ | |
|
228 | Be careful that empty tokens like dedent are not counted as parens. | |
|
229 | """ | |
|
230 | class Tk: | |
|
231 | def __init__(self, s): | |
|
232 | self.string = s | |
|
233 | ||
|
234 | nt.assert_equal(_find_assign_op([Tk(s) for s in ('','a','=','b')]), 2) | |
|
235 | nt.assert_equal(_find_assign_op([Tk(s) for s in ('','(', 'a','=','b', ')', '=' ,'5')]), 6) | |
|
236 | ||
|
206 | 237 | def test_check_complete(): |
|
207 | 238 | cc = ipt2.TransformerManager().check_complete |
|
208 | 239 | nt.assert_equal(cc("a = 1"), ('complete', None)) |
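Taken together, the patch makes a system assignment after a dedent transform correctly. A hedged end-to-end sketch, assuming an IPython build that includes this change and using TransformerManager from the same module as the tests above:

    from IPython.core.inputtransformer2 import TransformerManager

    cell = (
        "def test():\n"
        "    for i in range(1):\n"
        "        print(i)\n"
        "    res =! ls\n"
    )
    print(TransformerManager().transform_cell(cell))
    # Per MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT above, the last line should come
    # out as a call to get_ipython().getoutput(' ls').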