run formatter
Matthias Bussonnier
@@ -152,7 +152,12 @@ def find_next_indent(code):
     if not tokens:
         return 0
 
-    while (tokens[-1].type in {tokenize.DEDENT, tokenize.NEWLINE, tokenize.COMMENT, tokenize.ERRORTOKEN}):
+    while tokens[-1].type in {
+        tokenize.DEDENT,
+        tokenize.NEWLINE,
+        tokenize.COMMENT,
+        tokenize.ERRORTOKEN,
+    }:
         tokens.pop()
 
     # Starting in Python 3.12, the tokenize module adds implicit newlines at the end
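
Context for the popped token types: `tokenize` always finishes a stream with bookkeeping tokens (NEWLINE/NL, DEDENTs, ENDMARKER, plus the implicit newline mentioned in the comment on Python 3.12+), so `find_next_indent` discards them before looking at the last meaningful token. A standard-library-only sketch, independent of IPython, that shows those trailing tokens:

    import io
    import tokenize

    code = "if x:\n    y = 1\n"
    tokens = list(tokenize.generate_tokens(io.StringIO(code).readline))
    # The stream ends with NEWLINE, DEDENT, ENDMARKER rather than real code,
    # which is why the loop above pops such tokens before inspecting the rest.
    for tok in tokens[-4:]:
        print(tokenize.tok_name[tok.type], repr(tok.string))
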
@@ -292,17 +292,18 @@ class SystemAssign(TokenTransformBase):
     def find_post_312(cls, tokens_by_line):
         for line in tokens_by_line:
             assign_ix = _find_assign_op(line)
-            if (assign_ix is not None) \
-                    and not line[assign_ix].line.strip().startswith('=') \
-                    and (len(line) >= assign_ix + 2) \
-                    and (line[assign_ix + 1].type == tokenize.OP) \
-                    and (line[assign_ix + 1].string == '!'):
+            if (
+                (assign_ix is not None)
+                and not line[assign_ix].line.strip().startswith("=")
+                and (len(line) >= assign_ix + 2)
+                and (line[assign_ix + 1].type == tokenize.OP)
+                and (line[assign_ix + 1].string == "!")
+            ):
                 return cls(line[assign_ix + 1].start)
 
     @classmethod
     def find(cls, tokens_by_line):
-        """Find the first system assignment (a = !foo) in the cell.
-        """
+        """Find the first system assignment (a = !foo) in the cell."""
         if sys.version_info < (3, 12):
             return cls.find_pre_312(tokens_by_line)
         return cls.find_post_312(tokens_by_line)
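
For readers unfamiliar with the transform being located here: `SystemAssign` rewrites a system assignment such as `a = !ls` into a `get_ipython().getoutput(...)` call; the version branch exists because the `!` token is reported differently by the tokenizer before and after Python 3.12. A quick way to see the transform end to end (assuming IPython is importable; exact output formatting may vary by version):

    from IPython.core.inputtransformer2 import TransformerManager

    tm = TransformerManager()
    print(tm.transform_cell("a = !ls -l"))
    # expected, roughly: a = get_ipython().getoutput('ls -l')
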
@@ -531,8 +532,9 @@ def make_tokens_by_line(lines:List[str]):
     )
     parenlev = 0
     try:
-        for token in tokenutil.generate_tokens_catch_errors(iter(lines).__next__,
-                extra_errors_to_catch=['expected EOF']):
+        for token in tokenutil.generate_tokens_catch_errors(
+            iter(lines).__next__, extra_errors_to_catch=["expected EOF"]
+        ):
             tokens_by_line[-1].append(token)
             if (token.type == NEWLINE) \
                 or ((token.type == NL) and (parenlev <= 0)):
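
`make_tokens_by_line` groups tokens into logical lines so that later transformers can reason per statement; `"expected EOF"` is added to the tolerated error messages on top of the defaults defined in `tokenutil`, presumably because cells are routinely tokenized before they are syntactically finished. A sketch of calling the helper directly (it is an internal helper, so the exact grouping details are an implementation detail):

    from IPython.core.inputtransformer2 import make_tokens_by_line

    lines = ["a = (1 +\n", "     2)\n", "b = 3\n"]
    for group in make_tokens_by_line(lines):
        # One entry per logical line; the parenthesised continuation stays together.
        print([tok.string for tok in group if tok.string.strip()])
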
@@ -701,8 +703,8 @@ class TransformerManager:
         for line in reversed(lines):
             if not line.strip():
                 continue
-            elif line.strip('\n').endswith('\\'):
-                return 'incomplete', find_last_indent(lines)
+            elif line.strip("\n").endswith("\\"):
+                return "incomplete", find_last_indent(lines)
             else:
                 break
 
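
This branch is part of `TransformerManager.check_complete`: a trailing backslash means the statement cannot be finished yet, so the front end should keep prompting. A usage sketch (the second tuple element is the suggested indent, `None` when the input is complete):

    from IPython.core.inputtransformer2 import TransformerManager

    tm = TransformerManager()
    print(tm.check_complete("x = 1 + \\\n"))  # ('incomplete', <indent>)
    print(tm.check_complete("x = 1\n"))       # ('complete', None)
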
@@ -742,8 +744,10 @@ class TransformerManager:
         if not tokens_by_line:
             return 'incomplete', find_last_indent(lines)
 
-        if (tokens_by_line[-1][-1].type != tokenize.ENDMARKER
-                and tokens_by_line[-1][-1].type != tokenize.ERRORTOKEN):
+        if (
+            tokens_by_line[-1][-1].type != tokenize.ENDMARKER
+            and tokens_by_line[-1][-1].type != tokenize.ERRORTOKEN
+        ):
             # We're in a multiline string or expression
             return 'incomplete', find_last_indent(lines)
 
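
The ENDMARKER/ERRORTOKEN check is what keeps unterminated constructs, such as an open string or bracket, classified as incomplete rather than invalid, complementing the backslash case above. For example (a hedged sketch; the exact indent value depends on the input):

    from IPython.core.inputtransformer2 import TransformerManager

    tm = TransformerManager()
    print(tm.check_complete('s = """still inside a string\n'))  # ('incomplete', ...)
    print(tm.check_complete("items = [1, 2,\n"))                # ('incomplete', ...)
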
@@ -297,6 +297,7 @@ def test_find_assign_op_dedent():
         _find_assign_op([Tk(s) for s in ("", "(", "a", "=", "b", ")", "=", "5")]) == 6
     )
 
+
 extra_closing_paren_param = (
     pytest.param("(\n))", "invalid", None)
     if sys.version_info >= (3, 12)
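
The assertion shown in this test hunk encodes the behaviour under test: `_find_assign_op` skips any `=` that sits inside brackets and returns the index of the first top-level assignment operator (index 6 in that token sequence). The same behaviour sketched against the real tokenizer (illustrative only, not part of the test file):

    import io
    import tokenize
    from IPython.core.inputtransformer2 import _find_assign_op

    tokens = list(tokenize.generate_tokens(io.StringIO("d[k == 1] = 5\n").readline))
    ix = _find_assign_op(tokens)
    print(ix, tokens[ix].string)  # index of the top-level '=' token; the '==' inside brackets is ignored
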
@@ -21,9 +21,13 @@ def generate_tokens(readline):
         # catch EOF error
         return
 
+
 def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
-    default_errors_to_catch = ['unterminated string literal', 'invalid non-printable character',
-                               'after line continuation character']
+    default_errors_to_catch = [
+        "unterminated string literal",
+        "invalid non-printable character",
+        "after line continuation character",
+    ]
     assert extra_errors_to_catch is None or isinstance(extra_errors_to_catch, list)
     errors_to_catch = default_errors_to_catch + (extra_errors_to_catch or [])
 
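
`generate_tokens_catch_errors` wraps `tokenize.generate_tokens` so that the listed tokenizer error messages (plus any caller-supplied extras) degrade into an ERRORTOKEN instead of propagating, which matters mainly on Python 3.12+, where the tokenize module raises for some input the older pure-Python tokenizer tolerated. A hedged usage sketch (the exact token sequence differs between Python versions):

    import io
    import tokenize
    from IPython.utils import tokenutil

    code = 's = "unterminated\n'
    for tok in tokenutil.generate_tokens_catch_errors(io.StringIO(code).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
    # We still get a token stream that includes an ERRORTOKEN, rather than an
    # exception escaping to the caller.
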
@@ -40,12 +44,13 @@ def generate_tokens_catch_errors(readline, extra_errors_to_catch=None):
                 line = tokens[-1].line
             else:
                 start = end = (1, 0)
-                line = ''
-            yield tokenize.TokenInfo(tokenize.ERRORTOKEN, '', start, end, line)
+                line = ""
+            yield tokenize.TokenInfo(tokenize.ERRORTOKEN, "", start, end, line)
         else:
             # Catch EOF
             raise
 
+
 def line_at_cursor(cell, cursor_pos=0):
     """Return the line in a cell at a given cursor position
 
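
`line_at_cursor`, the next function in this module, is a small helper that maps a flat cursor offset in a cell back to the physical line containing it, returning the line together with the offset at which that line starts. An illustrative call:

    from IPython.utils.tokenutil import line_at_cursor

    cell = "first line\nsecond line\n"
    pos = len("first line\n") + 3          # a cursor somewhere in the second line
    line, offset = line_at_cursor(cell, cursor_pos=pos)
    print(repr(line), offset)              # the second line, and its starting offset
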