Fix bug when network map is disconnected (#13925)

Hi there :) I encountered a small bug with the IPython logger. When using the `%logstart` magic and writing the log to a remote path, everything works fine until the network drive gets disconnected. Once the drive is gone you are basically powerless, and you have to reopen the console.

This is easy to reproduce by sharing a Windows folder:

![image](https://user-images.githubusercontent.com/23289491/216761685-1bbcc92a-13e4-409c-badf-513f610fef46.png)

mounting it as a network drive:

![image](https://user-images.githubusercontent.com/23289491/216761695-8b413c89-b4f8-4423-b9c0-d68c2997af38.png)

then starting an IPython session, starting a log with `%logstart Z:\log`, and disconnecting the share. From that point on, anything you try to run fails with `OSError: [Errno 22] Invalid argument` and the session is unusable.

I couldn't reproduce it in a Linux environment, so I guess it really depends on the OS here. Oddly, this only happens when the client closes the connection; if the server closes the connection to the mount, everything keeps working.

I added a `try`/`except` block so that the logger is stopped and you can keep using IPython normally. I searched for a way to test this, but couldn't find an easy way to reproduce it in a test.
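The change is essentially a guard of this shape (a minimal sketch, not the exact patch; `log_write`, `logfile`, `log_active`, and `logstop` follow IPython's `Logger` naming, but treat the details here as illustrative):

```python
# Sketch of the guard around the logger's write path (illustrative; the
# real change lives in IPython's logger code).
def log_write(self, data, kind="input"):
    if not self.log_active:
        return
    try:
        self.logfile.write(data)
        self.logfile.flush()
    except OSError:
        # The log target vanished (e.g. a Windows network drive was
        # disconnected). Stop logging instead of breaking the session.
        print("Failed to write to the log file; stopping the logger.")
        self.logstop()
```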

tokenutil.py
"""Token-related utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from collections import namedtuple
from io import StringIO
from keyword import iskeyword
import tokenize
Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
def generate_tokens(readline):
    """wrap generate_tokens to catch EOF errors"""
    try:
        for token in tokenize.generate_tokens(readline):
            yield token
    except tokenize.TokenError:
        # catch EOF error
        return
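# Example (illustrative): tokenizing an incomplete cell such as "foo("
# makes tokenize.generate_tokens raise TokenError at EOF; this wrapper
# yields the tokens it can and then returns quietly, e.g.
#
#     list(generate_tokens(StringIO("foo(").readline))  # no exception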
def line_at_cursor(cell, cursor_pos=0):
    """Return the line in a cell at a given cursor position

    Used for calling line-based APIs that don't support multi-line input, yet.

    Parameters
    ----------
    cell : str
        multiline block of text
    cursor_pos : integer
        the cursor position

    Returns
    -------
    (line, offset): (string, integer)
        The line with the current cursor, and the character offset of the start of the line.
    """
    offset = 0
    lines = cell.splitlines(True)
    for line in lines:
        next_offset = offset + len(line)
        if not line.endswith('\n'):
            # If the last line doesn't have a trailing newline, treat it as if
            # it does so that the cursor at the end of the line still counts
            # as being on that line.
            next_offset += 1
        if next_offset > cursor_pos:
            break
        offset = next_offset
    else:
        line = ""
    return (line, offset)
def token_at_cursor(cell, cursor_pos=0):
    """Get the token at a given cursor

    Used for introspection.

    Function calls are prioritized, so the token for the callable will be returned
    if the cursor is anywhere inside the call.

    Parameters
    ----------
    cell : unicode
        A block of Python code
    cursor_pos : int
        The location of the cursor in the block where the token should be found
    """
    names = []
    tokens = []
    call_names = []
    offsets = {1: 0}  # lines start at 1
    for tup in generate_tokens(StringIO(cell).readline):
        tok = Token(*tup)
        # token, text, start, end, line = tup
        start_line, start_col = tok.start
        end_line, end_col = tok.end
        if end_line + 1 not in offsets:
            # keep track of offsets for each line
            lines = tok.line.splitlines(True)
            for lineno, line in enumerate(lines, start_line + 1):
                if lineno not in offsets:
                    offsets[lineno] = offsets[lineno - 1] + len(line)

        offset = offsets[start_line]
        # allow '|foo' to find 'foo' at the beginning of a line
        boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
        if offset + start_col >= boundary:
            # current token starts after the cursor,
            # don't consume it
            break
        if tok.token == tokenize.NAME and not iskeyword(tok.text):
            if names and tokens and tokens[-1].token == tokenize.OP and tokens[-1].text == '.':
                names[-1] = "%s.%s" % (names[-1], tok.text)
            else:
                names.append(tok.text)
        elif tok.token == tokenize.OP:
            if tok.text == '=' and names:
                # don't inspect the lhs of an assignment
                names.pop(-1)
            if tok.text == '(' and names:
                # if we are inside a function call, inspect the function
                call_names.append(names[-1])
            elif tok.text == ')' and call_names:
                call_names.pop(-1)

        tokens.append(tok)
        if offsets[end_line] + end_col > cursor_pos:
            # we found the cursor, stop reading
            break

    if call_names:
        return call_names[-1]
    elif names:
        return names[-1]
    else:
        return ''
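A short usage sketch of the two public helpers (the expected values follow from the logic above, traced by hand rather than taken from the test suite):

```python
cell = "print(len('abc'))"
# cursor inside the argument of len(): function calls are prioritized,
# so the innermost callable's name is returned
token_at_cursor(cell, cursor_pos=12)            # -> 'len'

# line_at_cursor returns the line under the cursor and the character
# offset of that line's first character
line_at_cursor("a = 1\nb = 2\n", cursor_pos=8)  # -> ('b = 2\n', 6)
```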