##// END OF EJS Templates
remove use of travis-wheels repo...
remove use of travis-wheels repo: it hasn't been updated in some time, since manylinux wheels are now available

File last commit:

r23477:df4f8623
r23550:959bcbe4
Show More
tokenutil.py
127 lines | 3.7 KiB | text/x-python | PythonLexer
MinRK
add utils.tokenutil for getting the token at a cursor offset
r16578 """Token-related utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from collections import namedtuple
from io import StringIO
from keyword import iskeyword
from . import tokenize2
from .py3compat import cast_unicode_py2
Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
def generate_tokens(readline):
    """Wrap tokenize2.generate_tokens, silently ending the stream on EOF errors.

    tokenize2 raises TokenError on incomplete input (e.g. an unterminated
    string or an open bracket at end of input); callers here only need the
    tokens produced so far, so the error is swallowed and iteration stops.
    """
    token_stream = tokenize2.generate_tokens(readline)
    while True:
        try:
            yield next(token_stream)
        except (StopIteration, tokenize2.TokenError):
            # end of input, or a truncated final token -- stop quietly
            return
def line_at_cursor(cell, cursor_pos=0):
    """Return the line in a cell at a given cursor position

    Used for calling line-based APIs that don't support multi-line input, yet.

    Parameters
    ----------
    cell : str
        multiline block of text
    cursor_pos : integer
        the cursor position

    Returns
    -------
    (line, offset) : (string, integer)
        The line containing the cursor, and the character offset of the
        start of that line within the cell.
    """
    start = 0
    current = ""
    found = False
    for current in cell.splitlines(True):
        end = start + len(current)
        if end >= cursor_pos:
            # cursor falls on (or at the very end of) this line
            found = True
            break
        start = end
    if not found:
        # cursor is beyond the end of the text, or the cell is empty:
        # report an empty line at the accumulated offset
        current = ""
    return (current, start)
def token_at_cursor(cell, cursor_pos=0):
    """Get the token at a given cursor

    Used for introspection.

    Function calls are prioritized, so the token for the callable will be returned
    if the cursor is anywhere inside the call.

    Parameters
    ----------

    cell : unicode
        A block of Python code
    cursor_pos : int
        The location of the cursor in the block where the token should be found

    Returns
    -------
    str
        The (possibly dotted) name under or just left of the cursor, the
        enclosing callable's name if the cursor is inside a call, or ''
        if no suitable token is found.
    """
    cell = cast_unicode_py2(cell)
    # names: dotted NAME tokens seen so far (e.g. 'a.b.c' accumulated piecewise)
    names = []
    tokens = []
    # call_names: stack of callables for currently-open '(' ... ')' pairs
    call_names = []

    # offsets maps a 1-indexed line number to the character offset of that
    # line's start within the cell, so (line, col) positions from tokenize
    # can be compared against the flat cursor_pos
    offsets = {1: 0} # lines start at 1
    for tup in generate_tokens(StringIO(cell).readline):

        tok = Token(*tup)

        # token, text, start, end, line = tup
        start_line, start_col = tok.start
        end_line, end_col = tok.end
        if end_line + 1 not in offsets:
            # keep track of offsets for each line
            # (tok.line may span several physical lines for multi-line tokens)
            lines = tok.line.splitlines(True)
            for lineno, line in enumerate(lines, start_line + 1):
                if lineno not in offsets:
                    offsets[lineno] = offsets[lineno-1] + len(line)

        offset = offsets[start_line]
        # allow '|foo' to find 'foo' at the beginning of a line
        boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
        if offset + start_col >= boundary:
            # current token starts after the cursor,
            # don't consume it
            break

        if tok.token == tokenize2.NAME and not iskeyword(tok.text):
            if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
                # continue a dotted name: fold this NAME into the previous one
                names[-1] = "%s.%s" % (names[-1], tok.text)
            else:
                names.append(tok.text)
        elif tok.token == tokenize2.OP:
            if tok.text == '=' and names:
                # don't inspect the lhs of an assignment
                names.pop(-1)
            if tok.text == '(' and names:
                # if we are inside a function call, inspect the function
                call_names.append(names[-1])
            elif tok.text == ')' and call_names:
                call_names.pop(-1)

        tokens.append(tok)
        if offsets[end_line] + end_col > cursor_pos:
            # we found the cursor, stop reading
            break

    # prefer the innermost unclosed call's callable over the nearest name
    if call_names:
        return call_names[-1]
    elif names:
        return names[-1]
    else:
        return ''