##// END OF EJS Templates
Simplify the code for picking json serialization functions to expose the logic better

File last commit:

r16580:de473a1c
r17294:4864a6a8
Show More
tokenutil.py
78 lines | 2.1 KiB | text/x-python | PythonLexer
MinRK
add utils.tokenutil for getting the token at a cursor offset
r16578 """Token-related utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import absolute_import, print_function
from collections import namedtuple
from io import StringIO
from keyword import iskeyword
from . import tokenize2
from .py3compat import cast_unicode_py2
# A single lexer token: numeric type, source text, (row, col) start/end
# positions as reported by tokenize, and the full source line it came from.
Token = namedtuple('Token', 'token text start end line')
def generate_tokens(readline):
    """Yield tokens from ``readline``, swallowing the EOF ``TokenError``.

    ``tokenize2.generate_tokens`` raises ``TokenError`` when the input ends
    in the middle of a construct (e.g. an unclosed bracket).  For our
    purposes the stream should simply end there instead of raising.
    """
    token_stream = tokenize2.generate_tokens(readline)
    try:
        for tok in token_stream:
            yield tok
    except tokenize2.TokenError:
        # incomplete source at EOF -- terminate the stream quietly
        return
MinRK
update completion_ and objection_info_request...
def token_at_cursor(cell, cursor_pos=0):
    """Get the token at a given cursor

    Used for introspection.

    Parameters
    ----------
    cell : unicode
        A block of Python code
    cursor_pos : int
        The location of the cursor in the block where the token should be found

    Returns
    -------
    The text of the (possibly dotted) name at the cursor, or '' if no
    name token is found there.
    """
    cell = cast_unicode_py2(cell)
    names = []
    tokens = []
    # Map the tokenizer's 1-indexed line numbers to the character offset of
    # each line's start within `cell`.  The previous approach of adding
    # len(tok.line) on every NEWLINE token drifted out of sync on blank or
    # comment-only lines (which emit NL, not NEWLINE) and on multi-line
    # string tokens, so the wrong token could be reported for the cursor.
    offsets = {1: 0}
    for tup in generate_tokens(StringIO(cell).readline):
        tok = Token(*tup)
        start_line, start_col = tok.start
        end_line, end_col = tok.end
        if end_line + 1 not in offsets:
            # record the start offset of every line this token spans;
            # tok.line holds the full source of those lines
            for lineno, line in enumerate(tok.line.splitlines(True), start_line + 1):
                if lineno not in offsets:
                    offsets[lineno] = offsets[lineno - 1] + len(line)
        offset = offsets[start_line]
        if offset + start_col > cursor_pos:
            # current token starts after the cursor,
            # don't consume it
            break
        if tok.token == tokenize2.NAME and not iskeyword(tok.text):
            if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
                # extend a dotted name: `a.b` becomes one candidate "a.b"
                names[-1] = "%s.%s" % (names[-1], tok.text)
            else:
                names.append(tok.text)
        elif tok.token == tokenize2.OP:
            if tok.text == '=' and names:
                # don't inspect the lhs of an assignment
                names.pop(-1)
        # compare against the *end* line's offset -- for multi-line tokens
        # end_col is relative to end_line, not start_line
        if offsets[end_line] + end_col > cursor_pos:
            # we found the cursor, stop reading
            break

        tokens.append(tok)

    if names:
        return names[-1]
    else:
        return ''