test_tokenutil.py
141 lines | 3.8 KiB | text/x-python | PythonLexer

"""Tests for tokenutil"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import pytest

from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
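
# For orientation (mirrors the assertions below, not part of the public docs):
# `token_at_cursor` returns the token enclosing a 0-based cursor offset, and
# `line_at_cursor` returns the line containing the cursor plus that line's
# start offset, e.g.:
#
#   token_at_cursor("foo(a=5, b='10')", 1)  # -> "foo"
#   line_at_cursor("One\nTwo\n", 4)         # -> ("Two\n", 4)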

def expect_token(expected, cell, cursor_pos):
    """Assert that the token at ``cursor_pos`` in ``cell`` is ``expected``."""
    token = token_at_cursor(cell, cursor_pos)
    # Walk the lines to find the one containing the cursor, tracking the
    # offset of that line's first character (+1 per line for the newline).
    offset = 0
    for line in cell.splitlines():
        if offset + len(line) >= cursor_pos:
            break
        else:
            offset += len(line) + 1
    column = cursor_pos - offset
    # Render the cursor position inside the line for a readable failure message.
    line_with_cursor = "%s|%s" % (line[:column], line[column:])
    assert token == expected, "Expected %r, got %r in: %r (pos %i)" % (
        expected,
        token,
        line_with_cursor,
        cursor_pos,
    )
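
# Illustrative only (hypothetical values): a failing expect_token call
# reports something like:
#   AssertionError: Expected 'foo', got 'bar' in: "foo(|a=5, b='10')" (pos 4)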


def test_simple():
    cell = "foo"
    for i in range(len(cell)):
        expect_token("foo", cell, i)


def test_function():
    cell = "foo(a=5, b='10')"
    expected = 'foo'
    # up to `foo(|a=`
    for i in range(cell.find('a=') + 1):
        expect_token("foo", cell, i)
    # find foo after `=`
    for i in [cell.find('=') + 1, cell.rfind('=') + 1]:
        expect_token("foo", cell, i)
    # in between `5,|` and `|b=`
    for i in range(cell.find(','), cell.find('b=')):
        expect_token("foo", cell, i)


def test_multiline():
    cell = '\n'.join([
        'a = 5',
        'b = hello("string", there)'
    ])
    expected = 'hello'
    start = cell.index(expected) + 1
    for i in range(start, start + len(expected)):
        expect_token(expected, cell, i)
    expected = 'hello'
    start = cell.index(expected) + 1
    for i in range(start, start + len(expected)):
        expect_token(expected, cell, i)


def test_multiline_token():
    cell = '\n'.join([
        '"""\n\nxxxxxxxxxx\n\n"""',
        '5, """',
        'docstring',
        'multiline token',
        '""", [',
        '2, 3, "complicated"]',
        'b = hello("string", there)'
    ])
    expected = 'hello'
    start = cell.index(expected) + 1
    for i in range(start, start + len(expected)):
        expect_token(expected, cell, i)
    expected = 'hello'
    start = cell.index(expected) + 1
    for i in range(start, start + len(expected)):
        expect_token(expected, cell, i)


def test_nested_call():
    cell = "foo(bar(a=5), b=10)"
    expected = 'foo'
    start = cell.index('bar') + 1
    for i in range(start, start + 3):
        expect_token(expected, cell, i)
    expected = 'bar'
    start = cell.index('a=')
    for i in range(start, start + 3):
        expect_token(expected, cell, i)
    expected = 'foo'
    start = cell.index(')') + 1
    for i in range(start, len(cell) - 1):
        expect_token(expected, cell, i)


def test_attrs():
    cell = "a = obj.attr.subattr"
    expected = 'obj'
    idx = cell.find('obj') + 1
    for i in range(idx, idx + 3):
        expect_token(expected, cell, i)
    idx = cell.find('.attr') + 2
    expected = 'obj.attr'
    for i in range(idx, idx + 4):
        expect_token(expected, cell, i)
    idx = cell.find('.subattr') + 2
    expected = 'obj.attr.subattr'
    for i in range(idx, len(cell)):
        expect_token(expected, cell, i)


def test_line_at_cursor():
    cell = ""
    (line, offset) = line_at_cursor(cell, cursor_pos=11)
    assert line == ""
    assert offset == 0

    # The position after a newline should be the start of the following line.
    cell = "One\nTwo\n"
    (line, offset) = line_at_cursor(cell, cursor_pos=4)
    assert line == "Two\n"
    assert offset == 4

    # The end of a cell should be on the last line.
    cell = "pri\npri"
    (line, offset) = line_at_cursor(cell, cursor_pos=7)
    assert line == "pri"
    assert offset == 4


@pytest.mark.parametrize(
    "c, token",
    zip(
        list(range(16, 22)) + list(range(22, 28)),
        ["int"] * (22 - 16) + ["map"] * (28 - 22),
    ),
)
def test_multiline_statement(c, token):
    cell = """a = (1,
    3)

int()
map()
"""
    expect_token(token, cell, c)
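
# Offset arithmetic for the parametrization above: in `cell`, "a = (1,\n"
# occupies offsets 0-7 and "    3)\n" offsets 8-14, the blank line is offset
# 15, so "int()\n" spans 16-21 and "map()\n" spans 22-27.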