prioritize function token for inspection...
Min RK
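In short: with this change, token_at_cursor prioritizes function calls, so placing the cursor anywhere inside a call's parentheses inspects the callable itself rather than the token directly under the cursor, and nested calls resolve to the innermost enclosing call. A minimal illustration of the new behavior, mirroring the test_nested_call case added below:

from IPython.utils.tokenutil import token_at_cursor

cell = "foo(bar(a=5), b=10)"
# inside the inner call's argument list, the inner callable wins
assert token_at_cursor(cell, cell.index('a=')) == 'bar'
# past the inner call's closing paren but still inside foo(...), foo wins
assert token_at_cursor(cell, cell.index(')') + 2) == 'foo'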
@@ -1,75 +1,90 @@
 """Tests for tokenutil"""
 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.
 
 import nose.tools as nt
 
 from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
 
 def expect_token(expected, cell, cursor_pos):
     token = token_at_cursor(cell, cursor_pos)
     offset = 0
     for line in cell.splitlines():
         if offset + len(line) >= cursor_pos:
             break
         else:
             offset += len(line)
     column = cursor_pos - offset
     line_with_cursor = '%s|%s' % (line[:column], line[column:])
     nt.assert_equal(token, expected,
         "Expected %r, got %r in: %r (pos %i)" % (
         expected, token, line_with_cursor, cursor_pos)
     )
 
 def test_simple():
     cell = "foo"
     for i in range(len(cell)):
         expect_token("foo", cell, i)
 
 def test_function():
     cell = "foo(a=5, b='10')"
     expected = 'foo'
     # up to `foo(|a=`
     for i in range(cell.find('a=') + 1):
         expect_token("foo", cell, i)
     # find foo after `=`
     for i in [cell.find('=') + 1, cell.rfind('=') + 1]:
         expect_token("foo", cell, i)
     # in between `5,|` and `|b=`
     for i in range(cell.find(','), cell.find('b=')):
         expect_token("foo", cell, i)
 
 def test_multiline():
     cell = '\n'.join([
         'a = 5',
         'b = hello("string", there)'
     ])
     expected = 'hello'
     start = cell.index(expected) + 1
     for i in range(start, start + len(expected)):
         expect_token(expected, cell, i)
-    expected = 'there'
+    expected = 'hello'
     start = cell.index(expected) + 1
     for i in range(start, start + len(expected)):
         expect_token(expected, cell, i)
 
+def test_nested_call():
+    cell = "foo(bar(a=5), b=10)"
+    expected = 'foo'
+    start = cell.index('bar') + 1
+    for i in range(start, start + 3):
+        expect_token(expected, cell, i)
+    expected = 'bar'
+    start = cell.index('a=')
+    for i in range(start, start + 3):
+        expect_token(expected, cell, i)
+    expected = 'foo'
+    start = cell.index(')') + 1
+    for i in range(start, len(cell)-1):
+        expect_token(expected, cell, i)
+
 def test_attrs():
-    cell = "foo(a=obj.attr.subattr)"
+    cell = "a = obj.attr.subattr"
     expected = 'obj'
     idx = cell.find('obj') + 1
     for i in range(idx, idx + 3):
         expect_token(expected, cell, i)
     idx = cell.find('.attr') + 2
     expected = 'obj.attr'
     for i in range(idx, idx + 4):
         expect_token(expected, cell, i)
     idx = cell.find('.subattr') + 2
     expected = 'obj.attr.subattr'
     for i in range(idx, len(cell)):
         expect_token(expected, cell, i)
 
 def test_line_at_cursor():
     cell = ""
     (line, offset) = line_at_cursor(cell, cursor_pos=11)
     assert line == "", ("Expected '', got %r" % line)
     assert offset == 0, ("Expected '', got %r" % line)
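The one-line change in test_multiline follows from the same rule: there sits inside the hello(...) call, so inspecting it now targets the callable. An equivalent spot check:

from IPython.utils.tokenutil import token_at_cursor

cell = 'a = 5\nb = hello("string", there)'
# `there` is an argument of hello(...), so the call itself is inspected
assert token_at_cursor(cell, cell.index('there') + 1) == 'hello'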
@@ -1,110 +1,121 @@
 """Token-related utilities"""
 
 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.
 
 from __future__ import absolute_import, print_function
 
 from collections import namedtuple
 from io import StringIO
 from keyword import iskeyword
 
 from . import tokenize2
 from .py3compat import cast_unicode_py2
 
 Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
 
 def generate_tokens(readline):
     """wrap generate_tokens to catch EOF errors"""
     try:
         for token in tokenize2.generate_tokens(readline):
             yield token
     except tokenize2.TokenError:
         # catch EOF error
         return
 
 def line_at_cursor(cell, cursor_pos=0):
     """Return the line in a cell at a given cursor position
 
     Used for calling line-based APIs that don't support multi-line input, yet.
 
     Parameters
     ----------
 
     cell: text
         multiline block of text
     cursor_pos: integer
         the cursor position
 
     Returns
     -------
 
     (line, offset): (text, integer)
         The line with the current cursor, and the character offset of the start of the line.
     """
     offset = 0
     lines = cell.splitlines(True)
     for line in lines:
         next_offset = offset + len(line)
         if next_offset >= cursor_pos:
             break
         offset = next_offset
     else:
         line = ""
     return (line, offset)
 
 def token_at_cursor(cell, cursor_pos=0):
     """Get the token at a given cursor
 
     Used for introspection.
 
+    Function calls are prioritized, so the token for the callable will be returned
+    if the cursor is anywhere inside the call.
+
     Parameters
     ----------
 
     cell : unicode
         A block of Python code
     cursor_pos : int
         The location of the cursor in the block where the token should be found
     """
     cell = cast_unicode_py2(cell)
     names = []
     tokens = []
     offset = 0
+    call_names = []
     for tup in generate_tokens(StringIO(cell).readline):
 
         tok = Token(*tup)
 
         # token, text, start, end, line = tup
         start_col = tok.start[1]
         end_col = tok.end[1]
         # allow '|foo' to find 'foo' at the beginning of a line
         boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
         if offset + start_col >= boundary:
             # current token starts after the cursor,
             # don't consume it
             break
 
         if tok.token == tokenize2.NAME and not iskeyword(tok.text):
             if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
                 names[-1] = "%s.%s" % (names[-1], tok.text)
             else:
                 names.append(tok.text)
         elif tok.token == tokenize2.OP:
             if tok.text == '=' and names:
                 # don't inspect the lhs of an assignment
                 names.pop(-1)
+            if tok.text == '(' and names:
+                # if we are inside a function call, inspect the function
+                call_names.append(names[-1])
+            elif tok.text == ')' and call_names:
+                call_names.pop(-1)
 
         if offset + end_col > cursor_pos:
             # we found the cursor, stop reading
             break
 
         tokens.append(tok)
         if tok.token == tokenize2.NEWLINE:
             offset += len(tok.line)
 
-    if names:
+    if call_names:
+        return call_names[-1]
+    elif names:
         return names[-1]
     else:
         return ''
 
 
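For reference, line_at_cursor (shown above unchanged) maps a cursor position to the containing line and the character offset at which that line starts, per its docstring. A quick illustration:

from IPython.utils.tokenutil import line_at_cursor

cell = 'a = 5\nb = hello("string", there)'
line, offset = line_at_cursor(cell, cursor_pos=8)
# position 8 falls on the second line, which starts at offset 6
assert line == 'b = hello("string", there)'
assert offset == 6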