handle multi-line tokens in token_at_cursor...
Min RK
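What this change works around: tokenize reports token positions as (line, column) pairs, and a triple-quoted string spanning several lines is delivered as one STRING token, with no NL tokens for its inner lines. A minimal sketch with the standard-library tokenize module (the code in the diff below uses IPython's bundled tokenize2, which should behave the same way here):

from io import StringIO
from tokenize import generate_tokens

cell = '"""\ndocstring\nmultiline token\n"""\nb = hello("string", there)\n'

for tok in generate_tokens(StringIO(cell).readline):
    # start/end are (line, column) pairs; the docstring arrives as a single
    # STRING token running from (1, 0) to (4, 3).
    print(tok.type, tok.start, tok.end, repr(tok.string[:20]))

A flat character offset that is advanced only when NEWLINE/NL tokens go by, as the old code did, therefore falls behind as soon as a token crosses a line boundary.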
@@ -53,6 +53,25 @@ def test_multiline():
     for i in range(start, start + len(expected)):
         expect_token(expected, cell, i)
 
+def test_multiline_token():
+    cell = '\n'.join([
+        '"""\n\nxxxxxxxxxx\n\n"""',
+        '5, """',
+        'docstring',
+        'multiline token',
+        '""", [',
+        '2, 3, "complicated"]',
+        'b = hello("string", there)'
+    ])
+    expected = 'hello'
+    start = cell.index(expected) + 1
+    for i in range(start, start + len(expected)):
+        expect_token(expected, cell, i)
+    expected = 'there'
+    start = cell.index(expected) + 1
+    for i in range(start, start + len(expected)):
+        expect_token(expected, cell, i)
+
 def test_nested_call():
     cell = "foo(bar(a=5), b=10)"
     expected = 'foo'
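What the new test asserts can also be checked by hand; a sketch assuming token_at_cursor is importable from IPython.utils.tokenutil (the module this test file exercises):

from IPython.utils.tokenutil import token_at_cursor  # assumed import path

cell = '\n'.join([
    '"""\n\nxxxxxxxxxx\n\n"""',
    '5, """',
    'docstring',
    'multiline token',
    '""", [',
    '2, 3, "complicated"]',
    'b = hello("string", there)'
])

cursor_pos = cell.index('hello') + 1      # cursor sitting inside 'hello'
print(token_at_cursor(cell, cursor_pos))  # the test expects 'hello'

Before this change, the multi-line strings at the top of the cell could throw the offset calculation off, so a cursor inside 'hello' could be attributed to the wrong token.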
@@ -72,15 +72,24 @@ def token_at_cursor(cell, cursor_pos=0):
     cell = cast_unicode_py2(cell)
     names = []
     tokens = []
-    offset = 0
     call_names = []
+
+    offsets = {1: 0} # lines start at 1
     for tup in generate_tokens(StringIO(cell).readline):
 
         tok = Token(*tup)
 
         # token, text, start, end, line = tup
-        start_col = tok.start[1]
-        end_col = tok.end[1]
+        start_line, start_col = tok.start
+        end_line, end_col = tok.end
+        if end_line + 1 not in offsets:
+            # keep track of offsets for each line
+            lines = tok.line.splitlines(True)
+            for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
+                if lineno not in offsets:
+                    offsets[lineno] = offsets[lineno-1] + len(line)
+
+        offset = offsets[start_line]
         # allow '|foo' to find 'foo' at the beginning of a line
         boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
         if offset + start_col >= boundary:
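The offsets dict added above maps each 1-based line number to the character index at which that line starts in the cell. It is filled in lazily: tok.line holds every physical line a token spans, so splitting it with splitlines(True) (keeping the newlines) gives the lengths needed to extend the table past the token's last line. A standalone sketch of just that bookkeeping, using the standard-library tokenize and a hypothetical helper name line_offsets:

from io import StringIO
from tokenize import generate_tokens

def line_offsets(cell):
    # hypothetical helper mirroring the bookkeeping added in the diff above
    offsets = {1: 0}  # lines start at 1
    for tok in generate_tokens(StringIO(cell).readline):
        start_line, end_line = tok.start[0], tok.end[0]
        if end_line + 1 not in offsets:
            lines = tok.line.splitlines(True)
            for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
                if lineno not in offsets:
                    offsets[lineno] = offsets[lineno - 1] + len(line)
    return offsets

print(line_offsets('"""\na\nb\n"""\nx = 1\n'))
# {1: 0, 2: 4, 3: 6, 4: 8, 5: 12, 6: 18}

offsets[start_line] + start_col (or end_col) then gives the absolute position of a token boundary within the cell, regardless of how many lines the token spans.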
@@ -103,14 +112,12 @@ def token_at_cursor(cell, cursor_pos=0):
         elif tok.text == ')' and call_names:
             call_names.pop(-1)
 
-        if offset + end_col > cursor_pos:
+        tokens.append(tok)
+
+        if offsets[end_line] + end_col > cursor_pos:
             # we found the cursor, stop reading
             break
 
-        tokens.append(tok)
-        if tok.token in (tokenize2.NEWLINE, tokenize2.NL):
-            offset += len(tok.line)
-
     if call_names:
         return call_names[-1]
     elif names:
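With the offsets from the sketch above, the docstring token in '"""\na\nb\n"""\nx = 1\n' ends at (line 4, column 3), so the new condition compares offsets[4] + end_col = 8 + 3 = 11 against cursor_pos. The old condition used a single running offset that only grew on NEWLINE/NL tokens; no such tokens are emitted for the lines inside a triple-quoted string, so that offset was still 0 at this point (giving 3) and stayed too small for every later token as well. Moving tokens.append(tok) above the check also means the token that contains the cursor is itself kept in tokens rather than dropped by the break.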