@@ -110,6 +110,50 @@ class CoroutineInputTransformer(InputTransformer):
         """
         return self.coro.send(None)
 
+class TokenInputTransformer(InputTransformer):
+    """Wrapper for a token-based input transformer.
+
+    func should accept a list of tokens (5-tuples, see tokenize docs), and
+    return an iterable which can be passed to tokenize.untokenize().
+    """
+    def __init__(self, func):
+        self.func = func
+        self.current_line = ""
+        self.tokenizer = tokenize.generate_tokens(self.get_line)
+        self.line_used = False
+
+    def get_line(self):
+        if self.line_used:
+            raise tokenize.TokenError
+        self.line_used = True
+        return self.current_line
+
+    def push(self, line):
+        self.current_line += line + "\n"
+        self.line_used = False
+        tokens = []
+        try:
+            for intok in self.tokenizer:
+                tokens.append(intok)
+                if intok[0] in (tokenize.NEWLINE, tokenize.NL):
+                    # Stop before we try to pull a line we don't have yet
+                    break
+        except tokenize.TokenError:
+            # Multi-line statement - stop and try again with the next line
+            self.tokenizer = tokenize.generate_tokens(self.get_line)
+            return None
+
+        self.current_line = ""
+        # Python bug 8478 - untokenize doesn't work quite correctly with a
+        # generator. We call list() to avoid this.
+        return tokenize.untokenize(list(self.func(tokens))).rstrip('\n')
+
+    def reset(self):
+        l = self.current_line
+        self.current_line = ""
+        if l:
+            return l.rstrip('\n')
+
 
 # Utilities
 def _make_help_call(target, esc, lspace, next_input=None):
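
Not part of the changeset, but to make the docstring's contract concrete, here is a minimal usage sketch of the new class. The import path and the illustrative upper_names transformer are assumptions; push() returning None for an incomplete statement, and the re-untokenized source otherwise, follows directly from the code above.

    import tokenize
    # Assumed import path - illustrative, not from this changeset:
    from IPython.core.inputtransformer import TokenInputTransformer

    def upper_names(tokens):
        # func receives a list of 5-tuples; yielding 2-tuples is enough
        # for tokenize.untokenize(), as decistmt in the tests below does.
        for toknum, tokval, _, _, _ in tokens:
            if toknum == tokenize.NAME:
                yield (toknum, tokval.upper())
            else:
                yield (toknum, tokval)

    t = TokenInputTransformer(upper_names)
    print(t.push("x = '''one"))  # None: get_line() raised TokenError mid-string
    print(t.push("two'''"))      # X ='''one\ntwo''' (untokenize normalizes spacing)

Restarting generate_tokens after a TokenError is what lets a later push() retokenize the whole buffer once the multi-line construct closes.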
@@ -1,3 +1,4 @@
+import tokenize
 import unittest
 import nose.tools as nt
 
@@ -324,3 +325,44 @@ def test_has_comment():
         ('a #comment not "string"', True),
         ]
     tt.check_pairs(ipt.has_comment, tests)
+
+@ipt.TokenInputTransformer.wrap
+def decistmt(tokens):
+    """Substitute Decimals for floats in a string of statements.
+
+    Based on an example from the tokenize module docs.
+    """
+    result = []
+    for toknum, tokval, _, _, _ in tokens:
+        if toknum == tokenize.NUMBER and '.' in tokval:  # replace NUMBER tokens
+            for newtok in [
+                (tokenize.NAME, 'Decimal'),
+                (tokenize.OP, '('),
+                (tokenize.STRING, repr(tokval)),
+                (tokenize.OP, ')')
+            ]:
+                yield newtok
+        else:
+            yield (toknum, tokval)
+
+
+
+def test_token_input_transformer():
+    tests = [(u'1.2', u_fmt(u"Decimal ({u}'1.2')")),
+             (u'"1.2"', u'"1.2"'),
+             ]
+    tt.check_pairs(transform_and_reset(decistmt), tests)
+    ml_tests = \
+    [ [(u"a = 1.2; b = '''x", None),
+       (u"y'''", u_fmt(u"a =Decimal ({u}'1.2');b ='''x\ny'''")),
+       ],
+      [(u"a = [1.2,", u_fmt(u"a =[Decimal ({u}'1.2'),")),
+       (u"3]", u"3 ]"),
+       ],
+      [(u"a = '''foo", None), # Test resetting when within a multi-line string
+       (u"bar", None),
+       (None, u"a = '''foo\nbar"),
+       ],
+    ]
+    for example in ml_tests:
+        transform_checker(example, decistmt)
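
As a cross-check (again not part of the changeset), the expected strings above can be reproduced by running the same substitution straight through tokenize, without the transformer machinery. Python 3 is assumed here; under Python 2, repr() on a unicode token value yields the u'...' prefix, which is what the {u} placeholder in u_fmt accounts for.

    import tokenize
    from io import StringIO

    def decistmt_tokens(tokens):
        # The same substitution decistmt performs, without the wrapper.
        for toknum, tokval, _, _, _ in tokens:
            if toknum == tokenize.NUMBER and '.' in tokval:
                for newtok in [(tokenize.NAME, 'Decimal'),
                               (tokenize.OP, '('),
                               (tokenize.STRING, repr(tokval)),
                               (tokenize.OP, ')')]:
                    yield newtok
            else:
                yield (toknum, tokval)

    toks = tokenize.generate_tokens(StringIO("a = 1.2\n").readline)
    print(tokenize.untokenize(list(decistmt_tokens(toks))))
    # a =Decimal ('1.2')  - the odd spacing comes from untokenize,
    # matching the expectations in the tests above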