Give input lines to tokenize one at a time...
Thomas Kluyver
@@ -120,25 +120,18 @@ class TokenInputTransformer(InputTransformer):
     """
     def __init__(self, func):
         self.func = func
-        self.current_line = ""
-        self.line_used = False
+        self.buf = []
         self.reset_tokenizer()
-
+
     def reset_tokenizer(self):
-        self.tokenizer = generate_tokens(self.get_line)
-
-    def get_line(self):
-        if self.line_used:
-            raise TokenError
-        self.line_used = True
-        return self.current_line
-
+        it = iter(self.buf)
+        self.tokenizer = generate_tokens(it.__next__)
+
     def push(self, line):
-        self.current_line += line + "\n"
-        if self.current_line.isspace():
+        self.buf.append(line + '\n')
+        if all(l.isspace() for l in self.buf):
             return self.reset()
-
-        self.line_used = False
+
         tokens = []
         stop_at_NL = False
         try:
@@ -158,13 +151,13 @@ class TokenInputTransformer(InputTransformer):
         return self.output(tokens)
 
     def output(self, tokens):
-        self.current_line = ""
+        self.buf.clear()
         self.reset_tokenizer()
         return untokenize(self.func(tokens)).rstrip('\n')
 
     def reset(self):
-        l = self.current_line
-        self.current_line = ""
+        l = ''.join(self.buf)
+        self.buf.clear()
         self.reset_tokenizer()
         if l:
             return l.rstrip('\n')
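The change replaces the single-line get_line callback with a list buffer whose iterator is handed straight to tokenize.generate_tokens: the tokenizer pulls buffered lines one at a time through the iterator's __next__, and raises TokenError when it runs out of input in the middle of a statement. Below is a minimal, self-contained sketch of that pattern. It is not the IPython class from the diff; the push helper and the sample inputs are made up for illustration, and it rebuilds the tokenizer on every call, whereas the class above keeps one tokenizer between calls and only recreates it in reset_tokenizer().

# Sketch only: feed buffered lines to the tokenizer one at a time via an
# iterator's __next__. Not the IPython implementation; `push` and the
# sample inputs are illustrative.
from tokenize import generate_tokens, TokenError, NEWLINE

buf = []  # accumulated input lines, each terminated with '\n'

def push(line):
    """Buffer one line; return the statement's tokens once it is complete."""
    buf.append(line + '\n')
    # iter(buf).__next__ plays the role of readline: each call hands the
    # tokenizer the next buffered line, and StopIteration signals EOF.
    tokenizer = generate_tokens(iter(buf).__next__)
    tokens = []
    try:
        for tok in tokenizer:
            tokens.append(tok)
            if tok[0] == NEWLINE:   # a complete logical line was tokenized
                break
    except TokenError:
        # EOF inside an open bracket or continuation: keep buffering.
        return None
    buf.clear()
    return tokens

print(push("a = (1 +"))   # None - the statement is still incomplete
print(push("     2)"))    # token list for the finished statement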