Give input lines to tokenize one at a time...
Thomas Kluyver
@@ -120,25 +120,18 @@ class TokenInputTransformer(InputTransformer):
     """
     def __init__(self, func):
         self.func = func
-        self.current_line = ""
-        self.line_used = False
+        self.buf = []
         self.reset_tokenizer()
 
     def reset_tokenizer(self):
-        self.tokenizer = generate_tokens(self.get_line)
-
-    def get_line(self):
-        if self.line_used:
-            raise TokenError
-        self.line_used = True
-        return self.current_line
+        it = iter(self.buf)
+        self.tokenizer = generate_tokens(it.__next__)
 
     def push(self, line):
-        self.current_line += line + "\n"
-        if self.current_line.isspace():
+        self.buf.append(line + '\n')
+        if all(l.isspace() for l in self.buf):
             return self.reset()
 
-        self.line_used = False
         tokens = []
         stop_at_NL = False
         try:
@@ -158,13 +151,13 @@ class TokenInputTransformer(InputTransformer):
         return self.output(tokens)
 
     def output(self, tokens):
-        self.current_line = ""
+        self.buf.clear()
         self.reset_tokenizer()
         return untokenize(self.func(tokens)).rstrip('\n')
 
     def reset(self):
-        l = self.current_line
-        self.current_line = ""
+        l = ''.join(self.buf)
+        self.buf.clear()
         self.reset_tokenizer()
         if l:
             return l.rstrip('\n')
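
The core of the change: reset_tokenizer() now hands generate_tokens() the __next__ method of an iterator over the buffered lines, so the tokenizer pulls each pushed line on demand instead of re-reading one concatenated string through a single-use get_line() callback. A minimal standalone sketch of that pattern follows; the helper name tokenize_buffered_lines and the sample inputs are illustrative, not part of the commit.

    from tokenize import TokenError, generate_tokens

    def tokenize_buffered_lines(buf):
        # generate_tokens() expects a readline-style callable.  The
        # __next__ method of an iterator over the buffer returns one
        # pushed line per call; once the buffer is exhausted, the
        # resulting StopIteration is treated as end of input.
        readline = iter(buf).__next__
        try:
            return list(generate_tokens(readline))
        except TokenError:
            # Incomplete source (e.g. an unclosed bracket): the caller
            # should keep buffering lines and retry on the next push.
            return None

    # Two pushed lines that form one logical line tokenize cleanly:
    print(tokenize_buffered_lines(["a = (1 +\n", "     2)\n"]))
    # An unfinished statement signals that more input is needed:
    print(tokenize_buffered_lines(["a = (1 +\n"]))

Clearing the buffer and rebuilding the iterator, as output() and reset() do in the diff above, is what lets the same transformer instance start fresh on the next logical line.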