@@ -233,9 +233,7 @@ if sys.version_info[0] >= 3:
             """
             st = tokens[j]
             if st.type == token.STRING and st.string.startswith(("'", '"')):
-                rt = tokenize.TokenInfo(st.type, 'u%s' % st.string,
-                                        st.start, st.end, st.line)
-                tokens[j] = rt
+                tokens[j] = st._replace(string='u%s' % st.string)

         for i, t in enumerate(tokens):
             # Convert most string literals to byte literals. String literals
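The new form relies on tokenize.TokenInfo being a namedtuple: _replace() returns a copy of the token with only the named field swapped, so the positional metadata the old code re-passed by hand (start, end, line) carries over automatically. A minimal standalone sketch of the u-prefix rewrite; the sample source line is made up for illustration:

import io
import token
import tokenize

# Hypothetical one-line module, just to get a STRING token to rewrite.
source = b"greeting = 'hello'\n"
tokens = list(tokenize.tokenize(io.BytesIO(source).readline))

for j, st in enumerate(tokens):
    # TokenInfo is a namedtuple: _replace() copies the token and changes
    # only the string field; start, end and line are preserved.
    if st.type == token.STRING and st.string.startswith(("'", '"')):
        tokens[j] = st._replace(string='u%s' % st.string)

print([t.string for t in tokens if t.type == token.STRING])  # ["u'hello'"]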
@@ -266,8 +264,7 @@ if sys.version_info[0] >= 3:
                     continue

                 # String literal. Prefix to make a b'' string.
-                yield tokenize.TokenInfo(t.type, 'b%s' % s, t.start, t.end,
-                                         t.line)
+                yield t._replace(string='b%s' % t.string)
                 continue

             # Insert compatibility imports at "from __future__ import" line.
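The byte-literal rewrite is the same pattern applied while streaming tokens out with yield. A sketch under the assumption that the rewritten stream is fed back through tokenize.untokenize() (the input line is invented); the prefixed string is one character longer than the original token, but that is harmless here because untokenize() only uses the recorded positions to decide how much whitespace to insert between tokens:

import io
import token
import tokenize

def rewrite(toks):
    for t in toks:
        # Prefix plain quoted literals so Python 3 reads them as bytes,
        # matching Python 2's unprefixed string semantics.
        if t.type == token.STRING and t.string.startswith(("'", '"')):
            yield t._replace(string='b%s' % t.string)
        else:
            yield t

src = b"d = {'a': 1}\n"  # hypothetical input
out = tokenize.untokenize(rewrite(tokenize.tokenize(io.BytesIO(src).readline)))
print(out)  # b"d = {b'a': 1}\n"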
@@ -287,10 +284,8 @@ if sys.version_info[0] >= 3:
                 for u in tokenize.tokenize(io.BytesIO(l).readline):
                     if u.type in (tokenize.ENCODING, token.ENDMARKER):
                         continue
-                    yield tokenize.TokenInfo(u.type, u.string,
-                                             (r, c + u.start[1]),
-                                             (r, c + u.end[1]),
-                                             '')
+                    yield u._replace(
+                        start=(r, c + u.start[1]), end=(r, c + u.end[1]))
                 continue

             # This looks like a function call.
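In this hunk the rewritten tokens come from re-tokenizing a freshly built line, so only their positions need fixing: _replace(start=..., end=...) keeps each token's type and text but shifts it to the row and column where it is being spliced in. A standalone sketch; the injected line and the anchor position (r, c) are made up, whereas the real transformer takes them from the line being rewritten:

import io
import token
import tokenize

# Hypothetical injected source and splice point.
l = b'; from foo import bar\n'
r, c = 12, 40

for u in tokenize.tokenize(io.BytesIO(l).readline):
    if u.type in (tokenize.ENCODING, token.ENDMARKER):
        continue
    # Same type and string, but reported at row r, shifted right by c columns.
    print(u._replace(start=(r, c + u.start[1]), end=(r, c + u.end[1])))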
@@ -322,8 +317,7 @@ if sys.version_info[0] >= 3:
                 # It changes iteritems to items as iteritems is not
                 # present in Python 3 world.
                 elif fn == 'iteritems':
-                    yield tokenize.TokenInfo(t.type, 'items',
-                                             t.start, t.end, t.line)
+                    yield t._replace(string='items')
                     continue

         # Emit unmodified token.
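The iteritems rename is again a single-field rewrite, this time of a NAME token. A simplified sketch that renames every bare iteritems and round-trips the result through untokenize(); unlike the transformer above it does not check that the name is actually part of a call, and the input source is invented:

import io
import token
import tokenize

def rewrite(toks):
    for t in toks:
        # Python 3 dicts have items() but not iteritems(); rename the token
        # and keep its recorded position so later tokens still line up.
        if t.type == token.NAME and t.string == 'iteritems':
            yield t._replace(string='items')
        else:
            yield t

src = b"for k, v in d.iteritems():\n    pass\n"  # hypothetical input
out = tokenize.untokenize(rewrite(tokenize.tokenize(io.BytesIO(src).readline)))
print(out.decode('utf-8'))  # for k, v in d.items(): ...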