From e5f3033d3ff9b3ec0d66297b298e18c8580e8f4b 2017-02-18 08:57:51
From: Thomas Kluyver
Date: 2017-02-18 08:57:51
Subject: [PATCH] Merge pull request #10300 from Carreau/clean-enumerate

Update a couple of iteration idioms.

---

diff --git a/IPython/core/ultratb.py b/IPython/core/ultratb.py
index 813ea7b..4352a61 100644
--- a/IPython/core/ultratb.py
+++ b/IPython/core/ultratb.py
@@ -189,11 +189,11 @@ def findsource(object):
         # use the one with the least indentation, which is the one
         # that's most probably not inside a function definition.
         candidates = []
-        for i in range(len(lines)):
-            match = pat.match(lines[i])
+        for i, line in enumerate(lines):
+            match = pat.match(line)
             if match:
                 # if it's at toplevel, it's already the best one
-                if lines[i][0] == 'c':
+                if line[0] == 'c':
                     return lines, i
                 # else add whitespace to candidate list
                 candidates.append((match.group(1), i))
@@ -358,7 +358,7 @@ def _fixed_getinnerframes(etb, context=1, tb_offset=0):
 
     aux = traceback.extract_tb(etb)
     assert len(records) == len(aux)
-    for i, (file, lnum, _, _) in zip(range(len(records)), aux):
+    for i, (file, lnum, _, _) in enumerate(aux):
         maybeStart = lnum - 1 - context // 2
         start = max(maybeStart, 0)
         end = start + context
diff --git a/IPython/utils/tokenutil.py b/IPython/utils/tokenutil.py
index e7fbc5d..724dc69 100644
--- a/IPython/utils/tokenutil.py
+++ b/IPython/utils/tokenutil.py
@@ -84,7 +84,7 @@ def token_at_cursor(cell, cursor_pos=0):
         if end_line + 1 not in offsets:
            # keep track of offsets for each line
             lines = tok.line.splitlines(True)
-            for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
+            for lineno, line in enumerate(lines, start_line + 1):
                 if lineno not in offsets:
                     offsets[lineno] = offsets[lineno-1] + len(line)
 
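
For reference, a small standalone sketch of the iteration idioms this patch applies. The sample data and the names lines and start_line are illustrative assumptions for the example, not code taken from IPython.

# Illustration of the enumerate() idioms used in the patch above.
# Sample data; names are made up for this sketch.
lines = ["class Foo:\n", "    class Bar:\n", "def baz():\n"]
start_line = 9

# Old idiom: index-based loop over range(len(...)).
for i in range(len(lines)):
    print(i, lines[i].rstrip())

# New idiom: enumerate() yields (index, item) pairs directly.
for i, line in enumerate(lines):
    print(i, line.rstrip())

# Old idiom: pairing an explicit line-number range with the items via zip().
for lineno, line in zip(range(start_line + 1, start_line + 1 + len(lines)), lines):
    print(lineno, line.rstrip())

# New idiom: enumerate() accepts an optional start argument.
for lineno, line in enumerate(lines, start_line + 1):
    print(lineno, line.rstrip())

In short, enumerate(seq, start) yields the same (number, item) pairs as zipping seq against an explicit range, without computing the length or building the range up front.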