Merge pull request #1893 from minrk/compositeerr

Update Parallel Magics and Exception Display

Based on feedback from @fperez, a few small changes to parallel exception handling and magics.

Exception changes:

* apply_requests trigger the showtraceback machinery, so apply errors are displayed as nicely as execute ones
* InteractiveShell.showtraceback handles RemoteErrors, so it draws only the remote traceback rather than the unhelpful local one

Magics changes:

* removed the parallelmagic extension
* creating a Client *implies* activating a lazily-evaluated DirectView on all engines
* magics can be activated on multiple views with different suffixes:

  ```python
  eall = rc.activate('all', 'all')
  e0 = rc.activate(0, '0')
  %pxall a=5
  %px0 print a
  ```

* added the %pxconfig magic for changing the default block/targets for the whole collection of magics
* added a targets arg to the %%px cell magic
* renamed %result to %pxresult for consistency (%result kept for backward compatibility)
* %pxresult now draws only the most recent result, but accepts all the output-formatting args of %%px
* added an --out arg to %%px for storing the AsyncResult object in the user_ns
* changed %px to not be verbose by default, and added verbosity control to %pxconfig
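To make the new workflow concrete, here is a hedged sketch of a session using the reworked magics. It assumes an already-running IPython cluster, and the exact flag spellings (`--targets`, `--noblock`) are inferred from the description above rather than quoted from it.

```python
# Illustrative IPython session -- each "cell" below would be entered separately.

# cell 1: connect; creating the Client activates %px/%%px/%pxresult on all engines
from IPython.parallel import Client
rc = Client()

# cell 2: change the defaults for the whole magic family (flag spellings assumed)
%pxconfig --targets ::2 --noblock

# cell 3: cell magic with an explicit targets arg; --out keeps the AsyncResult as 'ar'
%%px --targets 0,1 --out ar
import os
os.getpid()

# cell 4: redisplay only the most recent %px/%%px output
%pxresult
```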

completion_lexer.py
# System library imports
from pygments.token import Token, is_token_subtype


class CompletionLexer(object):
    """ Uses Pygments and some auxiliary information to lex code snippets for
        symbol contexts.
    """

    # Maps lexer names to a list of possible name separators.
    separator_map = { 'C'      : [ '.', '->' ],
                      'C++'    : [ '.', '->', '::' ],
                      'Python' : [ '.' ] }

    def __init__(self, lexer):
        """ Create a CompletionLexer using the specified Pygments lexer.
        """
        self.lexer = lexer

    def get_context(self, string):
        """ Assuming the cursor is at the end of the specified string, get the
            context (a list of names) for the symbol at cursor position.
        """
        context = []
        reversed_tokens = list(self._lexer.get_tokens(string))
        reversed_tokens.reverse()

        # Pygments often tacks on a newline when none is specified in the
        # input. Remove this newline.
        if reversed_tokens and reversed_tokens[0][1].endswith('\n') and \
                not string.endswith('\n'):
            reversed_tokens.pop(0)

        current_op = ''
        for token, text in reversed_tokens:

            if is_token_subtype(token, Token.Name):

                # Handle a trailing separator, e.g. 'foo.bar.'
                if current_op in self._name_separators:
                    if not context:
                        context.insert(0, '')

                # Handle non-separator operators and punctuation.
                elif current_op:
                    break

                context.insert(0, text)
                current_op = ''

            # Pygments doesn't understand that, e.g., '->' is a single operator
            # in C++. This is why we have to build up an operator from
            # potentially several tokens.
            elif token is Token.Operator or token is Token.Punctuation:
                current_op = text + current_op

            # Break on anything that is not an Operator, Punctuation, or Name.
            else:
                break

        return context

    def get_lexer(self):
        return self._lexer

    def set_lexer(self, lexer, name_separators=None):
        self._lexer = lexer
        if name_separators is None:
            self._name_separators = self.separator_map.get(lexer.name, ['.'])
        else:
            self._name_separators = list(name_separators)

    lexer = property(get_lexer, set_lexer)
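For context, a small hedged usage sketch of the class above. It assumes Pygments is available and that `CompletionLexer` is imported from this module; the snippets and the commented results are illustrative rather than taken from the repository.

```python
# Minimal usage sketch (not part of the file above).
from pygments.lexers import CppLexer, PythonLexer

# Python: a trailing separator yields an empty final component, telling the
# completer to list the attributes of 'os.path'.
py = CompletionLexer(PythonLexer())
print(py.get_context('import os; os.path.'))   # ['os', 'path', '']
print(py.get_context('os.path.jo'))            # ['os', 'path', 'jo']

# C++: '->' is reassembled from two single-character Pygments operator tokens
# and recognised as a name separator via separator_map['C++'].
cpp = CompletionLexer(CppLexer())
print(cpp.get_context('ptr->member'))          # ['ptr', 'member']
```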