Disallow no-prefix `ipython foo=bar` argument style. This style is in rc1 but will be removed in rc2.

Since they don't match any flag pattern, rc1-style arguments will be interpreted by IPython as files to be run. So `ipython gui=foo -i` will exec `gui=foo` and pass '-i' to it. Presumably this file won't exist, so there will be an error:

    Error in executing file in user namespace: gui=foo

Assignments *must* have two leading '-', as in:

    ipython --foo=bar

All flags (non-assignments) can be specified with one or two leading '-', as in:

    ipython -i --pylab -pdb --pprint script.py

or:

    ipython --i -pylab --pdb -pprint script.py

but help reports only the two-dash forms, since single-dash options will likely be removed on moving to argparse, where they will be replaced by single-letter aliases. The most common remaining invalid option will be:

    ipython -foo=bar

and a "did you mean --foo=bar?" suggestion will be presented in these cases.
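For illustration only, here is a minimal sketch of the kind of check described above; the function name and regex are assumptions, not IPython's actual argument-parsing code. It flags a single-dash assignment such as `-foo=bar` and prints the suggested `--foo=bar` form:

    import re

    def warn_single_dash_assignment(arg):
        # Hypothetical helper, not IPython's implementation: a single leading
        # '-' followed by a name and '=' is an invalid assignment; the fix is
        # simply to prepend another '-'.
        if re.match(r'^-[^-=][^=]*=', arg):
            print("Invalid argument: %r. Did you mean '-%s'?" % (arg, arg))

    for arg in ['-foo=bar', '--foo=bar', '-i', 'gui=foo']:
        warn_single_dash_assignment(arg)
    # Only '-foo=bar' is flagged:
    # Invalid argument: '-foo=bar'. Did you mean '--foo=bar'?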

completion_lexer.py
# System library imports
from pygments.token import Token, is_token_subtype


class CompletionLexer(object):
    """ Uses Pygments and some auxiliary information to lex code snippets for
        symbol contexts.
    """

    # Maps lexer names to a list of possible name separators.
    separator_map = { 'C'      : [ '.', '->' ],
                      'C++'    : [ '.', '->', '::' ],
                      'Python' : [ '.' ] }

    def __init__(self, lexer):
        """ Create a CompletionLexer using the specified Pygments lexer.
        """
        self.lexer = lexer

    def get_context(self, string):
        """ Assuming the cursor is at the end of the specified string, get the
            context (a list of names) for the symbol at the cursor position.
        """
        context = []
        reversed_tokens = list(self._lexer.get_tokens(string))
        reversed_tokens.reverse()

        # Pygments often tacks on a newline when none is specified in the
        # input. Remove this newline.
        if reversed_tokens and reversed_tokens[0][1].endswith('\n') and \
                not string.endswith('\n'):
            reversed_tokens.pop(0)

        current_op = ''
        for token, text in reversed_tokens:

            if is_token_subtype(token, Token.Name):

                # Handle a trailing separator, e.g. 'foo.bar.'
                if current_op in self._name_separators:
                    if not context:
                        context.insert(0, '')

                # Handle non-separator operators and punctuation.
                elif current_op:
                    break

                context.insert(0, text)
                current_op = ''

            # Pygments doesn't understand that, e.g., '->' is a single operator
            # in C++. This is why we have to build up an operator from
            # potentially several tokens.
            elif token is Token.Operator or token is Token.Punctuation:
                current_op = text + current_op

            # Break on anything that is not an Operator, Punctuation, or Name.
            else:
                break

        return context

    def get_lexer(self):
        return self._lexer

    def set_lexer(self, lexer, name_separators=None):
        self._lexer = lexer
        if name_separators is None:
            self._name_separators = self.separator_map.get(lexer.name, ['.'])
        else:
            self._name_separators = list(name_separators)

    lexer = property(get_lexer, set_lexer)
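For reference, a quick usage sketch of the class above (not part of the original file); it assumes Pygments is installed and that this module is importable:

    from pygments.lexers import PythonLexer

    completion_lexer = CompletionLexer(PythonLexer())

    # The context is the dotted name to the left of the cursor, split on '.'.
    print(completion_lexer.get_context('foo.bar.ba'))   # ['foo', 'bar', 'ba']

    # A trailing separator yields an empty final component.
    print(completion_lexer.get_context('foo.bar.'))     # ['foo', 'bar', '']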