revset.py
2915 lines
| 89.9 KiB
| text/x-python
|
PythonLexer
/ mercurial / revset.py
Matt Mackall
|
r11275 | # revset.py - revision set queries for mercurial | ||
# | ||||
# Copyright 2010 Matt Mackall <mpm@selenic.com> | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
Bryan O'Sullivan
|
r16834 | import re | ||
Pierre-Yves David
|
r15819 | import parser, util, error, discovery, hbisect, phases | ||
Matt Mackall
|
r16417 | import node | ||
Lucas Moscovicz
|
r20690 | import heapq | ||
Martin Geisler
|
r12085 | import match as matchmod | ||
Durham Goode
|
r20613 | import ancestor as ancestormod | ||
Patrick Mezard
|
r13593 | from i18n import _ | ||
FUJIWARA Katsunori
|
r15726 | import encoding | ||
Pierre-Yves David
|
r17469 | import obsolete as obsmod | ||
FUJIWARA Katsunori
|
r20286 | import pathutil | ||
Pierre-Yves David
|
r18251 | import repoview | ||
Matt Mackall
|
r11275 | |||
Patrick Mezard
|
def _revancestors(repo, revs, followfirst):
    """Like revlog.ancestors(), but supports followfirst.

    Returns a descending generatorset of all ancestors of ``revs``
    (inclusive).  When ``followfirst`` is true only first parents are
    followed.
    """
    # slice bound for parentrevs(): [:1] keeps only the first parent
    cut = followfirst and 1 or None
    cl = repo.changelog

    def iterate():
        # The heap stores negated revision numbers so Python's min-heap
        # yields revisions in *descending* order.
        revqueue, revsnode = None, None
        h = []

        revs.descending()
        revqueue = util.deque(revs)
        if revqueue:
            revsnode = revqueue.popleft()
            heapq.heappush(h, -revsnode)

        seen = set([node.nullrev])
        while h:
            current = -heapq.heappop(h)
            if current not in seen:
                # once the pending input rev is reached, feed the next
                # one from the queue into the heap
                if revsnode and current == revsnode:
                    if revqueue:
                        revsnode = revqueue.popleft()
                        heapq.heappush(h, -revsnode)
                seen.add(current)
                yield current
                for parent in cl.parentrevs(current)[:cut]:
                    if parent != node.nullrev:
                        heapq.heappush(h, -parent)

    return _descgeneratorset(iterate())
Patrick Mezard
|
r16409 | |||
def _revdescendants(repo, revs, followfirst):
    """Like revlog.descendants() but supports followfirst.

    Returns an ascending generatorset of all descendants of ``revs``
    (exclusive of ``revs`` themselves).  When ``followfirst`` is true
    only first-parent links are considered.
    """
    # slice bound for parentrevs(): [:1] keeps only the first parent
    cut = followfirst and 1 or None

    def iterate():
        cl = repo.changelog
        first = min(revs)
        nullrev = node.nullrev
        if first == nullrev:
            # Are there nodes with a null first parent and a non-null
            # second one? Maybe. Do we care? Probably not.
            for i in cl:
                yield i
        else:
            # single ascending sweep: a rev is a descendant if any of its
            # (considered) parents is already known to be one
            seen = set(revs)
            for i in cl.revs(first + 1):
                for x in cl.parentrevs(i)[:cut]:
                    if x != nullrev and x in seen:
                        seen.add(i)
                        yield i
                        break

    return _ascgeneratorset(iterate())
Patrick Mezard
|
r16409 | |||
Bryan O'Sullivan
|
def _revsbetween(repo, roots, heads):
    """Return all paths between roots and heads, inclusive of both endpoint
    sets."""
    if not roots:
        return baseset([])
    parentrevs = repo.changelog.parentrevs
    stack = list(heads)
    onpath = set()
    parentcache = {}
    lowest = min(roots)
    rootset = set(roots)
    # open-code the post-order traversal due to the tiny size of
    # sys.getrecursionlimit()
    while stack:
        rev = stack.pop()
        if rev in rootset:
            onpath.add(rev)
        parents = parentrevs(rev)
        parentcache[rev] = parents
        for parent in parents:
            if parent >= lowest and parent not in parentcache:
                stack.append(parent)
    if not onpath:
        return baseset([])
    # sweep visited revs in ascending order: anything whose parent is on
    # a path is on a path as well
    for rev in sorted(parentcache):
        for parent in parentcache[rev]:
            if parent in onpath:
                onpath.add(rev)
    return baseset(sorted(onpath))
Bryan O'Sullivan
|
r16862 | |||
Matt Mackall
|
# Parser table: maps each token to
#   (binding strength, prefix rule, infix rule, suffix rule)
# where each rule is (node name, sub-expression binding[, closing token]).
elements = {
    "(": (20, ("group", 1, ")"), ("func", 1, ")")),
    "~": (18, None, ("ancestor", 18)),
    "^": (18, None, ("parent", 18), ("parentpost", 18)),
    "-": (5, ("negate", 19), ("minus", 5)),
    "::": (17, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    "..": (17, ("dagrangepre", 17), ("dagrange", 17),
           ("dagrangepost", 17)),
    ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
    "not": (10, ("not", 10)),
    "!": (10, ("not", 10)),
    "and": (5, None, ("and", 5)),
    "&": (5, None, ("and", 5)),
    "or": (4, None, ("or", 4)),
    "|": (4, None, ("or", 4)),
    "+": (4, None, ("or", 4)),
    ",": (2, None, ("list", 2)),
    ")": (0, None, None),
    "symbol": (0, ("symbol",), None),
    "string": (0, ("string",), None),
    "end": (0, None, None),
}

# words the tokenizer must emit as operators, not symbols
keywords = set(['and', 'or', 'not'])
Matt Mackall
|
def tokenize(program, lookup=None):
    '''
    Parse a revset statement into a stream of tokens

    Check that @ is a valid unquoted token character (issue3686):
    >>> list(tokenize("@::"))
    [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]

    '''
    # Yields (type, value, position) triples; ``lookup``, when given, is a
    # callable used to decide whether a dash-containing word is a real
    # symbol (e.g. a tag named "foo-bar") or a subtraction expression.
    pos, l = 0, len(program)
    while pos < l:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
            yield ('::', None, pos)
            pos += 1 # skip ahead
        elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
            yield ('..', None, pos)
            pos += 1 # skip ahead
        elif c in "():,-|&+!~^": # handle simple operators
            yield (c, None, pos)
        elif (c in '"\'' or c == 'r' and
              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
            if c == 'r':
                # raw string: no escape processing
                pos += 1
                c = program[pos]
                decode = lambda x: x
            else:
                decode = lambda x: x.decode('string-escape')
            pos += 1
            s = pos
            while pos < l: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', decode(program[s:pos]), s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        # gather up a symbol/keyword
        elif c.isalnum() or c in '._@' or ord(c) > 127:
            s = pos
            pos += 1
            while pos < l: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d in "-._/@" or ord(d) > 127):
                    break
                if d == '.' and program[pos - 1] == '.': # special case for ..
                    pos -= 1
                    break
                pos += 1
            sym = program[s:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, s)
            elif '-' in sym:
                # some jerk gave us foo-bar-baz, try to check if it's a symbol
                if lookup and lookup(sym):
                    # looks like a real symbol
                    yield ('symbol', sym, s)
                else:
                    # looks like an expression
                    parts = sym.split('-')
                    for p in parts[:-1]:
                        if p: # possible consecutive -
                            yield ('symbol', p, s)
                        s += len(p)
                        yield ('-', None, pos)
                        s += 1
                    if parts[-1]: # possible trailing -
                        yield ('symbol', parts[-1], s)
            else:
                yield ('symbol', sym, s)
            pos -= 1
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    yield ('end', None, pos)
Matt Mackall
|
r11275 | |||
# helpers | ||||
def getstring(x, err):
    """Extract the string value from a 'string' or 'symbol' parse node.

    Raises ParseError with message ``err`` for any other node.
    """
    if x and x[0] in ('string', 'symbol'):
        return x[1]
    raise error.ParseError(err)
Matt Mackall
|
r11275 | |||
def getlist(x):
    """Flatten a left-nested 'list' parse tree into a Python list of nodes."""
    if not x:
        return []
    if x[0] != 'list':
        # a single expression stands for a one-element list
        return [x]
    return getlist(x[1]) + [x[2]]
Matt Mackall
|
def getargs(x, min, max, err):
    """Flatten an argument tree and enforce an arity of min..max.

    A negative ``max`` means "unbounded".  Raises ParseError with
    message ``err`` when the count is out of range.
    """
    args = getlist(x)
    if len(args) < min or (max >= 0 and len(args) > max):
        raise error.ParseError(err)
    return args
def getset(repo, subset, x):
    """Evaluate parse tree ``x`` against ``subset``.

    Dispatches on the node type via the ``methods`` table and wraps
    plain iterables in a baseset so callers always get a smartset-like
    object (detected by the presence of a ``set`` attribute).
    """
    if not x:
        raise error.ParseError(_("missing argument"))
    result = methods[x[0]](repo, subset, *x[1:])
    if util.safehasattr(result, 'set'):
        return result
    return baseset(result)
Matt Mackall
|
r11275 | |||
Matt Harbison
|
def _getrevsource(repo, r):
    """Return the rev recorded as r's source by graft/transplant/rebase.

    Returns None when no source is recorded or it cannot be resolved.
    """
    extra = repo[r].extra()
    for label in ('source', 'transplant_source', 'rebase_source'):
        if label not in extra:
            continue
        try:
            return repo[extra[label]].rev()
        except error.RepoLookupError:
            # stale source pointer: ignore and try the next label
            pass
    return None
Matt Mackall
|
r11275 | # operator methods | ||
# operator methods
def stringset(repo, subset, x):
    """Resolve a bare revision identifier to a one-element baseset."""
    rev = repo[x].rev()
    fullrepo = len(subset) == len(repo)
    if rev == -1 and fullrepo:
        return baseset([-1])
    if fullrepo or rev in subset:
        return baseset([rev])
    return baseset([])
Matt Mackall
|
r11275 | |||
def symbolset(repo, subset, x):
    """Resolve a symbol node; predicate names are not valid bare symbols."""
    if x in symbols:
        raise error.ParseError(_("can't use %s here") % x)
    # anything else is treated as a revision identifier
    return stringset(repo, subset, x)
def rangeset(repo, subset, x, y):
    """Handle the ``x:y`` range operator.

    The direction of the result follows the order of the endpoints:
    a descending span is produced when the left endpoint is higher.
    """
    cl = baseset(repo.changelog)
    begin = getset(repo, cl, x)
    end = getset(repo, cl, y)
    if not begin or not end:
        return baseset([])
    lo, hi = begin[0], end[-1]
    if lo < hi:
        span = spanset(repo, lo, hi + 1)
    else:
        # reversed endpoints: descending span
        span = spanset(repo, lo, hi - 1)
    return span & subset
Matt Mackall
|
r11275 | |||
Bryan O'Sullivan
|
def dagrange(repo, subset, x, y):
    """Handle the ``x::y`` DAG range operator."""
    everything = spanset(repo)
    between = _revsbetween(repo, getset(repo, everything, x),
                           getset(repo, everything, y))
    members = subset.set()
    return between.filter(members.__contains__)
Bryan O'Sullivan
|
r16860 | |||
Matt Mackall
|
def andset(repo, subset, x, y):
    """Intersection: evaluate y within the result of evaluating x."""
    narrowed = getset(repo, subset, x)
    return getset(repo, narrowed, y)
def orset(repo, subset, x, y):
    """Union: evaluate y only against what x did not already match."""
    left = getset(repo, subset, x)
    right = getset(repo, subset - left, y)
    return left + right
Matt Mackall
|
r11275 | |||
def notset(repo, subset, x):
    """Complement of x within subset."""
    excluded = getset(repo, subset, x)
    return subset - excluded
Matt Mackall
|
r11275 | |||
def listset(repo, subset, a, b):
    """Bare lists are only valid as function arguments; reject them here."""
    raise error.ParseError(_("can't use a list in this context"))
Matt Mackall
|
r11275 | |||
def func(repo, subset, a, b):
    """Dispatch a function-call node to its predicate in ``symbols``."""
    if a[0] != 'symbol' or a[1] not in symbols:
        raise error.ParseError(_("not a function: %s") % a[1])
    return symbols[a[1]](repo, subset, b)
Matt Mackall
|
r11275 | |||
# functions | ||||
Idan Kamara
|
# functions
def adds(repo, subset, x):
    """``adds(pattern)``
    Changesets that add a file matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "adds" is a keyword
    pat = getstring(x, _("adds requires a pattern"))
    # status field 1 holds added files
    return checkstatus(repo, subset, pat, 1)
def ancestor(repo, subset, x):
    """``ancestor(*changeset)``
    A greatest common ancestor of the changesets.

    Accepts 0 or more changesets.
    Will return empty list when passed no args.
    Greatest common ancestor of a single changeset is that changeset.
    """
    # i18n: "ancestor" is a keyword
    args = getlist(x)
    allrevs = spanset(repo)
    gca = None

    # fold the GCA pairwise over every rev of every argument set
    for arg in args:
        for r in getset(repo, allrevs, arg):
            if gca is None:
                gca = repo[r]
            else:
                gca = gca.ancestor(repo[r])

    if gca is not None and gca.rev() in subset:
        return baseset([gca.rev()])
    return baseset([])
Idan Kamara
|
r13915 | |||
Patrick Mezard
|
def _ancestors(repo, subset, x, followfirst=False):
    """Shared implementation of ancestors() and _firstancestors()."""
    heads = getset(repo, spanset(repo), x)
    if not heads:
        return baseset([])
    ancs = _revancestors(repo, heads, followfirst)
    return subset.filter(ancs.__contains__)
Patrick Mezard
|
r16409 | |||
Idan Kamara
|
def ancestors(repo, subset, x):
    """``ancestors(set)``
    Changesets that are ancestors of a changeset in set.
    """
    # thin wrapper: default follows both parents
    return _ancestors(repo, subset, x)
def _firstancestors(repo, subset, x):
    # ``_firstancestors(set)``
    # Like ``ancestors(set)`` but follows only the first parents.
    return _ancestors(repo, subset, x, followfirst=True)
Idan Kamara
|
r13915 | |||
Kevin Gessner
|
def ancestorspec(repo, subset, x, n):
    """``set~n``
    Changesets that are the Nth ancestor (first parents only) of a changeset
    in set.
    """
    try:
        n = int(n[1])
    except (TypeError, ValueError):
        raise error.ParseError(_("~ expects a number"))
    ancs = set()
    cl = repo.changelog
    for r in getset(repo, baseset(cl), x):
        # walk n first-parent links up from r
        for _step in range(n):
            r = cl.parentrevs(r)[0]
        ancs.add(r)
    return subset & ancs
Kevin Gessner
|
r14070 | |||
Idan Kamara
|
def author(repo, subset, x):
    """``author(string)``
    Alias for ``user(string)``.
    """
    # i18n: "author" is a keyword
    needle = encoding.lower(getstring(x, _("author requires a string")))
    kind, pattern, matcher = _substringmatcher(needle)

    def byauthor(r):
        return matcher(encoding.lower(repo[r].user()))

    return subset.filter(byauthor)
Idan Kamara
|
r13915 | |||
Durham Goode
|
def only(repo, subset, x):
    """``only(set, [set])``
    Changesets that are ancestors of the first set that are not ancestors
    of any other head in the repo. If a second set is specified, the result
    is ancestors of the first set that are not ancestors of the second set
    (i.e. ::<set1> - ::<set2>).
    """
    cl = repo.changelog
    # i18n: "only" is a keyword
    args = getargs(x, 1, 2, _('only takes one or two arguments'))
    include = getset(repo, spanset(repo), args[0]).set()
    if len(args) == 1:
        if not include:
            return baseset([])

        # exclude every head that is neither in nor descended from 'include'
        descendants = set(_revdescendants(repo, include, False))
        exclude = [rev for rev in cl.headrevs()
                   if rev not in descendants and rev not in include]
    else:
        exclude = getset(repo, spanset(repo), args[1])

    results = set(ancestormod.missingancestors(include, exclude, cl.parentrevs))
    return lazyset(subset, results.__contains__)
Idan Kamara
|
r13915 | |||
"Yann E. MORIN"
|
def bisect(repo, subset, x):
    """``bisect(string)``
    Changesets marked in the specified bisect status:

    - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
    - ``goods``, ``bads`` : csets topologically good/bad
    - ``range`` : csets taking part in the bisection
    - ``pruned`` : csets that are goods, bads or skipped
    - ``untested`` : csets whose fate is yet unknown
    - ``ignored`` : csets ignored due to DAG topology
    - ``current`` : the cset currently being bisected
    """
    # i18n: "bisect" is a keyword
    status = getstring(x, _("bisect requires a string")).lower()
    matching = set(hbisect.get(repo, status))
    return subset & matching
Idan Kamara
|
r13915 | |||
"Yann E. MORIN"
|
# Backward-compatibility
# - no help entry so that we do not advertise it any more
def bisected(repo, subset, x):
    """Deprecated alias kept for old revsets; use bisect()."""
    return bisect(repo, subset, x)
Idan Kamara
|
def bookmark(repo, subset, x):
    """``bookmark([name])``
    The named bookmark or all bookmarks.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a bookmark that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "bookmark" is a keyword
    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
    if args:
        bm = getstring(args[0],
                       # i18n: "bookmark" is a keyword
                       _('the argument to bookmark must be a string'))
        kind, pattern, matcher = _stringmatcher(bm)
        bms = set()
        if kind == 'literal':
            # exact name: a single lookup in the bookmark store
            bmrev = repo._bookmarks.get(pattern, None)
            if not bmrev:
                raise util.Abort(_("bookmark '%s' does not exist") % bm)
            bms.add(repo[bmrev].rev())
        else:
            # pattern (e.g. re:): scan every bookmark name
            matchrevs = set()
            for name, bmrev in repo._bookmarks.iteritems():
                if matcher(name):
                    matchrevs.add(bmrev)
            if not matchrevs:
                raise util.Abort(_("no bookmarks exist that match '%s'")
                                 % pattern)
            for bmrev in matchrevs:
                bms.add(repo[bmrev].rev())
    else:
        # no argument: all bookmarked revisions
        bms = set([repo[r].rev()
                   for r in repo._bookmarks.values()])
    # a bookmark may point at the null revision; never return it
    bms -= set([node.nullrev])
    return subset & bms
Idan Kamara
|
r13915 | |||
def branch(repo, subset, x):
    """``branch(string or set)``
    All changesets belonging to the given branch or the branches of the given
    changesets.

    If `string` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a branch that actually starts with `re:`,
    use the prefix `literal:`.
    """
    try:
        b = getstring(x, '')
    except error.ParseError:
        # not a string, but another revspec, e.g. tip()
        pass
    else:
        kind, pattern, matcher = _stringmatcher(b)
        if kind == 'literal':
            # note: falls through to the revspec case if no branch with
            # this name exists
            if pattern in repo.branchmap():
                return subset.filter(lambda r: matcher(repo[r].branch()))
        else:
            return subset.filter(lambda r: matcher(repo[r].branch()))

    # revspec case: collect the branches of the given changesets, then
    # select every rev on one of those branches (or in the set itself)
    s = getset(repo, spanset(repo), x)
    b = set()
    for r in s:
        b.add(repo[r].branch())
    s = s.set()
    return subset.filter(lambda r: r in s or repo[r].branch() in b)
Idan Kamara
|
r13915 | |||
Pierre-Yves David
|
def bumped(repo, subset, x):
    """``bumped()``
    Mutable changesets marked as successors of public changesets.

    Only non-public and non-obsolete changesets can be `bumped`.
    """
    # i18n: "bumped" is a keyword
    getargs(x, 0, 0, _("bumped takes no arguments"))
    bumpedrevs = obsmod.getrevs(repo, 'bumped')
    return subset & bumpedrevs
Pierre-Yves David
|
r17829 | |||
Tomasz Kleczek
|
def bundle(repo, subset, x):
    """``bundle()``
    Changesets in the bundle.

    Bundle must be specified by the -R option."""
    try:
        bundlerevs = repo.changelog.bundlerevs
    except AttributeError:
        # only bundlerepo changelogs carry a bundlerevs attribute
        raise util.Abort(_("no bundle provided - specify with -R"))
    return subset & bundlerevs
Tomasz Kleczek
|
r17913 | |||
Idan Kamara
|
def checkstatus(repo, subset, pat, field):
    """Helper for adds()/modifies()/removes().

    Returns the revs in ``subset`` where ``pat`` matches a file in the
    given ``field`` of repo.status()'s result tuple (e.g. 1 = added,
    as used by adds()).
    """
    hasset = matchmod.patkind(pat) == 'set'

    def matches(x):
        c = repo[x]
        # A fresh matcher is built for every revision: 'set:' patterns are
        # context-sensitive, and the old ``if not m or hasset:`` guard was
        # dead code since m was always None at that point.
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
        fname = None
        if not m.anypats() and len(m.files()) == 1:
            # plain single-file pattern: cheap membership tests suffice
            fname = m.files()[0]
        if fname is not None:
            if fname not in c.files():
                return False
        else:
            for f in c.files():
                if m(f):
                    break
            else:
                return False
        files = repo.status(c.p1().node(), c.node())[field]
        if fname is not None:
            if fname in files:
                return True
        else:
            for f in files:
                if m(f):
                    return True
        return False

    return subset.filter(matches)
Idan Kamara
|
r13915 | |||
Patrick Mezard
|
def _children(repo, narrow, parentset):
    """Return the revs in ``narrow`` with at least one parent in
    ``parentset``."""
    kids = set()
    if not parentset:
        return baseset(kids)
    pr = repo.changelog.parentrevs
    minrev = min(parentset)
    for r in narrow:
        # children are always numbered above their parents
        if r <= minrev:
            continue
        for p in pr(r):
            if p in parentset:
                kids.add(r)
    return baseset(kids)
Matt Mackall
|
r15899 | |||
Idan Kamara
|
def children(repo, subset, x):
    """``children(set)``
    Child changesets of changesets in set.
    """
    parents = getset(repo, baseset(repo), x).set()
    kids = _children(repo, subset, parents)
    return subset & kids
Idan Kamara
|
r13915 | |||
def closed(repo, subset, x):
    """``closed()``
    Changeset is closed.
    """
    # i18n: "closed" is a keyword
    getargs(x, 0, 0, _("closed takes no arguments"))

    def isclosed(r):
        return repo[r].closesbranch()

    return subset.filter(isclosed)
Idan Kamara
|
r13915 | |||
def contains(repo, subset, x):
    """``contains(pattern)``
    The revision's manifest contains a file matching pattern (but might not
    modify it). See :hg:`help patterns` for information about file patterns.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))

    if not matchmod.patkind(pat):
        # plain path: the kind test and canonicalization do not depend on
        # the revision, so do them once instead of per filtered rev
        pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)

        def matches(x):
            return pats in repo[x]
    else:
        def matches(x):
            # 'set:' and friends are context-sensitive, so the matcher
            # must be rebuilt per revision
            c = repo[x]
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            for f in c.manifest():
                if m(f):
                    return True
            return False

    return subset.filter(matches)
Idan Kamara
|
r13915 | |||
Matt Harbison
|
def converted(repo, subset, x):
    """``converted([id])``
    Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """
    # There is exactly no chance of resolving the revision, so do a simple
    # string compare and hope for the best
    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))

    def _matchvalue(r):
        source = repo[r].extra().get('convert_revision', None)
        return source is not None and (rev is None or source.startswith(rev))

    # pass the predicate directly; the lambda wrapper added nothing
    return subset.filter(_matchvalue)
Matt Harbison
|
r17002 | |||
Idan Kamara
|
def date(repo, subset, x):
    """``date(interval)``
    Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    spec = getstring(x, _("date requires a string"))
    indate = util.matchdate(spec)
    # date()[0] is the commit timestamp
    return subset.filter(lambda r: indate(repo[r].date()[0]))
Idan Kamara
|
r13915 | |||
Thomas Arendsen Hein
|
def desc(repo, subset, x):
    """``desc(string)``
    Search commit message for string. The match is case-insensitive.
    """
    # i18n: "desc" is a keyword
    needle = encoding.lower(getstring(x, _("desc requires a string")))

    def indesc(r):
        return needle in encoding.lower(repo[r].description())

    return subset.filter(indesc)
Thomas Arendsen Hein
|
r14650 | |||
Patrick Mezard
|
def _descendants(repo, subset, x, followfirst=False):
    """Shared implementation of descendants() and _firstdescendants().

    The result includes the argument revs themselves (unlike
    _revdescendants, which excludes them), hence the union below.
    """
    args = getset(repo, spanset(repo), x)
    if not args:
        return baseset([])
    s = _revdescendants(repo, args, followfirst)

    # Both sets need to be ascending in order to lazily return the union
    # in the correct order.
    args.ascending()
    result = (orderedlazyset(s, subset.__contains__, ascending=True) +
              orderedlazyset(args, subset.__contains__, ascending=True))

    # Wrap result in a lazyset since it's an _addset, which doesn't implement
    # all the necessary functions to be consumed by callers.
    return orderedlazyset(result, lambda r: True, ascending=True)
Patrick Mezard
|
r16409 | |||
Idan Kamara
|
def descendants(repo, subset, x):
    """``descendants(set)``
    Changesets which are descendants of changesets in set.
    """
    # thin wrapper: default follows both parents
    return _descendants(repo, subset, x)
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    return _descendants(repo, subset, x, followfirst=True)
Idan Kamara
|
r13915 | |||
Matt Harbison
|
def destination(repo, subset, x):
    """``destination([set])``
    Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source.  Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        args = getset(repo, spanset(repo), x).set()
    else:
        args = getall(repo, spanset(repo), x).set()

    dests = set()

    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the args.
    # Even if the immediate src of r is not in the args, src's source (or
    # further back) may be.  Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        lineage = None

        while src is not None:
            if lineage is None:
                lineage = []  # idiomatic literal instead of list()

            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set.  Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset.  Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in args or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return subset.filter(dests.__contains__)
Matt Harbison
|
r17186 | |||
Pierre-Yves David
|
def divergent(repo, subset, x):
    """``divergent()``
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    divergentrevs = obsmod.getrevs(repo, 'divergent')
    return subset & divergentrevs
Pierre-Yves David
|
r18071 | |||
Pierre-Yves David
|
def draft(repo, subset, x):
    """``draft()``
    Changeset in draft phase."""
    # i18n: "draft" is a keyword
    getargs(x, 0, 0, _("draft takes no arguments"))
    phasecache = repo._phasecache

    def indraft(r):
        return phasecache.phase(repo, r) == phases.draft

    return subset.filter(indraft)
Pierre-Yves David
|
r15819 | |||
Pierre-Yves David
|
def extinct(repo, subset, x):
    """``extinct()``
    Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    extinctrevs = obsmod.getrevs(repo, 'extinct')
    return subset & extinctrevs
Pierre-Yves David
|
r17173 | |||
Henrik Stuart
|
def extra(repo, subset, x):
    """``extra(label, [value])``
    Changesets with the given label in the extra metadata, with the given
    optional value.

    If `value` starts with `re:`, the remainder of the value is treated as
    a regular expression. To match a value that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "extra" is a keyword
    l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
    # i18n: "extra" is a keyword
    label = getstring(l[0], _('first argument to extra must be a string'))
    value = None

    if len(l) > 1:
        # i18n: "extra" is a keyword
        value = getstring(l[1], _('second argument to extra must be a string'))
        kind, value, matcher = _stringmatcher(value)

    def _matchvalue(r):
        extra = repo[r].extra()
        return label in extra and (value is None or matcher(extra[label]))

    # pass the predicate directly; the lambda wrapper added nothing
    return subset.filter(_matchvalue)
Pierre-Yves David
|
r15819 | |||
Matt Mackall
|
def filelog(repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, visits only revisions mentioned in the file-level
    filelog, rather than filtering through all changesets (much faster, but
    doesn't include deletes or duplicate changes). For a slower, more accurate
    result, use ``file()``.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.

    """
    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    s = set()

    if not matchmod.patkind(pat):
        # plain path: resolve it once and walk that single filelog
        f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
        fl = repo.file(f)
        for fr in fl:
            s.add(fl.linkrev(fr))
    else:
        # pattern: match against working-directory files, walking each
        # matching file's filelog
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
        for f in repo[None]:
            if m(f):
                fl = repo.file(f)
                for fr in fl:
                    s.add(fl.linkrev(fr))

    return subset & s
Matt Mackall
|
r14342 | |||
Matt Mackall
|
def first(repo, subset, x):
    """``first(set, [n])``
    An alias for limit().
    """
    # thin alias: all argument parsing happens in limit()
    return limit(repo, subset, x)
Patrick Mezard
|
def _follow(repo, subset, x, name, followfirst=False):
    # Shared implementation of follow()/_followfirst(): ancestors of the
    # working-directory parent, or of a named file's history when a
    # filename argument is supplied.  `name` is only used in error text.
    l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
    c = repo['.']
    if l:
        x = getstring(l[0], _("%s expected a filename") % name)
        if x in c:
            cx = c[x]
            s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
            # include the revision responsible for the most recent version
            s.add(cx.linkrev())
        else:
            # file not present in the working-directory parent: empty result
            return baseset([])
    else:
        s = _revancestors(repo, baseset([c.rev()]), followfirst)

    return subset & s
Patrick Mezard
|
r16185 | |||
Idan Kamara
|
def follow(repo, subset, x):
    """``follow([file])``
    An alias for ``::.`` (ancestors of the working copy's first parent).
    If a filename is specified, the history of the given file is followed,
    including copies.
    """
    return _follow(repo, subset, x, 'follow')
Matt Mackall
|
r14343 | |||
Patrick Mezard
|
def _followfirst(repo, subset, x):
    # ``followfirst([file])``
    # Like ``follow([file])`` but follows only the first parent of
    # every revision or file revision.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)
Matt Mackall
|
r14343 | |||
Idan Kamara
|
def getall(repo, subset, x):
    """``all()``
    All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    # the incoming subset already spans every visible revision
    return subset
def grep(repo, subset, x):
    """``grep(regex)``
    Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
    to ensure special escape characters are handled correctly. Unlike
    ``keyword(string)``, the match is case-sensitive.
    """
    try:
        # i18n: "grep" is a keyword
        gr = re.compile(getstring(x, _("grep requires a string")))
    except re.error, e:
        raise error.ParseError(_('invalid match pattern: %s') % e)

    def matches(x):
        # search changed-file names, the user and the full description
        c = repo[x]
        for e in c.files() + [c.user(), c.description()]:
            if gr.search(e):
                return True
        return False

    return subset.filter(matches)
Idan Kamara
|
r13915 | |||
Patrick Mezard
|
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    # i18n: "_matchfiles" is a keyword
    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
    pats, inc, exc = [], [], []
    hasset = False
    rev, default = None, None
    for arg in l:
        # i18n: "_matchfiles" is a keyword
        s = getstring(arg, _("_matchfiles requires string arguments"))
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'revision'))
            rev = value
        elif prefix == 'd:':
            if default is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'default mode'))
            default = value
        else:
            # i18n: "_matchfiles" is a keyword
            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
        # fileset patterns ('set:...') need a context to evaluate in
        if not hasset and matchmod.patkind(value) == 'set':
            hasset = True
    if not default:
        default = 'glob'

    def matches(x):
        m = None
        c = repo[x]
        # NOTE(review): `m` is always None here, so this guard is always
        # true and a new matcher is built per revision; looks like a
        # leftover from when `m` was cached outside the closure — confirm.
        if not m or (hasset and rev is None):
            ctx = c
            if rev is not None:
                # 'r:' with an empty value means the working directory
                ctx = repo[rev or None]
            m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                               exclude=exc, ctx=ctx, default=default)
        for f in c.files():
            if m(f):
                return True
        return False

    return subset.filter(matches)
Patrick Mezard
|
r16161 | |||
Idan Kamara
|
def hasfile(repo, subset, x):
    """``file(pattern)``
    Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    # delegate to the generic prefixed-argument matcher ('p:' = pattern)
    return _matchfiles(repo, subset, ('string', 'p:' + pat))
Idan Kamara
|
r13915 | |||
def head(repo, subset, x):
    """``head()``
    Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    hs = set()
    # collect the head revisions of every named branch
    for b, ls in repo.branchmap().iteritems():
        hs.update(repo[h].rev() for h in ls)
    return baseset(hs).filter(subset.__contains__)
Idan Kamara
|
r13915 | |||
def heads(repo, subset, x):
    """``heads(set)``
    Members of set with no children in set.
    """
    s = getset(repo, subset, x)
    # a head of the set is any member that is not a parent of a member
    ps = parents(repo, subset, x)
    return s - ps
Idan Kamara
|
r13915 | |||
Patrick Mezard
|
def hidden(repo, subset, x):
    """``hidden()``
    Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    # everything filtered out of the 'visible' repo view is hidden
    hiddenrevs = repoview.filterrevs(repo, 'visible')
    return subset & hiddenrevs
Patrick Mezard
|
r17390 | |||
Idan Kamara
|
def keyword(repo, subset, x):
    """``keyword(string)``
    Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    # lower-case once; each candidate field is lowered per comparison
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        c = repo[r]
        return util.any(kw in encoding.lower(t) for t in c.files() + [c.user(),
            c.description()])

    return subset.filter(matches)
Idan Kamara
|
r13915 | |||
def limit(repo, subset, x):
    """``limit(set, [n])``
    First n members of set, defaulting to 1.
    """
    # i18n: "limit" is a keyword
    l = getargs(x, 1, 2, _("limit requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "limit" is a keyword
            lim = int(getstring(l[1], _("limit requires a number")))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    ss = subset.set()
    # evaluate the argument set over the whole repo, then keep only
    # members that are also in the incoming subset
    os = getset(repo, spanset(repo), l[0])
    bs = baseset([])
    it = iter(os)
    # loop counter renamed from `x`, which shadowed the parse-tree
    # argument; `try` narrowed so only exhaustion of `it` ends the loop
    for _i in xrange(lim):
        try:
            y = it.next()
        except StopIteration:
            break
        if y in ss:
            bs.append(y)
    return bs
Idan Kamara
|
r13915 | |||
Matt Mackall
|
def last(repo, subset, x):
    """``last(set, [n])``
    Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    ss = subset.set()
    os = getset(repo, spanset(repo), l[0])
    # walk the argument set back-to-front so the "last" members come first
    os.reverse()
    bs = baseset([])
    it = iter(os)
    # loop counter renamed from `x`, which shadowed the parse-tree
    # argument; `try` narrowed so only exhaustion of `it` ends the loop
    for _i in xrange(lim):
        try:
            y = it.next()
        except StopIteration:
            break
        if y in ss:
            bs.append(y)
    return bs
Matt Mackall
|
r14061 | |||
Idan Kamara
|
def maxrev(repo, subset, x):
    """``max(set)``
    Changeset with highest revision number in set.
    """
    os = getset(repo, spanset(repo), x)
    if os:
        m = os.max()
        # only report the max if it survives the incoming subset filter
        if m in subset:
            return baseset([m])
    return baseset([])
Idan Kamara
|
r13915 | |||
def merge(repo, subset, x):
    """``merge()``
    Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    cl = repo.changelog
    # a merge has a real (non -1) second parent
    return subset.filter(lambda r: cl.parentrevs(r)[1] != -1)
Idan Kamara
|
r13915 | |||
Ivan Andrus
|
def branchpoint(repo, subset, x):
    """``branchpoint()``
    Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return baseset([])
    # count children only for revisions >= the smallest subset member;
    # children always have higher revision numbers than their parents
    baserev = min(subset)
    parentscount = [0]*(len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                parentscount[p - baserev] += 1
    return subset.filter(lambda r: parentscount[r - baserev] > 1)
Ivan Andrus
|
r17753 | |||
Idan Kamara
|
def minrev(repo, subset, x):
    """``min(set)``
    Changeset with lowest revision number in set.
    """
    os = getset(repo, spanset(repo), x)
    if os:
        m = os.min()
        # only report the min if it survives the incoming subset filter
        if m in subset:
            return baseset([m])
    return baseset([])
Idan Kamara
|
r13915 | |||
def modifies(repo, subset, x):
    """``modifies(pattern)``
    Changesets modifying files matched by pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "modifies" is a keyword
    pat = getstring(x, _("modifies requires a pattern"))
    # status field 0 = modified files
    return checkstatus(repo, subset, pat, 0)
Matt Mackall
|
def node_(repo, subset, x):
    """``id(string)``
    Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    l = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    n = getstring(l[0], _("id requires a string"))
    if len(n) == 40:
        # full 40-char hex node: direct lookup
        rn = repo[n].rev()
    else:
        # prefix: resolve via the changelog's partial-match machinery;
        # rn stays None (matching nothing) when the prefix is unknown
        rn = None
        pm = repo.changelog._partialmatch(n)
        if pm is not None:
            rn = repo.changelog.rev(pm)
    return subset.filter(lambda r: r == rn)
Augie Fackler
|
r12716 | |||
Pierre-Yves David
|
def obsolete(repo, subset, x):
    """``obsolete()``
    Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    # precomputed revision set from the obsolescence store
    obsoletes = obsmod.getrevs(repo, 'obsolete')
    return subset & obsoletes
Pierre-Yves David
|
r17170 | |||
Matt Harbison
|
def origin(repo, subset, x):
    """``origin([set])``
    Changesets that were specified as a source for the grafts, transplants or
    rebases that created the given revisions. Omitting the optional set is the
    same as passing all(). If a changeset created by these operations is itself
    specified as a source for one of these operations, only the source changeset
    for the first operation is selected.
    """
    if x is not None:
        args = getset(repo, spanset(repo), x).set()
    else:
        args = getall(repo, spanset(repo), x).set()

    def _firstsrc(rev):
        # walk the graft/transplant/rebase source chain back to its start
        src = _getrevsource(repo, rev)
        if src is None:
            return None
        while True:
            prev = _getrevsource(repo, src)
            if prev is None:
                return src
            src = prev

    o = set([_firstsrc(r) for r in args])
    # revisions without a recorded source map to None; drop them
    o -= set([None])
    return subset & o
Matt Harbison
|
r17185 | |||
Idan Kamara
|
def outgoing(repo, subset, x):
    """``outgoing([path])``
    Changesets not found in the specified destination repository, or the
    default push location.
    """
    import hg # avoid start-up nasties
    # i18n: "outgoing" is a keyword
    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
    # i18n: "outgoing" is a keyword
    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
    dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # silence the discovery chatter while we probe the remote
    repo.ui.pushbuffer()
    outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
    repo.ui.popbuffer()
    cl = repo.changelog
    o = set([cl.rev(r) for r in outgoing.missing])
    return subset & o
Augie Fackler
|
r12716 | |||
Matt Mackall
|
def p1(repo, subset, x):
    """``p1([set])``
    First parent of changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        p = repo[x].p1().rev()
        if p >= 0:
            return subset & baseset([p])
        return baseset([])

    ps = set()
    cl = repo.changelog
    for r in getset(repo, spanset(repo), x):
        ps.add(cl.parentrevs(r)[0])
    # root revisions report nullrev as their parent; exclude it
    ps -= set([node.nullrev])
    return subset & ps
Matt Mackall
|
r11275 | |||
def p2(repo, subset, x):
    """``p2([set])``
    Second parent of changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        ps = repo[x].parents()
        try:
            p = ps[1].rev()
            if p >= 0:
                return subset & baseset([p])
            return baseset([])
        except IndexError:
            # working directory has no second parent (not a merge)
            return baseset([])

    ps = set()
    cl = repo.changelog
    for r in getset(repo, spanset(repo), x):
        ps.add(cl.parentrevs(r)[1])
    # non-merge revisions report nullrev as second parent; exclude it
    ps -= set([node.nullrev])
    return subset & ps
Matt Mackall
|
r11275 | |||
def parents(repo, subset, x):
    """``parents([set])``
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
        # repo[None] is the working directory context
        ps = set(p.rev() for p in repo[x].parents())
    else:
        ps = set()
        cl = repo.changelog
        for r in getset(repo, spanset(repo), x):
            ps.update(cl.parentrevs(r))
    # drop the nullrev placeholder parent
    ps -= set([node.nullrev])
    return baseset(ps) & subset
Matt Mackall
|
r11275 | |||
Kevin Gessner
|
def parentspec(repo, subset, x, n):
    """``set^0``
    The set.
    ``set^1`` (or ``set^``), ``set^2``
    First or second parent, respectively, of all changesets in set.
    """
    try:
        # n arrives as a parse-tree node; its payload is the digit
        n = int(n[1])
        if n not in (0, 1, 2):
            raise ValueError
    except (TypeError, ValueError):
        raise error.ParseError(_("^ expects a number 0, 1, or 2"))
    ps = set()
    cl = repo.changelog
    for r in getset(repo, baseset(cl), x):
        if n == 0:
            # ^0 is the revision itself
            ps.add(r)
        elif n == 1:
            ps.add(cl.parentrevs(r)[0])
        elif n == 2:
            parents = cl.parentrevs(r)
            if len(parents) > 1:
                ps.add(parents[1])
    return subset & ps
Matt Mackall
|
r11275 | |||
Wagner Bruna
|
def present(repo, subset, x):
    """``present(set)``
    An empty set, if any revision in set isn't found; otherwise,
    all revisions in set.

    If any of specified revisions is not present in the local repository,
    the query is normally aborted. But this predicate allows the query
    to continue even in such cases.
    """
    try:
        return getset(repo, subset, x)
    except error.RepoLookupError:
        # swallow unknown-revision errors by design (see docstring)
        return baseset([])
Wagner Bruna
|
r11944 | |||
Pierre-Yves David
|
def public(repo, subset, x):
    """``public()``
    Changeset in public phase."""
    # i18n: "public" is a keyword
    getargs(x, 0, 0, _("public takes no arguments"))
    # hoist the phase cache out of the per-revision lambda
    pc = repo._phasecache
    return subset.filter(lambda r: pc.phase(repo, r) == phases.public)
Pierre-Yves David
|
r15819 | |||
Matt Mackall
|
def remote(repo, subset, x):
    """``remote([id [,path]])``
    Local revision that corresponds to the given identifier in a
    remote repository, if present. Here, the '.' identifier is a
    synonym for the current local branch.
    """
    import hg # avoid start-up nasties
    # i18n: "remote" is a keyword
    l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))

    q = '.'
    if len(l) > 0:
        # i18n: "remote" is a keyword
        q = getstring(l[0], _("remote requires a string id"))
    if q == '.':
        # '.' means "the current local branch" on the remote side
        q = repo['.'].branch()
    dest = ''
    if len(l) > 1:
        # i18n: "remote" is a keyword
        dest = getstring(l[1], _("remote requires a repository path"))
    dest = repo.ui.expandpath(dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    n = other.lookup(q)
    # only report the revision if we actually have it locally
    if n in repo:
        r = repo[n].rev()
        if r in subset:
            return baseset([r])
    return baseset([])
Matt Mackall
|
r15936 | |||
Matt Mackall
|
def removes(repo, subset, x):
    """``removes(pattern)``
    Changesets which remove files matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "removes" is a keyword
    pat = getstring(x, _("removes requires a pattern"))
    # status field 2 = removed files
    return checkstatus(repo, subset, pat, 2)
Idan Kamara
|
def rev(repo, subset, x):
    """``rev(number)``
    Revision with the given numeric identifier.
    """
    # i18n: "rev" is a keyword
    l = getargs(x, 1, 1, _("rev requires one argument"))
    try:
        # i18n: "rev" is a keyword
        l = int(getstring(l[0], _("rev requires a number")))
    except (TypeError, ValueError):
        # i18n: "rev" is a keyword
        raise error.ParseError(_("rev expects a number"))
    return subset & baseset([l])
Matt Mackall
|
r11275 | |||
Angel Ezquerra
|
def matching(repo, subset, x):
    """``matching(revision [, field])``
    Changesets in which a given set of fields match the set of fields in the
    selected revision or set.

    To match more than one field pass the list of fields to match separated
    by spaces (e.g. ``author description``).

    Valid fields are most regular revision fields and some special fields.

    Regular revision fields are ``description``, ``author``, ``branch``,
    ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
    and ``diff``.
    Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
    contents of the revision. Two revisions matching their ``diff`` will
    also match their ``files``.

    Special fields are ``summary`` and ``metadata``:
    ``summary`` matches the first line of the description.
    ``metadata`` is equivalent to matching ``description user date``
    (i.e. it matches the main metadata fields).

    ``metadata`` is the default field which is used when no fields are
    specified. You can match more than one field at a time.
    """
    # i18n: "matching" is a keyword
    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
    revs = getset(repo, baseset(repo.changelog), l[0])

    fieldlist = ['metadata']
    if len(l) > 1:
        fieldlist = getstring(l[1],
                              # i18n: "matching" is a keyword
                              _("matching requires a string "
                                "as its second argument")).split()

    # Make sure that there are no repeated fields,
    # expand the 'special' 'metadata' field type
    # and check the 'files' whenever we check the 'diff'
    fields = []
    for field in fieldlist:
        if field == 'metadata':
            fields += ['user', 'description', 'date']
        elif field == 'diff':
            # a revision matching the diff must also match the files
            # since matching the diff is very costly, make sure to
            # also match the files first
            fields += ['files', 'diff']
        else:
            if field == 'author':
                field = 'user'
            fields.append(field)
    fields = set(fields)
    if 'summary' in fields and 'description' in fields:
        # If a revision matches its description it also matches its summary
        fields.discard('summary')

    # We may want to match more than one field
    # Not all fields take the same amount of time to be matched
    # Sort the selected fields in order of increasing matching cost
    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
        'files', 'description', 'substate', 'diff']
    def fieldkeyfunc(f):
        try:
            return fieldorder.index(f)
        except ValueError:
            # assume an unknown field is very costly
            return len(fieldorder)
    fields = list(fields)
    fields.sort(key=fieldkeyfunc)

    # Each field will be matched with its own "getfield" function
    # which will be added to the getfieldfuncs array of functions
    getfieldfuncs = []
    _funcs = {
        'user': lambda r: repo[r].user(),
        'branch': lambda r: repo[r].branch(),
        'date': lambda r: repo[r].date(),
        'description': lambda r: repo[r].description(),
        'files': lambda r: repo[r].files(),
        'parents': lambda r: repo[r].parents(),
        'phase': lambda r: repo[r].phase(),
        'substate': lambda r: repo[r].substate,
        'summary': lambda r: repo[r].description().splitlines()[0],
        'diff': lambda r: list(repo[r].diff(git=True),)
    }
    for info in fields:
        getfield = _funcs.get(info, None)
        if getfield is None:
            raise error.ParseError(
                # i18n: "matching" is a keyword
                _("unexpected field name passed to matching: %s") % info)
        getfieldfuncs.append(getfield)
    # convert the getfield array of functions into a "getinfo" function
    # which returns an array of field values (or a single value if there
    # is only one field to match)
    getinfo = lambda r: [f(r) for f in getfieldfuncs]

    def matches(x):
        # x matches if every selected field equals the same field of
        # at least one revision in the target set
        for rev in revs:
            target = getinfo(rev)
            match = True
            for n, f in enumerate(getfieldfuncs):
                if target[n] != f(x):
                    match = False
            if match:
                return True
        return False

    return subset.filter(matches)
Angel Ezquerra
|
r16402 | |||
Matt Mackall
|
def reverse(repo, subset, x):
    """``reverse(set)``
    Reverse order of set.
    """
    l = getset(repo, subset, x)
    # in-place reversal of the evaluated set
    l.reverse()
    return l
Idan Kamara
|
def roots(repo, subset, x):
    """``roots(set)``
    Changesets in set with no parent changeset in set.
    """
    s = getset(repo, spanset(repo), x).set()
    # restrict to members also present in the incoming subset
    subset = baseset([r for r in s if r in subset.set()])
    # roots are the members that are not children of any member
    cs = _children(repo, subset, s)
    return subset - cs
Wagner Bruna
|
r11944 | |||
Pierre-Yves David
|
def secret(repo, subset, x):
    """``secret()``
    Changeset in secret phase."""
    # i18n: "secret" is a keyword
    getargs(x, 0, 0, _("secret takes no arguments"))
    # hoist the phase cache out of the per-revision lambda
    pc = repo._phasecache
    return subset.filter(lambda x: pc.phase(repo, x) == phases.secret)
Pierre-Yves David
|
r15819 | |||
Matt Mackall
|
def sort(repo, subset, x):
    """``sort(set[, [-]key...])``
    Sort set by keys. The default sort order is ascending, specify a key
    as ``-key`` to sort in descending order.

    The keys can be:
    - ``rev`` for the revision number,
    - ``branch`` for the branch name,
    - ``desc`` for the commit message (description),
    - ``user`` for user name (``author`` can be used as an alias),
    - ``date`` for the commit date
    """
    # i18n: "sort" is a keyword
    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
    keys = "rev"
    if len(l) == 2:
        # i18n: "sort" is a keyword
        keys = getstring(l[1], _("sort spec must be a string"))

    s = l[0]
    keys = keys.split()
    l = []
    def invert(s):
        # map each character to its complement so that an ascending sort
        # of the inverted string yields descending order of the original
        return "".join(chr(255 - ord(c)) for c in s)
    revs = getset(repo, subset, s)
    # fast paths: plain revision-number ordering needs no decoration
    if keys == ["rev"]:
        revs.sort()
        return revs
    elif keys == ["-rev"]:
        revs.sort(reverse=True)
        return revs
    # decorate-sort-undecorate: build a sort-key tuple per revision
    for r in revs:
        c = repo[r]
        e = []
        for k in keys:
            if k == 'rev':
                e.append(r)
            elif k == '-rev':
                e.append(-r)
            elif k == 'branch':
                e.append(c.branch())
            elif k == '-branch':
                e.append(invert(c.branch()))
            elif k == 'desc':
                e.append(c.description())
            elif k == '-desc':
                e.append(invert(c.description()))
            # NOTE(review): substring test — any substring of 'user author'
            # (e.g. 'r') is accepted as this key; presumably intentional
            # shorthand matching, but worth confirming
            elif k in 'user author':
                e.append(c.user())
            elif k in '-user -author':
                e.append(invert(c.user()))
            elif k == 'date':
                e.append(c.date()[0])
            elif k == '-date':
                e.append(-c.date()[0])
            else:
                raise error.ParseError(_("unknown sort key %r") % k)
        # append the revision itself as a tie-breaker and for undecorating
        e.append(r)
        l.append(e)
    l.sort()
    return baseset([e[-1] for e in l])
Matt Mackall
|
r11275 | |||
Simon King
|
r16819 | def _stringmatcher(pattern): | ||
""" | ||||
accepts a string, possibly starting with 're:' or 'literal:' prefix. | ||||
returns the matcher name, pattern, and matcher function. | ||||
missing or unknown prefixes are treated as literal matches. | ||||
helper for tests: | ||||
>>> def test(pattern, *tests): | ||||
... kind, pattern, matcher = _stringmatcher(pattern) | ||||
... return (kind, pattern, [bool(matcher(t)) for t in tests]) | ||||
exact matching (no prefix): | ||||
>>> test('abcdefg', 'abc', 'def', 'abcdefg') | ||||
('literal', 'abcdefg', [False, False, True]) | ||||
regex matching ('re:' prefix) | ||||
>>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar') | ||||
('re', 'a.+b', [False, False, True]) | ||||
force exact matches ('literal:' prefix) | ||||
>>> test('literal:re:foobar', 'foobar', 're:foobar') | ||||
('literal', 're:foobar', [False, True]) | ||||
unknown prefixes are ignored and treated as literals | ||||
>>> test('foo:bar', 'foo', 'bar', 'foo:bar') | ||||
('literal', 'foo:bar', [False, False, True]) | ||||
""" | ||||
if pattern.startswith('re:'): | ||||
pattern = pattern[3:] | ||||
try: | ||||
regex = re.compile(pattern) | ||||
except re.error, e: | ||||
raise error.ParseError(_('invalid regular expression: %s') | ||||
% e) | ||||
return 're', pattern, regex.search | ||||
elif pattern.startswith('literal:'): | ||||
pattern = pattern[8:] | ||||
return 'literal', pattern, pattern.__eq__ | ||||
Simon King
|
def _substringmatcher(pattern):
    """Like _stringmatcher, but literal patterns match as substrings."""
    kind, pattern, matcher = _stringmatcher(pattern)
    if kind == 'literal':
        # Relax the equality matcher to containment.
        def matcher(s, _pat=pattern):
            return _pat in s
    return kind, pattern, matcher
Simon King
|
r16819 | |||
Augie Fackler
|
def tag(repo, subset, x):
    """``tag([name])``
    The specified tag by name, or all tagged revisions if no name is given.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a tag that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "tag" is a keyword
    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
    cl = repo.changelog
    if args:
        pattern = getstring(args[0],
                            # i18n: "tag" is a keyword
                            _('the argument to tag must be a string'))
        kind, pattern, matcher = _stringmatcher(pattern)
        if kind == 'literal':
            # avoid resolving all tags
            tn = repo._tagscache.tags.get(pattern, None)
            if tn is None:
                raise util.Abort(_("tag '%s' does not exist") % pattern)
            s = set([repo[tn].rev()])
        else:
            # Pattern match: scan the full tag list.
            s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
    else:
        # No argument: every tagged revision except the implicit 'tip' tag.
        s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
    return subset & s
Matt Mackall
|
r11280 | |||
Patrick Mezard
|
def tagged(repo, subset, x):
    # Backwards-compatible alias for tag().
    return tag(repo, subset, x)
Pierre-Yves David
|
def unstable(repo, subset, x):
    """``unstable()``
    Non-obsolete changesets with obsolete ancestors.
    """
    # i18n: "unstable" is a keyword
    getargs(x, 0, 0, _("unstable takes no arguments"))
    troubled = obsmod.getrevs(repo, 'unstable')
    return subset & troubled
Pierre-Yves David
|
r17171 | |||
Idan Kamara
|
def user(repo, subset, x):
    """``user(string)``
    User name contains string. The match is case-insensitive.

    If `string` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a user that actually contains `re:`, use
    the prefix `literal:`.
    """
    # Alias of author(); both predicates share the same implementation.
    return author(repo, subset, x)
Matt Mackall
|
r13359 | |||
Matt Mackall
|
# for internal use
def _list(repo, subset, x):
    """Expand a \\0-separated list of revision identifiers."""
    names = getstring(x, "internal error")
    if not names:
        return baseset([])
    revs = [repo[name].rev() for name in names.split('\0')]
    members = subset.set()
    return baseset([r for r in revs if r in members])
Matt Mackall
|
r15898 | |||
Lucas Moscovicz
|
# for internal use
def _intlist(repo, subset, x):
    """Expand a \\0-separated list of integer revision numbers."""
    spec = getstring(x, "internal error")
    if not spec:
        return baseset([])
    revs = [int(piece) for piece in spec.split('\0')]
    members = subset.set()
    return baseset([r for r in revs if r in members])
Lucas Moscovicz
|
# for internal use
def _hexlist(repo, subset, x):
    """Expand a \\0-separated list of hex node ids."""
    spec = getstring(x, "internal error")
    if not spec:
        return baseset([])
    cl = repo.changelog
    revs = [cl.rev(node.bin(piece)) for piece in spec.split('\0')]
    members = subset.set()
    return baseset([r for r in revs if r in members])
Matt Mackall
|
r15898 | |||
Matt Mackall
|
# Map from revset predicate name (as written in user queries) to the
# function implementing it. Names starting with '_' are internal helpers
# not meant to be typed by users.
symbols = {
    "adds": adds,
    "all": getall,
    "ancestor": ancestor,
    "ancestors": ancestors,
    "_firstancestors": _firstancestors,
    "author": author,
    "only": only,
    "bisect": bisect,
    "bisected": bisected,
    "bookmark": bookmark,
    "branch": branch,
    "branchpoint": branchpoint,
    "bumped": bumped,
    "bundle": bundle,
    "children": children,
    "closed": closed,
    "contains": contains,
    "converted": converted,
    "date": date,
    "desc": desc,
    "descendants": descendants,
    "_firstdescendants": _firstdescendants,
    "destination": destination,
    "divergent": divergent,
    "draft": draft,
    "extinct": extinct,
    "extra": extra,
    "file": hasfile,
    "filelog": filelog,
    "first": first,
    "follow": follow,
    "_followfirst": _followfirst,
    "grep": grep,
    "head": head,
    "heads": heads,
    "hidden": hidden,
    "id": node_,
    "keyword": keyword,
    "last": last,
    "limit": limit,
    "_matchfiles": _matchfiles,
    "max": maxrev,
    "merge": merge,
    "min": minrev,
    "modifies": modifies,
    "obsolete": obsolete,
    "origin": origin,
    "outgoing": outgoing,
    "p1": p1,
    "p2": p2,
    "parents": parents,
    "present": present,
    "public": public,
    "remote": remote,
    "removes": removes,
    "rev": rev,
    "reverse": reverse,
    "roots": roots,
    "sort": sort,
    "secret": secret,
    "matching": matching,
    "tag": tag,
    "tagged": tagged,
    "user": user,
    "unstable": unstable,
    "_list": _list,
    "_intlist": _intlist,
    "_hexlist": _hexlist,
}
Alexander Plavin
|
# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
# functions that just return a lot of changesets (like all) don't count here
safesymbols = set([
    "adds",
    "all",
    "ancestor",
    "ancestors",
    "_firstancestors",
    "author",
    "bisect",
    "bisected",
    "bookmark",
    "branch",
    "branchpoint",
    "bumped",
    "bundle",
    "children",
    "closed",
    "converted",
    "date",
    "desc",
    "descendants",
    "_firstdescendants",
    "destination",
    "divergent",
    "draft",
    "extinct",
    "extra",
    "file",
    "filelog",
    "first",
    "follow",
    "_followfirst",
    "head",
    "heads",
    "hidden",
    "id",
    "keyword",
    "last",
    "limit",
    "_matchfiles",
    "max",
    "merge",
    "min",
    "modifies",
    "obsolete",
    "origin",
    "outgoing",
    "p1",
    "p2",
    "parents",
    "present",
    "public",
    "remote",
    "removes",
    "rev",
    "reverse",
    "roots",
    "sort",
    "secret",
    "matching",
    "tag",
    "tagged",
    "user",
    "unstable",
    "_list",
    "_intlist",
    "_hexlist",
])
Matt Mackall
|
# Map from parse-tree node type (as produced by the revset parser) to the
# function that evaluates that node against a subset.
methods = {
    "range": rangeset,
    "dagrange": dagrange,
    "string": stringset,
    "symbol": symbolset,
    "and": andset,
    "or": orset,
    "not": notset,
    "list": listset,
    "func": func,
    "ancestor": ancestorspec,
    "parent": parentspec,
    "parentpost": p1,
}
Matt Mackall
|
def optimize(x, small):
    """Rewrite parse tree x for faster evaluation and estimate its cost.

    Returns a (weight, tree) pair. 'weight' is a heuristic cost estimate
    used to order 'and' operands so the cheaper side is evaluated first.
    'small' hints that the result is expected to be a small set, which
    lowers the weight of single-revision nodes.
    """
    if x is None:
        return 0, x

    smallbonus = 1
    if small:
        smallbonus = .5

    op = x[0]
    if op == 'minus':
        # 'a - b' is evaluated as 'a and not b'
        return optimize(('and', x[1], ('not', x[2])), small)
    elif op == 'dagrangepre':
        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
    elif op == 'dagrangepost':
        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
    elif op == 'rangepre':
        return optimize(('range', ('string', '0'), x[1]), small)
    elif op == 'rangepost':
        return optimize(('range', x[1], ('string', 'tip')), small)
    elif op == 'negate':
        return optimize(('string',
                         '-' + getstring(x[1], _("can't negate that"))), small)
    elif op in 'string symbol negate':
        return smallbonus, x # single revisions are small
    elif op == 'and':
        wa, ta = optimize(x[1], True)
        wb, tb = optimize(x[2], True)

        # (::x and not ::y)/(not ::y and ::x) have a fast path
        def isonly(revs, bases):
            return (
                revs[0] == 'func'
                and getstring(revs[1], _('not a symbol')) == 'ancestors'
                and bases[0] == 'not'
                and bases[1][0] == 'func'
                and getstring(bases[1][1], _('not a symbol')) == 'ancestors')

        w = min(wa, wb)
        if isonly(ta, tb):
            return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2]))
        if isonly(tb, ta):
            return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2]))

        # evaluate the cheaper operand first
        if wa > wb:
            return w, (op, tb, ta)
        return w, (op, ta, tb)
    elif op == 'or':
        wa, ta = optimize(x[1], False)
        wb, tb = optimize(x[2], False)
        if wb < wa:
            wb, wa = wa, wb
        return max(wa, wb), (op, ta, tb)
    elif op == 'not':
        o = optimize(x[1], not small)
        return o[0], (op, o[1])
    elif op == 'parentpost':
        o = optimize(x[1], small)
        return o[0], (op, o[1])
    elif op == 'group':
        return optimize(x[1], small)
    elif op in 'dagrange range list parent ancestorspec':
        if op == 'parent':
            # x^:y means (x^) : y, not x ^ (:y)
            post = ('parentpost', x[1])
            if x[2][0] == 'dagrangepre':
                return optimize(('dagrange', post, x[2][1]), small)
            elif x[2][0] == 'rangepre':
                return optimize(('range', post, x[2][1]), small)

        wa, ta = optimize(x[1], small)
        wb, tb = optimize(x[2], small)
        return wa + wb, (op, ta, tb)
    elif op == 'func':
        f = getstring(x[1], _("not a symbol"))
        wa, ta = optimize(x[2], small)
        # Per-predicate cost classes, from cheapest to most expensive.
        if f in ("author branch closed date desc file grep keyword "
                 "outgoing user"):
            w = 10 # slow
        elif f in "modifies adds removes":
            w = 30 # slower
        elif f == "contains":
            w = 100 # very slow
        elif f == "ancestor":
            w = 1 * smallbonus
        elif f in "reverse limit first _intlist":
            w = 0
        elif f in "sort":
            w = 10 # assume most sorts look at changelog
        else:
            w = 1
        return w + wa, (op, x[1], ta)
    return 1, x
Matt Mackall
|
r11275 | |||
Patrick Mezard
|
r16771 | _aliasarg = ('func', ('symbol', '_aliasarg')) | ||
def _getaliasarg(tree): | ||||
"""If tree matches ('func', ('symbol', '_aliasarg'), ('string', X)) | ||||
return X, None otherwise. | ||||
""" | ||||
if (len(tree) == 3 and tree[:2] == _aliasarg | ||||
and tree[2][0] == 'string'): | ||||
return tree[2][1] | ||||
return None | ||||
def _checkaliasarg(tree, known=None): | ||||
"""Check tree contains no _aliasarg construct or only ones which | ||||
value is in known. Used to avoid alias placeholders injection. | ||||
""" | ||||
if isinstance(tree, tuple): | ||||
arg = _getaliasarg(tree) | ||||
if arg is not None and (not known or arg not in known): | ||||
raise error.ParseError(_("not a function: %s") % '_aliasarg') | ||||
for t in tree: | ||||
_checkaliasarg(t, known) | ||||
Alexander Solovyov
|
class revsetalias(object):
    # Matches "name(arg1, arg2, ...)" style alias declarations; group 1 is
    # the alias name, group 2 the comma-separated argument names.
    funcre = re.compile('^([^(]+)\(([^)]+)\)$')
    # None for symbol aliases; a list of argument names for function aliases.
    args = None

    def __init__(self, name, value):
        '''Aliases like:

        h = heads(default)
        b($1) = ancestors($1) - ancestors(default)
        '''
        m = self.funcre.search(name)
        if m:
            self.name = m.group(1)
            self.tree = ('func', ('symbol', m.group(1)))
            self.args = [x.strip() for x in m.group(2).split(',')]
            for arg in self.args:
                # _aliasarg() is an unknown symbol only used separate
                # alias argument placeholders from regular strings.
                value = value.replace(arg, '_aliasarg(%r)' % (arg,))
        else:
            self.name = name
            self.tree = ('symbol', name)

        self.replacement, pos = parse(value)
        if pos != len(value):
            raise error.ParseError(_('invalid token'), pos)
        # Check for placeholder injection
        _checkaliasarg(self.replacement, self.args)
Alexander Solovyov
|
r14098 | |||
Patrick Mezard
|
r16096 | def _getalias(aliases, tree): | ||
"""If tree looks like an unexpanded alias, return it. Return None | ||||
otherwise. | ||||
""" | ||||
if isinstance(tree, tuple) and tree: | ||||
if tree[0] == 'symbol' and len(tree) == 2: | ||||
name = tree[1] | ||||
alias = aliases.get(name) | ||||
if alias and alias.args is None and alias.tree == tree: | ||||
return alias | ||||
if tree[0] == 'func' and len(tree) > 1: | ||||
if tree[1][0] == 'symbol' and len(tree[1]) == 2: | ||||
name = tree[1][1] | ||||
alias = aliases.get(name) | ||||
if alias and alias.args is not None and alias.tree == tree[:2]: | ||||
return alias | ||||
return None | ||||
Alexander Solovyov
|
r14098 | |||
Patrick Mezard
|
def _expandargs(tree, args):
    """Recursively substitute each _aliasarg placeholder in tree with the
    value registered under the same name in args.
    """
    if not isinstance(tree, tuple) or not tree:
        return tree
    placeholder = _getaliasarg(tree)
    if placeholder is not None:
        return args[placeholder]
    return tuple(_expandargs(child, args) for child in tree)
Patrick Mezard
|
def _expandaliases(aliases, tree, expanding, cache):
    """Expand aliases in tree, recursively.

    'aliases' is a dictionary mapping user defined aliases to
    revsetalias objects.

    'expanding' is the stack of aliases currently being expanded, used to
    detect cycles; 'cache' memoizes already-expanded alias bodies by name.
    """
    if not isinstance(tree, tuple):
        # Do not expand raw strings
        return tree
    alias = _getalias(aliases, tree)
    if alias is not None:
        if alias in expanding:
            raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                     'detected') % alias.name)
        expanding.append(alias)
        if alias.name not in cache:
            cache[alias.name] = _expandaliases(aliases, alias.replacement,
                                               expanding, cache)
        result = cache[alias.name]
        expanding.pop()
        if alias.args is not None:
            l = getlist(tree[2])
            if len(l) != len(alias.args):
                raise error.ParseError(
                    _('invalid number of arguments: %s') % len(l))
            # Arguments are expanded with a fresh 'expanding' stack: they are
            # independent of the alias currently being expanded.
            l = [_expandaliases(aliases, a, [], cache) for a in l]
            result = _expandargs(result, dict(zip(alias.args, l)))
    else:
        result = tuple(_expandaliases(aliases, t, expanding, cache)
                       for t in tree)
    return result
Alexander Solovyov
|
r14098 | |||
def findaliases(ui, tree):
    """Expand all [revsetalias] config aliases in tree and return it.

    Rejects trees that already contain _aliasarg placeholders (injection).
    """
    _checkaliasarg(tree)
    aliases = {}
    for k, v in ui.configitems('revsetalias'):
        alias = revsetalias(k, v)
        aliases[alias.name] = alias
    return _expandaliases(aliases, tree, [], {})
Alexander Solovyov
|
r14098 | |||
Matt Mackall
|
def parse(spec, lookup=None):
    """Parse a revset expression into a (tree, pos) pair.

    'lookup' is an optional callable used by the tokenizer to decide
    whether a name refers to an existing revision.
    """
    p = parser.parser(tokenize, elements)
    return p.parse(spec, lookup=lookup)
def match(ui, spec, repo=None):
    """Compile a revset expression into a callable taking (repo, subset)."""
    if not spec:
        raise error.ParseError(_("empty query"))
    lookup = repo.__contains__ if repo else None
    tree, pos = parse(spec, lookup)
    if pos != len(spec):
        raise error.ParseError(_("invalid token"), pos)
    if ui:
        tree = findaliases(ui, tree)
    weight, tree = optimize(tree, True)
    def mfunc(repo, subset):
        # Wrap plain sequences so the evaluator always sees a smartset.
        if not util.safehasattr(subset, 'set'):
            subset = baseset(subset)
        return getset(repo, subset, tree)
    return mfunc
Patrick Mezard
|
r12821 | |||
Matt Mackall
|
def formatspec(expr, *args):
    '''
    This is a convenience function for using revsets internally, and
    escapes arguments appropriately. Aliases are intentionally ignored
    so that intended expression behavior isn't accidentally subverted.

    Supported arguments:

    %r = revset expression, parenthesized
    %d = int(arg), no quoting
    %s = string(arg), escaped and single-quoted
    %b = arg.branch(), escaped and single-quoted
    %n = hex(arg), single-quoted
    %% = a literal '%'

    Prefixing the type with 'l' specifies a parenthesized list of that type.

    >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
    '(10 or 11):: and ((this()) or (that()))'
    >>> formatspec('%d:: and not %d::', 10, 20)
    '10:: and not 20::'
    >>> formatspec('%ld or %ld', [], [1])
    "_list('') or 1"
    >>> formatspec('keyword(%s)', 'foo\\xe9')
    "keyword('foo\\\\xe9')"
    >>> b = lambda: 'default'
    >>> b.branch = b
    >>> formatspec('branch(%b)', b)
    "branch('default')"
    >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
    "root(_list('a\\x00b\\x00c\\x00d'))"
    '''

    def quote(s):
        return repr(str(s))

    def argtype(c, arg):
        if c == 'd':
            return str(int(arg))
        if c == 's':
            return quote(arg)
        if c == 'r':
            parse(arg) # make sure syntax errors are confined
            return '(%s)' % arg
        if c == 'n':
            return quote(node.hex(arg))
        if c == 'b':
            return quote(arg.branch())

    def listexp(items, t):
        count = len(items)
        if count == 0:
            return "_list('')"
        if count == 1:
            return argtype(t, items[0])
        if t == 'd':
            return "_intlist('%s')" % "\0".join(str(int(a)) for a in items)
        if t == 's':
            return "_list('%s')" % "\0".join(items)
        if t == 'n':
            return "_hexlist('%s')" % "\0".join(node.hex(a) for a in items)
        if t == 'b':
            return "_list('%s')" % "\0".join(a.branch() for a in items)
        # No compact encoding for this type: split in half, join with 'or'.
        half = count // 2
        return '(%s or %s)' % (listexp(items[:half], t),
                               listexp(items[half:], t))

    pieces = []
    pos = 0
    argindex = 0
    end = len(expr)
    while pos < end:
        ch = expr[pos]
        if ch == '%':
            pos += 1
            code = expr[pos]
            if code == '%':
                pieces.append(code)
            elif code in 'dsnbr':
                pieces.append(argtype(code, args[argindex]))
                argindex += 1
            elif code == 'l':
                # a list of some type
                pos += 1
                code = expr[pos]
                pieces.append(listexp(list(args[argindex]), code))
                argindex += 1
            else:
                raise util.Abort('unexpected revspec format character %s'
                                 % code)
        else:
            pieces.append(ch)
        pos += 1
    return ''.join(pieces)
Patrick Mezard
|
def prettyformat(tree):
    """Render a parse tree as an indented multi-line string."""
    def _flatten(node, level, acc):
        # Leaves render via str(); inner nodes open a paren, recurse into
        # children one level deeper, then close the paren on the last line.
        if not isinstance(node, tuple) or node[0] in ('string', 'symbol'):
            acc.append((level, str(node)))
        else:
            acc.append((level, '(%s' % node[0]))
            for child in node[1:]:
                _flatten(child, level + 1, acc)
            acc[-1] = (acc[-1][0], acc[-1][1] + ')')
    rows = []
    _flatten(tree, 0, rows)
    return '\n'.join(' ' * level + text for level, text in rows)
Alexander Plavin
|
def depth(tree):
    """Return the nesting depth of a parse tree (non-tuple leaves are 0)."""
    if not isinstance(tree, tuple):
        return 0
    return 1 + max(depth(child) for child in tree)
Alexander Plavin
|
def funcsused(tree):
    """Return the set of function names referenced anywhere in tree."""
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return set()
    names = set()
    for child in tree[1:]:
        names.update(funcsused(child))
    if tree[0] == 'func':
        names.add(tree[1][1])
    return names
Lucas Moscovicz
|
class baseset(list):
    """Basic data structure that represents a revset and contains the basic
    operation that it should be able to perform.

    Every method in this class should be implemented by any smartset class.
    """
    def __init__(self, data=()):
        super(baseset, self).__init__(data)
        # lazily-built set() mirror of the list, for O(1) membership tests
        self._set = None

    def ascending(self):
        """Sorts the set in ascending order (in place).

        This is part of the mandatory API for smartset."""
        self.sort()

    def descending(self):
        """Sorts the set in descending order (in place).

        This is part of the mandatory API for smartset."""
        self.sort(reverse=True)

    def min(self):
        return min(self)

    def max(self):
        return max(self)

    def set(self):
        """Returns a set or a smartset containing all the elements.

        The returned structure should be the fastest option for membership
        testing.

        This is part of the mandatory API for smartset."""
        if not self._set:
            self._set = set(self)
        return self._set

    @util.propertycache
    def __contains__(self):
        # Cached bound method: membership tests go straight to the
        # underlying set instead of scanning the list.
        return self.set().__contains__

    def __sub__(self, other):
        """Returns a new object with the substraction of the two collections.

        This is part of the mandatory API for smartset."""
        # If we are operating on 2 baseset, do the computation now since all
        # data is available. The alternative is to involve a lazyset, which
        # may be slow.
        if isinstance(other, baseset):
            other = other.set()
            return baseset([x for x in self if x not in other])

        return self.filter(lambda x: x not in other)

    def __and__(self, other):
        """Returns a new object with the intersection of the two collections.

        This is part of the mandatory API for smartset."""
        if isinstance(other, baseset):
            other = other.set()
        return baseset([y for y in self if y in other])

    def __add__(self, other):
        """Returns a new object with the union of the two collections.

        This is part of the mandatory API for smartset."""
        s = self.set()
        l = [r for r in other if r not in s]
        return baseset(list(self) + l)

    def isascending(self):
        """Returns True if the collection is ascending order, False if not.

        This is part of the mandatory API for smartset."""
        return False

    def isdescending(self):
        """Returns True if the collection is descending order, False if not.

        This is part of the mandatory API for smartset."""
        return False

    def filter(self, condition):
        """Returns this smartset filtered by condition as a new smartset.

        `condition` is a callable which takes a revision number and returns a
        boolean.

        This is part of the mandatory API for smartset."""
        return lazyset(self, condition)
Lucas Moscovicz
|
r20610 | |||
Lucas Moscovicz
|
r20749 | class _orderedsetmixin(object): | ||
"""Mixin class with utility methods for smartsets | ||||
This should be extended by smartsets which have the isascending(), | ||||
isdescending() and reverse() methods""" | ||||
def _first(self): | ||||
"""return the first revision in the set""" | ||||
for r in self: | ||||
return r | ||||
Pierre-Yves David
|
r20863 | raise ValueError('arg is an empty sequence') | ||
Lucas Moscovicz
|
r20749 | |||
def _last(self): | ||||
"""return the last revision in the set""" | ||||
self.reverse() | ||||
m = self._first() | ||||
self.reverse() | ||||
return m | ||||
def min(self): | ||||
"""return the smallest element in the set""" | ||||
if self.isascending(): | ||||
return self._first() | ||||
return self._last() | ||||
def max(self): | ||||
"""return the largest element in the set""" | ||||
if self.isascending(): | ||||
return self._last() | ||||
return self._first() | ||||
Lucas Moscovicz
|
class lazyset(object):
    """Duck type for baseset class which iterates lazily over the revisions in
    the subset and contains a function which tests for membership in the
    revset
    """
    def __init__(self, subset, condition=lambda x: True):
        """
        condition: a function that decide whether a revision in the subset
        belongs to the revset or not.
        """
        self._subset = subset
        self._condition = condition
        # memoizes condition results per revision
        self._cache = {}

    def ascending(self):
        self._subset.sort()

    def descending(self):
        self._subset.sort(reverse=True)

    def min(self):
        return min(self)

    def max(self):
        return max(self)

    def __contains__(self, x):
        c = self._cache
        if x not in c:
            # member iff it is in the subset AND passes the condition
            v = c[x] = x in self._subset and self._condition(x)
            return v
        return c[x]

    def __iter__(self):
        cond = self._condition
        for x in self._subset:
            if cond(x):
                yield x

    def __and__(self, x):
        return lazyset(self, x.__contains__)

    def __sub__(self, x):
        return lazyset(self, lambda r: r not in x)

    def __add__(self, x):
        return _addset(self, x)

    def __nonzero__(self):
        # True as soon as one revision passes the condition; avoids
        # materializing the whole set.
        for r in self:
            return True
        return False

    def __len__(self):
        # Basic implementation to be changed in future patches.
        l = baseset([r for r in self])
        return len(l)

    def __getitem__(self, x):
        # Basic implementation to be changed in future patches.
        l = baseset([r for r in self])
        return l[x]

    def sort(self, reverse=False):
        if not util.safehasattr(self._subset, 'sort'):
            self._subset = baseset(self._subset)
        self._subset.sort(reverse=reverse)

    def reverse(self):
        self._subset.reverse()

    def set(self):
        return set([r for r in self])

    def isascending(self):
        return False

    def isdescending(self):
        return False

    def filter(self, l):
        return lazyset(self, l)
Lucas Moscovicz
|
r20751 | class orderedlazyset(_orderedsetmixin, lazyset): | ||
Lucas Moscovicz
|
r20609 | """Subclass of lazyset which subset can be ordered either ascending or | ||
descendingly | ||||
""" | ||||
def __init__(self, subset, condition, ascending=True): | ||||
super(orderedlazyset, self).__init__(subset, condition) | ||||
self._ascending = ascending | ||||
Lucas Moscovicz
|
r20610 | def filter(self, l): | ||
return orderedlazyset(self, l, ascending=self._ascending) | ||||
Lucas Moscovicz
|
r20657 | def ascending(self): | ||
if not self._ascending: | ||||
self.reverse() | ||||
def descending(self): | ||||
if self._ascending: | ||||
self.reverse() | ||||
Lucas Moscovicz
|
r20612 | def __and__(self, x): | ||
Pierre-Yves David
|
r21215 | return orderedlazyset(self, x.__contains__, | ||
Lucas Moscovicz
|
r20612 | ascending=self._ascending) | ||
def __sub__(self, x): | ||||
return orderedlazyset(self, lambda r: r not in x, | ||||
ascending=self._ascending) | ||||
Lucas Moscovicz
|
r20734 | def __add__(self, x): | ||
kwargs = {} | ||||
if self.isascending() and x.isascending(): | ||||
kwargs['ascending'] = True | ||||
if self.isdescending() and x.isdescending(): | ||||
kwargs['ascending'] = False | ||||
return _addset(self, x, **kwargs) | ||||
Lucas Moscovicz
|
r20658 | def sort(self, reverse=False): | ||
if reverse: | ||||
if self._ascending: | ||||
self._subset.sort(reverse=reverse) | ||||
else: | ||||
if not self._ascending: | ||||
self._subset.sort(reverse=reverse) | ||||
self._ascending = not reverse | ||||
Lucas Moscovicz
|
r20725 | def isascending(self): | ||
return self._ascending | ||||
def isdescending(self): | ||||
return not self._ascending | ||||
Lucas Moscovicz
|
r20657 | def reverse(self): | ||
self._subset.reverse() | ||||
self._ascending = not self._ascending | ||||
Lucas Moscovicz
|
r20753 | class _addset(_orderedsetmixin): | ||
Lucas Moscovicz
|
r20708 | """Represent the addition of two sets | ||
Wrapper structure for lazily adding two structures without losing much | ||||
Lucas Moscovicz
|
r20694 | performance on the __contains__ method | ||
Lucas Moscovicz
|
r20708 | |||
Lucas Moscovicz
|
r20712 | If the ascending attribute is set, that means the two structures are | ||
ordered in either an ascending or descending way. Therefore, we can add | ||||
Mads Kiilerich
|
r21024 | them maintaining the order by iterating over both at the same time | ||
Lucas Moscovicz
|
r20712 | |||
Lucas Moscovicz
|
r20708 | This class does not duck-type baseset and it's only supposed to be used | ||
internally | ||||
Lucas Moscovicz
|
r20694 | """ | ||
Lucas Moscovicz
|
r20712 | def __init__(self, revs1, revs2, ascending=None): | ||
Lucas Moscovicz
|
r20694 | self._r1 = revs1 | ||
self._r2 = revs2 | ||||
self._iter = None | ||||
Lucas Moscovicz
|
r20712 | self._ascending = ascending | ||
Lucas Moscovicz
|
r20720 | self._genlist = None | ||
Pierre-Yves David
|
r20845 | def __len__(self): | ||
return len(self._list) | ||||
Lucas Moscovicz
|
r20720 | @util.propertycache | ||
def _list(self): | ||||
if not self._genlist: | ||||
self._genlist = baseset(self._iterator()) | ||||
return self._genlist | ||||
Lucas Moscovicz
|
r20694 | |||
Lucas Moscovicz
|
r20728 | def filter(self, condition): | ||
if self._ascending is not None: | ||||
return orderedlazyset(self, condition, ascending=self._ascending) | ||||
return lazyset(self, condition) | ||||
Lucas Moscovicz
|
r20729 | def ascending(self): | ||
if self._ascending is None: | ||||
self.sort() | ||||
self._ascending = True | ||||
else: | ||||
if not self._ascending: | ||||
self.reverse() | ||||
def descending(self): | ||||
if self._ascending is None: | ||||
self.sort(reverse=True) | ||||
self._ascending = False | ||||
else: | ||||
if self._ascending: | ||||
self.reverse() | ||||
Lucas Moscovicz
|
r20730 | def __and__(self, other): | ||
filterfunc = other.__contains__ | ||||
if self._ascending is not None: | ||||
return orderedlazyset(self, filterfunc, ascending=self._ascending) | ||||
return lazyset(self, filterfunc) | ||||
Lucas Moscovicz
|
r20731 | def __sub__(self, other): | ||
filterfunc = lambda r: r not in other | ||||
if self._ascending is not None: | ||||
return orderedlazyset(self, filterfunc, ascending=self._ascending) | ||||
return lazyset(self, filterfunc) | ||||
Lucas Moscovicz
|
r20732 | def __add__(self, other): | ||
"""When both collections are ascending or descending, preserve the order | ||||
""" | ||||
kwargs = {} | ||||
if self._ascending is not None: | ||||
if self.isascending() and other.isascending(): | ||||
kwargs['ascending'] = True | ||||
if self.isdescending() and other.isdescending(): | ||||
kwargs['ascending'] = False | ||||
return _addset(self, other, **kwargs) | ||||
Lucas Moscovicz
|
r20694 | def _iterator(self): | ||
Lucas Moscovicz
|
r20722 | """Iterate over both collections without repeating elements | ||
If the ascending attribute is not set, iterate over the first one and | ||||
then over the second one checking for membership on the first one so we | ||||
dont yield any duplicates. | ||||
If the ascending attribute is set, iterate over both collections at the | ||||
same time, yielding only one value at a time in the given order. | ||||
""" | ||||
Lucas Moscovicz
|
r20694 | if not self._iter: | ||
def gen(): | ||||
Lucas Moscovicz
|
r20722 | if self._ascending is None: | ||
for r in self._r1: | ||||
Lucas Moscovicz
|
r20694 | yield r | ||
Lucas Moscovicz
|
r20722 | s = self._r1.set() | ||
for r in self._r2: | ||||
if r not in s: | ||||
yield r | ||||
else: | ||||
iter1 = iter(self._r1) | ||||
iter2 = iter(self._r2) | ||||
val1 = None | ||||
val2 = None | ||||
choice = max | ||||
if self._ascending: | ||||
choice = min | ||||
try: | ||||
# Consume both iterators in an ordered way until one is | ||||
# empty | ||||
while True: | ||||
if val1 is None: | ||||
val1 = iter1.next() | ||||
if val2 is None: | ||||
val2 = iter2.next() | ||||
next = choice(val1, val2) | ||||
yield next | ||||
if val1 == next: | ||||
val1 = None | ||||
if val2 == next: | ||||
val2 = None | ||||
except StopIteration: | ||||
# Flush any remaining values and consume the other one | ||||
it = iter2 | ||||
if val1 is not None: | ||||
yield val1 | ||||
it = iter1 | ||||
elif val2 is not None: | ||||
# might have been equality and both are empty | ||||
yield val2 | ||||
for val in it: | ||||
yield val | ||||
Lucas Moscovicz
|
r20705 | self._iter = _generatorset(gen()) | ||
Lucas Moscovicz
|
r20694 | |||
return self._iter | ||||
def __iter__(self): | ||||
Lucas Moscovicz
|
r20721 | if self._genlist: | ||
return iter(self._genlist) | ||||
return iter(self._iterator()) | ||||
Lucas Moscovicz
|
r20694 | |||
def __contains__(self, x): | ||||
return x in self._r1 or x in self._r2 | ||||
Lucas Moscovicz
|
r20711 | def set(self): | ||
return self | ||||
Lucas Moscovicz
|
r20724 | def sort(self, reverse=False): | ||
"""Sort the added set | ||||
For this we use the cached list with all the generated values and if we | ||||
know they are ascending or descending we can sort them in a smart way. | ||||
""" | ||||
if self._ascending is None: | ||||
self._list.sort(reverse=reverse) | ||||
self._ascending = not reverse | ||||
else: | ||||
if bool(self._ascending) == bool(reverse): | ||||
self.reverse() | ||||
Lucas Moscovicz
|
r20733 | def isascending(self): | ||
return self._ascending is not None and self._ascending | ||||
def isdescending(self): | ||||
return self._ascending is not None and not self._ascending | ||||
Lucas Moscovicz
|
r20723 | def reverse(self): | ||
self._list.reverse() | ||||
if self._ascending is not None: | ||||
self._ascending = not self._ascending | ||||
Lucas Moscovicz
|
r20705 | class _generatorset(object): | ||
"""Wrap a generator for lazy iteration | ||||
Wrapper structure for generators that provides lazy membership and can | ||||
Lucas Moscovicz
|
r20540 | be iterated more than once. | ||
When asked for membership it generates values until either it finds the | ||||
requested one or has gone through all the elements in the generator | ||||
Lucas Moscovicz
|
r20705 | |||
This class does not duck-type baseset and it's only supposed to be used | ||||
internally | ||||
Lucas Moscovicz
|
r20540 | """ | ||
Lucas Moscovicz
|
r20536 | def __init__(self, gen): | ||
Pierre-Yves David
|
r20739 | """ | ||
gen: a generator producing the values for the generatorset. | ||||
""" | ||||
Lucas Moscovicz
|
r20536 | self._gen = gen | ||
self._cache = {} | ||||
Lucas Moscovicz
|
r20540 | self._genlist = baseset([]) | ||
Lucas Moscovicz
|
r20703 | self._finished = False | ||
Lucas Moscovicz
|
r20540 | |||
Lucas Moscovicz
|
r20536 | def __contains__(self, x): | ||
if x in self._cache: | ||||
return self._cache[x] | ||||
Gregory Szorc
|
r20828 | # Use new values only, as existing values would be cached. | ||
for l in self._consumegen(): | ||||
Lucas Moscovicz
|
r20634 | if l == x: | ||
return True | ||||
Lucas Moscovicz
|
r20536 | |||
self._cache[x] = False | ||||
return False | ||||
def __iter__(self): | ||||
Durham Goode
|
r20833 | if self._finished: | ||
for x in self._genlist: | ||||
yield x | ||||
return | ||||
Pierre-Yves David
|
r22494 | # We have to use this complex iteration strategy to allow multiple | ||
# iterations at the same time. We need to be able to catch revision | ||||
# removed from `consumegen` and added to genlist in another instance. | ||||
# | ||||
# Getting rid of it would provide an about 15% speed up on this | ||||
# iteration. | ||||
Durham Goode
|
r20833 | i = 0 | ||
genlist = self._genlist | ||||
consume = self._consumegen() | ||||
while True: | ||||
if i < len(genlist): | ||||
yield genlist[i] | ||||
else: | ||||
yield consume.next() | ||||
i += 1 | ||||
Gregory Szorc
|
r20828 | |||
def _consumegen(self): | ||||
Pierre-Yves David
|
r22528 | cache = self._cache | ||
genlist = self._genlist.append | ||||
Lucas Moscovicz
|
r20634 | for item in self._gen: | ||
Pierre-Yves David
|
r22528 | cache[item] = True | ||
genlist(item) | ||||
Lucas Moscovicz
|
r20634 | yield item | ||
Lucas Moscovicz
|
r20703 | self._finished = True | ||
Lucas Moscovicz
|
r20536 | def set(self): | ||
return self | ||||
Lucas Moscovicz
|
r20703 | def sort(self, reverse=False): | ||
if not self._finished: | ||||
for i in self: | ||||
continue | ||||
self._genlist.sort(reverse=reverse) | ||||
Lucas Moscovicz
|
r20706 | class _ascgeneratorset(_generatorset): | ||
"""Wrap a generator of ascending elements for lazy iteration | ||||
Same structure as _generatorset but stops iterating after it goes past | ||||
Lucas Moscovicz
|
r20643 | the value when asked for membership and the element is not contained | ||
Lucas Moscovicz
|
r20706 | |||
This class does not duck-type baseset and it's only supposed to be used | ||||
internally | ||||
Lucas Moscovicz
|
r20643 | """ | ||
def __contains__(self, x): | ||||
if x in self._cache: | ||||
return self._cache[x] | ||||
Gregory Szorc
|
r20828 | # Use new values only, as existing values would be cached. | ||
for l in self._consumegen(): | ||||
Lucas Moscovicz
|
r20643 | if l == x: | ||
return True | ||||
if l > x: | ||||
break | ||||
self._cache[x] = False | ||||
return False | ||||
Lucas Moscovicz
|
r20707 | class _descgeneratorset(_generatorset): | ||
"""Wrap a generator of descending elements for lazy iteration | ||||
Same structure as _generatorset but stops iterating after it goes past | ||||
Lucas Moscovicz
|
r20643 | the value when asked for membership and the element is not contained | ||
Lucas Moscovicz
|
r20707 | |||
This class does not duck-type baseset and it's only supposed to be used | ||||
internally | ||||
Lucas Moscovicz
|
r20643 | """ | ||
def __contains__(self, x): | ||||
if x in self._cache: | ||||
return self._cache[x] | ||||
Gregory Szorc
|
r20828 | # Use new values only, as existing values would be cached. | ||
for l in self._consumegen(): | ||||
Lucas Moscovicz
|
r20643 | if l == x: | ||
return True | ||||
if l < x: | ||||
break | ||||
self._cache[x] = False | ||||
return False | ||||
Pierre-Yves David
|
r22509 | def spanset(repo, start=None, end=None): | ||
"""factory function to dispatch between fullreposet and actual spanset | ||||
Feel free to update all spanset call sites and kill this function at some | ||||
point. | ||||
""" | ||||
if start is None and end is None: | ||||
return fullreposet(repo) | ||||
return _spanset(repo, start, end) | ||||
class _spanset(_orderedsetmixin): | ||||
Lucas Moscovicz
|
r20482 | """Duck type for baseset class which represents a range of revisions and | ||
can work lazily and without having all the range in memory | ||||
Lucas Moscovicz
|
r20737 | |||
Note that spanset(x, y) behave almost like xrange(x, y) except for two | ||||
notable points: | ||||
- when x < y it will be automatically descending, | ||||
- revision filtered with this repoview will be skipped. | ||||
Lucas Moscovicz
|
r20482 | """ | ||
Lucas Moscovicz
|
r20525 | def __init__(self, repo, start=0, end=None): | ||
Lucas Moscovicz
|
r20737 | """ | ||
start: first revision included the set | ||||
(default to 0) | ||||
end: first revision excluded (last+1) | ||||
(default to len(repo) | ||||
Spanset will be descending if `end` < `start`. | ||||
""" | ||||
Lucas Moscovicz
|
r20482 | self._start = start | ||
Lucas Moscovicz
|
r20525 | if end is not None: | ||
self._end = end | ||||
else: | ||||
self._end = len(repo) | ||||
self._hiddenrevs = repo.changelog.filteredrevs | ||||
Lucas Moscovicz
|
r20521 | |||
Lucas Moscovicz
|
r20657 | def ascending(self): | ||
Pierre-Yves David
|
r22482 | if not self.isascending(): | ||
Lucas Moscovicz
|
r20657 | self.reverse() | ||
def descending(self): | ||||
Pierre-Yves David
|
r22482 | if not self.isdescending(): | ||
Lucas Moscovicz
|
r20657 | self.reverse() | ||
Lucas Moscovicz
|
r20482 | def __iter__(self): | ||
Pierre-Yves David
|
r22482 | if self.isascending(): | ||
Lucas Moscovicz
|
r20521 | iterrange = xrange(self._start, self._end) | ||
Lucas Moscovicz
|
r20482 | else: | ||
Lucas Moscovicz
|
r20521 | iterrange = xrange(self._start, self._end, -1) | ||
if self._hiddenrevs: | ||||
s = self._hiddenrevs | ||||
for r in iterrange: | ||||
if r not in s: | ||||
yield r | ||||
else: | ||||
for r in iterrange: | ||||
Lucas Moscovicz
|
r20482 | yield r | ||
Pierre-Yves David
|
r21201 | def __contains__(self, rev): | ||
Pierre-Yves David
|
r22526 | start = self._start | ||
end = self._end | ||||
hidden = self._hiddenrevs | ||||
return (((end < rev <= start) or (start <= rev and rev < end)) | ||||
and not (hidden and rev in hidden)) | ||||
Lucas Moscovicz
|
r20482 | |||
Lucas Moscovicz
|
r20716 | def __nonzero__(self): | ||
for r in self: | ||||
return True | ||||
return False | ||||
Lucas Moscovicz
|
r20483 | def __and__(self, x): | ||
Lucas Moscovicz
|
r20538 | if isinstance(x, baseset): | ||
x = x.set() | ||||
Pierre-Yves David
|
r22483 | return orderedlazyset(self, x.__contains__, | ||
ascending=self.isascending()) | ||||
Lucas Moscovicz
|
r20483 | |||
def __sub__(self, x): | ||||
Lucas Moscovicz
|
r20538 | if isinstance(x, baseset): | ||
x = x.set() | ||||
Pierre-Yves David
|
r22483 | return orderedlazyset(self, lambda r: r not in x, | ||
ascending=self.isascending()) | ||||
Lucas Moscovicz
|
r20483 | |||
def __add__(self, x): | ||||
Lucas Moscovicz
|
r20734 | kwargs = {} | ||
if self.isascending() and x.isascending(): | ||||
kwargs['ascending'] = True | ||||
if self.isdescending() and x.isdescending(): | ||||
kwargs['ascending'] = False | ||||
return _addset(self, x, **kwargs) | ||||
Lucas Moscovicz
|
r20483 | |||
Lucas Moscovicz
|
r20484 | def __len__(self): | ||
Lucas Moscovicz
|
r20521 | if not self._hiddenrevs: | ||
return abs(self._end - self._start) | ||||
else: | ||||
count = 0 | ||||
Pierre-Yves David
|
r21205 | start = self._start | ||
end = self._end | ||||
Lucas Moscovicz
|
r20521 | for rev in self._hiddenrevs: | ||
Pierre-Yves David
|
r21284 | if (end < rev <= start) or (start <= rev < end): | ||
Lucas Moscovicz
|
r20521 | count += 1 | ||
return abs(self._end - self._start) - count | ||||
Lucas Moscovicz
|
r20484 | |||
def __getitem__(self, x): | ||||
# Basic implementation to be changed in future patches. | ||||
l = baseset([r for r in self]) | ||||
return l[x] | ||||
def sort(self, reverse=False): | ||||
Lucas Moscovicz
|
r20718 | if bool(reverse) != (self._start > self._end): | ||
Lucas Moscovicz
|
r20484 | self.reverse() | ||
def reverse(self): | ||||
Lucas Moscovicz
|
r20737 | # Just switch the _start and _end parameters | ||
Pierre-Yves David
|
r22482 | if self.isascending(): | ||
Lucas Moscovicz
|
r20484 | self._start, self._end = self._end - 1, self._start - 1 | ||
else: | ||||
self._start, self._end = self._end + 1, self._start + 1 | ||||
def set(self): | ||||
return self | ||||
Lucas Moscovicz
|
r20725 | def isascending(self): | ||
Pierre-Yves David
|
r22481 | return self._start <= self._end | ||
Lucas Moscovicz
|
r20725 | |||
def isdescending(self): | ||||
Pierre-Yves David
|
r22481 | return self._start >= self._end | ||
Lucas Moscovicz
|
r20725 | |||
Lucas Moscovicz
|
r20610 | def filter(self, l): | ||
Pierre-Yves David
|
r22483 | return orderedlazyset(self, l, ascending=self.isascending()) | ||
Lucas Moscovicz
|
r20610 | |||
Pierre-Yves David
|
r22509 | class fullreposet(_spanset): | ||
Pierre-Yves David
|
r22508 | """a set containing all revisions in the repo | ||
This class exists to host special optimisation. | ||||
""" | ||||
def __init__(self, repo): | ||||
super(fullreposet, self).__init__(repo) | ||||
Pierre-Yves David
|
r22510 | def __and__(self, other): | ||
"""fullrepo & other -> other | ||||
As self contains the whole repo, all of the other set should also be in | ||||
self. Therefor `self & other = other`. | ||||
This boldly assumes the other contains valid revs only. | ||||
""" | ||||
# other not a smartset, make is so | ||||
if not util.safehasattr(other, 'set'): | ||||
# filter out hidden revision | ||||
# (this boldly assumes all smartset are pure) | ||||
# | ||||
# `other` was used with "&", let's assume this is a set like | ||||
# object. | ||||
other = baseset(other - self._hiddenrevs) | ||||
elif not util.safehasattr(other, 'ascending'): | ||||
# "other" is _generatorset not a real smart set | ||||
# we fallback to the old way (sad kitten) | ||||
return super(fullreposet, self).__and__(other) | ||||
# preserve order: | ||||
# | ||||
# this is probably useless and harmful in multiple cases but matches | ||||
# the current behavior. | ||||
if self.isascending(): | ||||
other.ascending() | ||||
else: | ||||
other.descending() | ||||
return other | ||||
Patrick Mezard
|
r12823 | # tell hggettext to extract docstrings from these functions: | ||
i18nfunctions = symbols.values() | ||||