# revset.py - revision set queries for mercurial
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import heapq
import re

from .i18n import _
from . import (
    destutil,
    encoding,
    error,
    hbisect,
    match as matchmod,
    node,
    obsolete as obsmod,
    pathutil,
    phases,
    registrar,
    repoview,
    revsetlang,
    scmutil,
    smartset,
    util,
)

# helpers for processing parsed tree
getsymbol = revsetlang.getsymbol
getstring = revsetlang.getstring
getinteger = revsetlang.getinteger
getboolean = revsetlang.getboolean
getlist = revsetlang.getlist
getrange = revsetlang.getrange
getargs = revsetlang.getargs
getargsdict = revsetlang.getargsdict
# constants used as an argument of match() and matchany()
anyorder = revsetlang.anyorder
defineorder = revsetlang.defineorder
followorder = revsetlang.followorder

baseset = smartset.baseset
generatorset = smartset.generatorset
spanset = smartset.spanset
fullreposet = smartset.fullreposet
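
# Typical entry points for the machinery defined below (illustrative sketch,
# not an exhaustive description): callers normally go through repo.revs() /
# repo.set(), which parse a revset expression with revsetlang and evaluate it
# against the predicates registered in this module, e.g.
#
#   revs = repo.revs('ancestors(tip) and user("mpm")')  # smartset of rev numbers
#   ctxs = repo.set('limit(::., 3)')                    # iterator of changectxs
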
def _revancestors(repo, revs, followfirst):
    """Like revlog.ancestors(), but supports followfirst."""
    if followfirst:
        cut = 1
    else:
        cut = None
    cl = repo.changelog

    def iterate():
        revs.sort(reverse=True)
        irevs = iter(revs)
        h = []

        inputrev = next(irevs, None)
        if inputrev is not None:
            heapq.heappush(h, -inputrev)

        seen = set()
        while h:
            current = -heapq.heappop(h)
            if current == inputrev:
                inputrev = next(irevs, None)
                if inputrev is not None:
                    heapq.heappush(h, -inputrev)
            if current not in seen:
                seen.add(current)
                yield current
                try:
                    for parent in cl.parentrevs(current)[:cut]:
                        if parent != node.nullrev:
                            heapq.heappush(h, -parent)
                except error.WdirUnsupported:
                    for parent in repo[current].parents()[:cut]:
                        if parent.rev() != node.nullrev:
                            heapq.heappush(h, -parent.rev())

    return generatorset(iterate(), iterasc=False)
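
# Sketch of how _revancestors() is consumed by the callers below (e.g.
# _ancestors() and _follow()); the names here are only for illustration:
#
#   heads = baseset([7, 5])
#   ancs = _revancestors(repo, heads, followfirst=False)
#   # 'ancs' is a lazy, descending generatorset; intersecting with a subset
#   # only materializes what is actually needed:
#   result = subset & ancs
#
# The negated-revision heap keeps candidates in descending order, so every
# revision is yielded at most once and always before its own ancestors.
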
def _revdescendants(repo, revs, followfirst):
    """Like revlog.descendants() but supports followfirst."""
    if followfirst:
        cut = 1
    else:
        cut = None

    def iterate():
        cl = repo.changelog
        # XXX this should be 'parentset.min()' assuming 'parentset' is a
        # smartset (and if it is not, it should.)
        first = min(revs)
        nullrev = node.nullrev
        if first == nullrev:
            # Are there nodes with a null first parent and a non-null
            # second one? Maybe. Do we care? Probably not.
            for i in cl:
                yield i
        else:
            seen = set(revs)
            for i in cl.revs(first + 1):
                for x in cl.parentrevs(i)[:cut]:
                    if x != nullrev and x in seen:
                        seen.add(i)
                        yield i
                        break

    return generatorset(iterate(), iterasc=True)

def _reachablerootspure(repo, minroot, roots, heads, includepath):
    """return (heads(::<roots> and ::<heads>))
    If includepath is True, return (<roots>::<heads>)."""
    if not roots:
        return []
    parentrevs = repo.changelog.parentrevs
    roots = set(roots)
    visit = list(heads)
    reachable = set()
    seen = {}
    # prefetch all the things! (because python is slow)
    reached = reachable.add
    dovisit = visit.append
    nextvisit = visit.pop
    # open-code the post-order traversal due to the tiny size of
    # sys.getrecursionlimit()
    while visit:
        rev = nextvisit()
        if rev in roots:
            reached(rev)
            if not includepath:
                continue
        parents = parentrevs(rev)
        seen[rev] = parents
        for parent in parents:
            if parent >= minroot and parent not in seen:
                dovisit(parent)
    if not reachable:
        return baseset()
    if not includepath:
        return reachable
    for rev in sorted(seen):
        for parent in seen[rev]:
            if parent in reachable:
                reached(rev)
    return reachable

def reachableroots(repo, roots, heads, includepath=False):
    """return (heads(::<roots> and ::<heads>))
    If includepath is True, return (<roots>::<heads>)."""
    if not roots:
        return baseset()
    minroot = roots.min()
    roots = list(roots)
    heads = list(heads)
    try:
        revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
    except AttributeError:
        revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
    revs = baseset(revs)
    revs.sort()
    return revs
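
# Relationship used by dagrange() further below (sketch): with
# includepath=True, reachableroots() computes '<roots>::<heads>', i.e. every
# revision lying on a path from a root to a head:
#
#   revs = reachableroots(repo, baseset([2]), baseset([9]), includepath=True)
#   # comparable to repo.revs('2::9')
#
# With includepath=False, only the roots that are reachable from 'heads'
# (i.e. roots that are ancestors of some head) are returned.
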
# helpers
def getset(repo, subset, x):
    if not x:
        raise error.ParseError(_("missing argument"))
    return methods[x[0]](repo, subset, *x[1:])

def _getrevsource(repo, r):
    extra = repo[r].extra()
    for label in ('source', 'transplant_source', 'rebase_source'):
        if label in extra:
            try:
                return repo[extra[label]].rev()
            except error.RepoLookupError:
                pass
    return None

# operator methods
def stringset(repo, subset, x):
    x = scmutil.intrev(repo[x])
    if (x in subset
        or x == node.nullrev and isinstance(subset, fullreposet)):
        return baseset([x])
    return baseset()

def rangeset(repo, subset, x, y, order):
    m = getset(repo, fullreposet(repo), x)
    n = getset(repo, fullreposet(repo), y)

    if not m or not n:
        return baseset()
    return _makerangeset(repo, subset, m.first(), n.last(), order)

def rangeall(repo, subset, x, order):
    assert x is None
    return _makerangeset(repo, subset, 0, len(repo) - 1, order)

def rangepre(repo, subset, y, order):
    # ':y' can't be rewritten to '0:y' since '0' may be hidden
    n = getset(repo, fullreposet(repo), y)
    if not n:
        return baseset()
    return _makerangeset(repo, subset, 0, n.last(), order)

def rangepost(repo, subset, x, order):
    m = getset(repo, fullreposet(repo), x)
    if not m:
        return baseset()
    return _makerangeset(repo, subset, m.first(), len(repo) - 1, order)

def _makerangeset(repo, subset, m, n, order):
    if m == n:
        r = baseset([m])
    elif n == node.wdirrev:
        r = spanset(repo, m, len(repo)) + baseset([n])
    elif m == node.wdirrev:
        r = baseset([m]) + spanset(repo, len(repo) - 1, n - 1)
    elif m < n:
        r = spanset(repo, m, n + 1)
    else:
        r = spanset(repo, m, n - 1)

    if order == defineorder:
        return r & subset
    else:
        # carrying the sorting over when possible would be more efficient
        return subset & r
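
# Summary of how the parsed range syntaxes map onto the helpers above (the
# actual dispatch lives in the 'methods' table further down in this file):
#
#   ':'     -> rangeall()   e.g. repo.revs(':')    -> every revision
#   'x:'    -> rangepost()  e.g. repo.revs('4:')   -> 4 through tip
#   ':y'    -> rangepre()   e.g. repo.revs(':4')   -> 0 through 4
#   'x:y'   -> rangeset()   e.g. repo.revs('2:5')  -> 2, 3, 4, 5
#   'x::y'  -> dagrange() below, which follows the DAG instead of rev numbers
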
def dagrange(repo, subset, x, y, order):
    r = fullreposet(repo)
    xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
                        includepath=True)
    return subset & xs

def andset(repo, subset, x, y, order):
    return getset(repo, getset(repo, subset, x), y)

def differenceset(repo, subset, x, y, order):
    return getset(repo, subset, x) - getset(repo, subset, y)

def _orsetlist(repo, subset, xs):
    assert xs
    if len(xs) == 1:
        return getset(repo, subset, xs[0])
    p = len(xs) // 2
    a = _orsetlist(repo, subset, xs[:p])
    b = _orsetlist(repo, subset, xs[p:])
    return a + b

def orset(repo, subset, x, order):
    xs = getlist(x)
    if order == followorder:
        # slow path to take the subset order
        return subset & _orsetlist(repo, fullreposet(repo), xs)
    else:
        return _orsetlist(repo, subset, xs)

def notset(repo, subset, x, order):
    return subset - getset(repo, subset, x)

def listset(repo, subset, *xs):
    raise error.ParseError(_("can't use a list in this context"),
                           hint=_('see hg help "revsets.x or y"'))

def keyvaluepair(repo, subset, k, v):
    raise error.ParseError(_("can't use a key-value pair in this context"))

def func(repo, subset, a, b, order):
    f = getsymbol(a)
    if f in symbols:
        func = symbols[f]
        if getattr(func, '_takeorder', False):
            return func(repo, subset, b, order)
        return func(repo, subset, b)

    keep = lambda fn: getattr(fn, '__doc__', None) is not None
    syms = [s for (s, fn) in symbols.items() if keep(fn)]
    raise error.UnknownIdentifier(f, syms)

# functions

# symbols are callables like:
#   fn(repo, subset, x)
# with:
#   repo - current repository instance
#   subset - of revisions to be examined
#   x - argument in tree form
symbols = {}

# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
# functions that just return a lot of changesets (like all) don't count here
safesymbols = set()

predicate = registrar.revsetpredicate()
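
# Sketch of how a predicate is registered through the decorator above; the
# functions below follow exactly this pattern, and extensions do the same
# with their own registrar.revsetpredicate() instance.  The predicate name
# used here is hypothetical:
#
#   @predicate('onlymerges()', safe=True)
#   def onlymerges(repo, subset, x):
#       """Changesets with two parents (example predicate)."""
#       getargs(x, 0, 0, _("onlymerges takes no arguments"))
#       return subset.filter(lambda r: repo.changelog.parentrevs(r)[1] != -1)
#
# The decorator records the name and the safe= flag; loadpredicate(), defined
# near the end of this file, copies registered functions into 'symbols' and
# 'safesymbols'.
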
@predicate('_destupdate')
def _destupdate(repo, subset, x):
    # experimental revset for update destination
    args = getargsdict(x, 'limit', 'clean')
    return subset & baseset([destutil.destupdate(repo, **args)[0]])

@predicate('_destmerge')
def _destmerge(repo, subset, x):
    # experimental revset for merge destination
    sourceset = None
    if x is not None:
        sourceset = getset(repo, fullreposet(repo), x)
    return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)])

@predicate('adds(pattern)', safe=True)
def adds(repo, subset, x):
    """Changesets that add a file matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "adds" is a keyword
    pat = getstring(x, _("adds requires a pattern"))
    return checkstatus(repo, subset, pat, 1)

@predicate('ancestor(*changeset)', safe=True)
def ancestor(repo, subset, x):
    """A greatest common ancestor of the changesets.

    Accepts 0 or more changesets.
    Will return empty list when passed no args.
    Greatest common ancestor of a single changeset is that changeset.
    """
    # i18n: "ancestor" is a keyword
    l = getlist(x)
    rl = fullreposet(repo)
    anc = None

    # (getset(repo, rl, i) for i in l) generates a list of lists
    for revs in (getset(repo, rl, i) for i in l):
        for r in revs:
            if anc is None:
                anc = repo[r]
            else:
                anc = anc.ancestor(repo[r])

    if anc is not None and anc.rev() in subset:
        return baseset([anc.rev()])
    return baseset()

def _ancestors(repo, subset, x, followfirst=False):
    heads = getset(repo, fullreposet(repo), x)
    if not heads:
        return baseset()
    s = _revancestors(repo, heads, followfirst)
    return subset & s

@predicate('ancestors(set)', safe=True)
def ancestors(repo, subset, x):
    """Changesets that are ancestors of a changeset in set.
    """
    return _ancestors(repo, subset, x)

@predicate('_firstancestors', safe=True)
def _firstancestors(repo, subset, x):
    # ``_firstancestors(set)``
    # Like ``ancestors(set)`` but follows only the first parents.
    return _ancestors(repo, subset, x, followfirst=True)

def _childrenspec(repo, subset, x, n, order):
    """Changesets that are the Nth child of a changeset
    in set.
    """
    cs = set()
    for r in getset(repo, fullreposet(repo), x):
        for i in range(n):
            c = repo[r].children()
            if len(c) == 0:
                break
            if len(c) > 1:
                raise error.RepoLookupError(
                    _("revision in set has more than one child"))
            r = c[0].rev()
        else:
            cs.add(r)
    return subset & cs

def ancestorspec(repo, subset, x, n, order):
    """``set~n``
    Changesets that are the Nth ancestor (first parents only) of a changeset
    in set.
    """
    n = getinteger(n, _("~ expects a number"))
    if n < 0:
        # children lookup
        return _childrenspec(repo, subset, x, -n, order)
    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        for i in range(n):
            try:
                r = cl.parentrevs(r)[0]
            except error.WdirUnsupported:
                r = repo[r].parents()[0].rev()
        ps.add(r)
    return subset & ps
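
# Illustrative queries for the '~' operator handled above (sketch):
#
#   repo.revs('tip~2')   # second first-parent ancestor of tip
#   repo.revs('tip~0')   # tip itself (the inner loop runs zero times)
#   repo.revs('.~-1')    # negative n delegates to _childrenspec(): the child
#
# With a negative n, RepoLookupError is raised if a traversed revision has
# more than one child.
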

@predicate('author(string)', safe=True)
def author(repo, subset, x):
    """Alias for ``user(string)``.
    """
    # i18n: "author" is a keyword
    n = getstring(x, _("author requires a string"))
    kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
    return subset.filter(lambda x: matcher(repo[x].user()),
                         condrepr=('<user %r>', n))

@predicate('bisect(string)', safe=True)
def bisect(repo, subset, x):
    """Changesets marked in the specified bisect status:

    - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
    - ``goods``, ``bads``        : csets topologically good/bad
    - ``range``                  : csets taking part in the bisection
    - ``pruned``                 : csets that are goods, bads or skipped
    - ``untested``               : csets whose fate is yet unknown
    - ``ignored``                : csets ignored due to DAG topology
    - ``current``                : the cset currently being bisected
    """
    # i18n: "bisect" is a keyword
    status = getstring(x, _("bisect requires a string")).lower()
    state = set(hbisect.get(repo, status))
    return subset & state

# Backward-compatibility
# - no help entry so that we do not advertise it any more
@predicate('bisected', safe=True)
def bisected(repo, subset, x):
    return bisect(repo, subset, x)

@predicate('bookmark([name])', safe=True)
def bookmark(repo, subset, x):
    """The named bookmark or all bookmarks.

    Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
    """
    # i18n: "bookmark" is a keyword
    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
    if args:
        bm = getstring(args[0],
                       # i18n: "bookmark" is a keyword
                       _('the argument to bookmark must be a string'))
        kind, pattern, matcher = util.stringmatcher(bm)
        bms = set()
        if kind == 'literal':
            bmrev = repo._bookmarks.get(pattern, None)
            if not bmrev:
                raise error.RepoLookupError(_("bookmark '%s' does not exist")
                                            % pattern)
            bms.add(repo[bmrev].rev())
        else:
            matchrevs = set()
            for name, bmrev in repo._bookmarks.iteritems():
                if matcher(name):
                    matchrevs.add(bmrev)
            if not matchrevs:
                raise error.RepoLookupError(_("no bookmarks exist"
                                              " that match '%s'") % pattern)
            for bmrev in matchrevs:
                bms.add(repo[bmrev].rev())
    else:
        bms = {repo[r].rev() for r in repo._bookmarks.values()}
        bms -= {node.nullrev}
    return subset & bms

@predicate('branch(string or set)', safe=True)
def branch(repo, subset, x):
    """
    All changesets belonging to the given branch or the branches of the given
    changesets.

    Pattern matching is supported for `string`. See
    :hg:`help revisions.patterns`.
    """
    getbi = repo.revbranchcache().branchinfo
    def getbranch(r):
        try:
            return getbi(r)[0]
        except error.WdirUnsupported:
            return repo[r].branch()

    try:
        b = getstring(x, '')
    except error.ParseError:
        # not a string, but another revspec, e.g. tip()
        pass
    else:
        kind, pattern, matcher = util.stringmatcher(b)
        if kind == 'literal':
            # note: falls through to the revspec case if no branch with
            # this name exists and pattern kind is not specified explicitly
            if pattern in repo.branchmap():
                return subset.filter(lambda r: matcher(getbranch(r)),
                                     condrepr=('<branch %r>', b))
            if b.startswith('literal:'):
                raise error.RepoLookupError(_("branch '%s' does not exist")
                                            % pattern)
        else:
            return subset.filter(lambda r: matcher(getbranch(r)),
                                 condrepr=('<branch %r>', b))

    s = getset(repo, fullreposet(repo), x)
    b = set()
    for r in s:
        b.add(getbranch(r))
    c = s.__contains__
    return subset.filter(lambda r: c(r) or getbranch(r) in b,
                         condrepr=lambda: '<branch %r>' % sorted(b))

@predicate('bumped()', safe=True)
def bumped(repo, subset, x):
    """Mutable changesets marked as successors of public changesets.

    Only non-public and non-obsolete changesets can be `bumped`.
    """
    # i18n: "bumped" is a keyword
    getargs(x, 0, 0, _("bumped takes no arguments"))
    bumped = obsmod.getrevs(repo, 'bumped')
    return subset & bumped

@predicate('bundle()', safe=True)
def bundle(repo, subset, x):
    """Changesets in the bundle.

    Bundle must be specified by the -R option."""
    try:
        bundlerevs = repo.changelog.bundlerevs
    except AttributeError:
        raise error.Abort(_("no bundle provided - specify with -R"))
    return subset & bundlerevs

def checkstatus(repo, subset, pat, field):
    hasset = matchmod.patkind(pat) == 'set'

    mcache = [None]
    def matches(x):
        c = repo[x]
        if not mcache[0] or hasset:
            mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
        m = mcache[0]
        fname = None
        if not m.anypats() and len(m.files()) == 1:
            fname = m.files()[0]
        if fname is not None:
            if fname not in c.files():
                return False
        else:
            for f in c.files():
                if m(f):
                    break
            else:
                return False
        files = repo.status(c.p1().node(), c.node())[field]
        if fname is not None:
            if fname in files:
                return True
        else:
            for f in files:
                if m(f):
                    return True

    return subset.filter(matches, condrepr=('<status[%r] %r>', field, pat))
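
# checkstatus()'s 'field' indexes the status tuple returned by repo.status():
# 0 is the list of modified files, 1 added, 2 removed.  adds() above passes 1;
# modifies() and removes(), defined further down in this file, pass 0 and 2.
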
def _children(repo, subset, parentset):
    if not parentset:
        return baseset()
    cs = set()
    pr = repo.changelog.parentrevs
    minrev = parentset.min()
    nullrev = node.nullrev
    for r in subset:
        if r <= minrev:
            continue
        p1, p2 = pr(r)
        if p1 in parentset:
            cs.add(r)
        if p2 != nullrev and p2 in parentset:
            cs.add(r)
    return baseset(cs)

@predicate('children(set)', safe=True)
def children(repo, subset, x):
    """Child changesets of changesets in set.
    """
    s = getset(repo, fullreposet(repo), x)
    cs = _children(repo, subset, s)
    return subset & cs

@predicate('closed()', safe=True)
def closed(repo, subset, x):
    """Changeset is closed.
    """
    # i18n: "closed" is a keyword
    getargs(x, 0, 0, _("closed takes no arguments"))
    return subset.filter(lambda r: repo[r].closesbranch(),
                         condrepr='<branch closed>')

@predicate('contains(pattern)')
def contains(repo, subset, x):
    """The revision's manifest contains a file matching pattern (but might not
    modify it). See :hg:`help patterns` for information about file patterns.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))

    def matches(x):
        if not matchmod.patkind(pat):
            pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
            if pats in repo[x]:
                return True
        else:
            c = repo[x]
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            for f in c.manifest():
                if m(f):
                    return True
        return False
    return subset.filter(matches, condrepr=('<contains %r>', pat))

@predicate('converted([id])', safe=True)
def converted(repo, subset, x):
    """Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """
    # There is exactly no chance of resolving the revision, so do a simple
    # string compare and hope for the best
    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))
    def _matchvalue(r):
        source = repo[r].extra().get('convert_revision', None)
        return source is not None and (rev is None or source.startswith(rev))
    return subset.filter(lambda r: _matchvalue(r),
                         condrepr=('<converted %r>', rev))

@predicate('date(interval)', safe=True)
def date(repo, subset, x):
    """Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    ds = getstring(x, _("date requires a string"))
    dm = util.matchdate(ds)
    return subset.filter(lambda x: dm(repo[x].date()[0]),
                         condrepr=('<date %r>', ds))

@predicate('desc(string)', safe=True)
def desc(repo, subset, x):
    """Search commit message for string. The match is case-insensitive.

    Pattern matching is supported for `string`. See
    :hg:`help revisions.patterns`.
    """
    # i18n: "desc" is a keyword
    ds = getstring(x, _("desc requires a string"))
    kind, pattern, matcher = _substringmatcher(ds, casesensitive=False)
    return subset.filter(lambda r: matcher(repo[r].description()),
                         condrepr=('<desc %r>', ds))

def _descendants(repo, subset, x, followfirst=False):
    roots = getset(repo, fullreposet(repo), x)
    if not roots:
        return baseset()
    s = _revdescendants(repo, roots, followfirst)

    # Both sets need to be ascending in order to lazily return the union
    # in the correct order.
    base = subset & roots
    desc = subset & s
    result = base + desc
    if subset.isascending():
        result.sort()
    elif subset.isdescending():
        result.sort(reverse=True)
    else:
        result = subset & result
    return result

@predicate('descendants(set)', safe=True)
def descendants(repo, subset, x):
    """Changesets which are descendants of changesets in set.
    """
    return _descendants(repo, subset, x)

@predicate('_firstdescendants', safe=True)
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    return _descendants(repo, subset, x, followfirst=True)

@predicate('destination([set])', safe=True)
def destination(repo, subset, x):
    """Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source.  Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        sources = getset(repo, fullreposet(repo), x)
    else:
        sources = fullreposet(repo)

    dests = set()
    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the arg set.
    # Even if the immediate src of r is not in the arg set, src's source (or
    # further back) may be.  Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        lineage = None
        while src is not None:
            if lineage is None:
                lineage = list()
            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set.  Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset.  Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in sources or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return subset.filter(dests.__contains__,
                         condrepr=lambda: '<destination %r>' % sorted(dests))

@predicate('divergent()', safe=True)
def divergent(repo, subset, x):
    """
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    divergent = obsmod.getrevs(repo, 'divergent')
    return subset & divergent

@predicate('extinct()', safe=True)
def extinct(repo, subset, x):
    """Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    extincts = obsmod.getrevs(repo, 'extinct')
    return subset & extincts

@predicate('extra(label, [value])', safe=True)
def extra(repo, subset, x):
    """Changesets with the given label in the extra metadata, with the given
    optional value.

    Pattern matching is supported for `value`. See
    :hg:`help revisions.patterns`.
    """
    args = getargsdict(x, 'extra', 'label value')
    if 'label' not in args:
        # i18n: "extra" is a keyword
        raise error.ParseError(_('extra takes at least 1 argument'))
    # i18n: "extra" is a keyword
    label = getstring(args['label'], _('first argument to extra must be '
                                       'a string'))
    value = None

    if 'value' in args:
        # i18n: "extra" is a keyword
        value = getstring(args['value'], _('second argument to extra must be '
                                           'a string'))
        kind, value, matcher = util.stringmatcher(value)

    def _matchvalue(r):
        extra = repo[r].extra()
        return label in extra and (value is None or matcher(extra[label]))

    return subset.filter(lambda r: _matchvalue(r),
                         condrepr=('<extra[%r] %r>', label, value))
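
# Example queries for the predicate above (sketch):
#
#   repo.revs("extra('rebase_source')")           # csets recorded by rebase
#   repo.revs("extra('branch', 're:^release-')")  # regex match on the value
#
# The optional value goes through util.stringmatcher(), so the 'literal:' and
# 're:' prefixes described in :hg:`help revisions.patterns` apply.
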

@predicate('filelog(pattern)', safe=True)
def filelog(repo, subset, x):
    """Changesets connected to the specified filelog.

    For performance reasons, visits only revisions mentioned in the file-level
    filelog, rather than filtering through all changesets (much faster, but
    doesn't include deletes or duplicate changes). For a slower, more accurate
    result, use ``file()``.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.

    If some linkrev points to revisions filtered by the current repoview, we'll
    work around it to return a non-filtered value.
    """

    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    s = set()
    cl = repo.changelog

    if not matchmod.patkind(pat):
        f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
        files = [f]
    else:
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
        files = (f for f in repo[None] if m(f))

    for f in files:
        fl = repo.file(f)
        known = {}
        scanpos = 0
        for fr in list(fl):
            fn = fl.node(fr)
            if fn in known:
                s.add(known[fn])
                continue

            lr = fl.linkrev(fr)
            if lr in cl:
                s.add(lr)
            elif scanpos is not None:
                # lowest matching changeset is filtered, scan further
                # ahead in changelog
                start = max(lr, scanpos) + 1
                scanpos = None
                for r in cl.revs(start):
                    # minimize parsing of non-matching entries
                    if f in cl.revision(r) and f in cl.readfiles(r):
                        try:
                            # try to use manifest delta fastpath
                            n = repo[r].filenode(f)
                            if n not in known:
                                if n == fn:
                                    s.add(r)
                                    scanpos = r
                                    break
                                else:
                                    known[n] = r
                        except error.ManifestLookupError:
                            # deletion in changelog
                            continue

    return subset & s

@predicate('first(set, [n])', safe=True)
def first(repo, subset, x):
    """An alias for limit().
    """
    return limit(repo, subset, x)

def _follow(repo, subset, x, name, followfirst=False):
    l = getargs(x, 0, 2, _("%s takes no arguments or a pattern "
                           "and an optional revset") % name)
    c = repo['.']
    if l:
        x = getstring(l[0], _("%s expected a pattern") % name)
        rev = None
        if len(l) >= 2:
            revs = getset(repo, fullreposet(repo), l[1])
            if len(revs) != 1:
                raise error.RepoLookupError(
                        _("%s expected one starting revision") % name)
            rev = revs.last()
            c = repo[rev]
        matcher = matchmod.match(repo.root, repo.getcwd(), [x],
                                 ctx=repo[rev], default='path')

        files = c.manifest().walk(matcher)
        s = set()
        for fname in files:
            fctx = c[fname]
            s = s.union(set(c.rev() for c in fctx.ancestors(followfirst)))
            # include the revision responsible for the most recent version
            s.add(fctx.introrev())
    else:
        s = _revancestors(repo, baseset([c.rev()]), followfirst)

    return subset & s

@predicate('follow([pattern[, startrev]])', safe=True)
def follow(repo, subset, x):
    """
    An alias for ``::.`` (ancestors of the working directory's first parent).
    If pattern is specified, the histories of files matching given
    pattern in the revision given by startrev are followed, including copies.
    """
    return _follow(repo, subset, x, 'follow')

@predicate('_followfirst', safe=True)
def _followfirst(repo, subset, x):
    # ``followfirst([pattern[, startrev]])``
    # Like ``follow([pattern[, startrev]])`` but follows only the first parent
    # of every revision or file revision.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)

@predicate('followlines(file, fromline:toline[, startrev=., descend=False])',
           safe=True)
def followlines(repo, subset, x):
    """Changesets modifying `file` in line range ('fromline', 'toline').

    Line range corresponds to 'file' content at 'startrev' and should hence be
    consistent with file size. If startrev is not specified, working directory's
    parent is used.

    By default, ancestors of 'startrev' are returned. If 'descend' is True,
    descendants of 'startrev' are returned though renames are (currently) not
    followed in this direction.
    """
    from . import context  # avoid circular import issues

    args = getargsdict(x, 'followlines', 'file *lines startrev descend')
    if len(args['lines']) != 1:
        raise error.ParseError(_("followlines requires a line range"))

    rev = '.'
    if 'startrev' in args:
        revs = getset(repo, fullreposet(repo), args['startrev'])
        if len(revs) != 1:
            raise error.ParseError(
                # i18n: "followlines" is a keyword
                _("followlines expects exactly one revision"))
        rev = revs.last()
    pat = getstring(args['file'], _("followlines requires a pattern"))
    if not matchmod.patkind(pat):
        fname = pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[rev])
        files = [f for f in repo[rev] if m(f)]
        if len(files) != 1:
            # i18n: "followlines" is a keyword
            raise error.ParseError(_("followlines expects exactly one file"))
        fname = files[0]

    # i18n: "followlines" is a keyword
    lr = getrange(args['lines'][0], _("followlines expects a line range"))
    fromline, toline = [getinteger(a, _("line range bounds must be integers"))
                        for a in lr]
    fromline, toline = util.processlinerange(fromline, toline)

    fctx = repo[rev].filectx(fname)
    descend = False
    if 'descend' in args:
        descend = getboolean(args['descend'],
                             # i18n: "descend" is a keyword
                             _("descend argument must be a boolean"))
    if descend:
        rs = generatorset(
            (c.rev() for c, _linerange
             in context.blockdescendants(fctx, fromline, toline)),
            iterasc=True)
    else:
        rs = generatorset(
            (c.rev() for c, _linerange
             in context.blockancestors(fctx, fromline, toline)),
            iterasc=False)
    return subset & rs
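
# Example invocations of the predicate above (sketch):
#
#   hg log -r "followlines(mercurial/revset.py, 1:10)"
#   hg log -r "followlines(README, 5:8, startrev=42, descend=True)"
#
# The line range is interpreted against the file contents at 'startrev'
# (the working directory parent when startrev is omitted).
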

@predicate('all()', safe=True)
def getall(repo, subset, x):
    """All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    return subset & spanset(repo)  # drop "null" if any

@predicate('grep(regex)')
def grep(repo, subset, x):
    """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
    to ensure special escape characters are handled correctly. Unlike
    ``keyword(string)``, the match is case-sensitive.
    """
    try:
        # i18n: "grep" is a keyword
        gr = re.compile(getstring(x, _("grep requires a string")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s') % e)

    def matches(x):
        c = repo[x]
        for e in c.files() + [c.user(), c.description()]:
            if gr.search(e):
                return True
        return False
    return subset.filter(matches, condrepr=('<grep %r>', gr.pattern))

@predicate('_matchfiles', safe=True)
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    l = getargs(x, 1, -1, "_matchfiles requires at least one argument")
    pats, inc, exc = [], [], []
    rev, default = None, None
    for arg in l:
        s = getstring(arg, "_matchfiles requires string arguments")
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                raise error.ParseError('_matchfiles expected at most one '
                                       'revision')
            if value != '': # empty means working directory; leave rev as None
                rev = value
        elif prefix == 'd:':
            if default is not None:
                raise error.ParseError('_matchfiles expected at most one '
                                       'default mode')
            default = value
        else:
            raise error.ParseError('invalid _matchfiles prefix: %s' % prefix)
    if not default:
        default = 'glob'

    m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                       exclude=exc, ctx=repo[rev], default=default)

    # This directly reads the changelog data, as creating changectx for all
    # revisions is quite expensive.
    getfiles = repo.changelog.readfiles
    wdirrev = node.wdirrev
    def matches(x):
        if x == wdirrev:
            files = repo[x].files()
        else:
            files = getfiles(x)
        for f in files:
            if m(f):
                return True
        return False
    return subset.filter(matches,
                         condrepr=('<matchfiles patterns=%r, include=%r '
                                   'exclude=%r, default=%r, rev=%r>',
                                   pats, inc, exc, default, rev))

@predicate('file(pattern)', safe=True)
def hasfile(repo, subset, x):
    """Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    return _matchfiles(repo, subset, ('string', 'p:' + pat))

@predicate('head()', safe=True)
def head(repo, subset, x):
    """Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    hs = set()
    cl = repo.changelog
    for ls in repo.branchmap().itervalues():
        hs.update(cl.rev(h) for h in ls)
    return subset & baseset(hs)

@predicate('heads(set)', safe=True)
def heads(repo, subset, x):
    """Members of set with no children in set.
    """
    s = getset(repo, subset, x)
    ps = parents(repo, subset, x)
    return s - ps

@predicate('hidden()', safe=True)
def hidden(repo, subset, x):
    """Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    hiddenrevs = repoview.filterrevs(repo, 'visible')
    return subset & hiddenrevs

@predicate('keyword(string)', safe=True)
def keyword(repo, subset, x):
    """Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.

    For a regular expression or case sensitive search of these fields, use
    ``grep(regex)``.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        c = repo[r]
        return any(kw in encoding.lower(t)
                   for t in c.files() + [c.user(), c.description()])

    return subset.filter(matches, condrepr=('<keyword %r>', kw))

@predicate('limit(set[, n[, offset]])', safe=True)
def limit(repo, subset, x):
    """First n members of set, defaulting to 1, starting from offset.
    """
    args = getargsdict(x, 'limit', 'set n offset')
    if 'set' not in args:
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit requires one to three arguments"))
    # i18n: "limit" is a keyword
    lim = getinteger(args.get('n'), _("limit expects a number"), default=1)
    # i18n: "limit" is a keyword
    ofs = getinteger(args.get('offset'), _("limit expects a number"), default=0)
    if ofs < 0:
        raise error.ParseError(_("negative offset"))
    os = getset(repo, fullreposet(repo), args['set'])
    result = []
    it = iter(os)
    for x in xrange(ofs):
        y = next(it, None)
        if y is None:
            break
    for x in xrange(lim):
        y = next(it, None)
        if y is None:
            break
        elif y in subset:
            result.append(y)
    return baseset(result, datarepr=('<limit n=%d, offset=%d, %r, %r>',
                                     lim, ofs, subset, os))
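
# Expected behaviour of limit() above (sketch, assuming a repository with at
# least six revisions):
#
#   repo.revs('limit(0:5)')        # -> [0]       (n defaults to 1)
#   repo.revs('limit(0:5, 2)')     # -> [0, 1]
#   repo.revs('limit(0:5, 2, 3)')  # -> [3, 4]    ('offset' members are skipped)
#
# Members are taken in the order of the input set, and only those also present
# in the evaluation subset are kept.
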
Idan Kamara
|
r13915 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('last(set, [n])', safe=True) | ||
Matt Mackall
|
r14061 | def last(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Last n members of set, defaulting to 1. | ||
Matt Mackall
|
r14061 | """ | ||
# i18n: "last" is a keyword | ||||
Matt Mackall
|
r15116 | l = getargs(x, 1, 2, _("last requires one or two arguments")) | ||
Yuya Nishihara
|
r30801 | lim = 1 | ||
if len(l) == 2: | ||||
Matt Mackall
|
r14061 | # i18n: "last" is a keyword | ||
Yuya Nishihara
|
r30801 | lim = getinteger(l[1], _("last expects a number")) | ||
Yuya Nishihara
|
r24115 | os = getset(repo, fullreposet(repo), l[0]) | ||
Lucas Moscovicz
|
r20534 | os.reverse() | ||
Pierre-Yves David
|
r22805 | result = [] | ||
Lucas Moscovicz
|
r20534 | it = iter(os) | ||
for x in xrange(lim): | ||||
Pierre-Yves David
|
r25145 | y = next(it, None) | ||
if y is None: | ||||
Lucas Moscovicz
|
r20534 | break | ||
Yuya Nishihara
|
r26636 | elif y in subset: | ||
Pierre-Yves David
|
r25145 | result.append(y) | ||
Yuya Nishihara
|
r28426 | return baseset(result, datarepr=('<last n=%d, %r, %r>', lim, subset, os)) | ||
Matt Mackall
|
r14061 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('max(set)', safe=True) | ||
Idan Kamara
|
r13915 | def maxrev(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset with highest revision number in set. | ||
Idan Kamara
|
r13915 | """ | ||
Yuya Nishihara
|
r24115 | os = getset(repo, fullreposet(repo), x) | ||
Durham Goode
|
r26305 | try: | ||
Lucas Moscovicz
|
r20754 | m = os.max() | ||
Idan Kamara
|
r13915 | if m in subset: | ||
Yuya Nishihara
|
r28427 | return baseset([m], datarepr=('<max %r, %r>', subset, os)) | ||
Durham Goode
|
r26305 | except ValueError: | ||
# os.max() throws a ValueError when the collection is empty. | ||||
# Same as python's max(). | ||||
pass | ||||
Yuya Nishihara
|
r28427 | return baseset(datarepr=('<max %r, %r>', subset, os)) | ||
Idan Kamara
|
r13915 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('merge()', safe=True) | ||
Idan Kamara
|
r13915 | def merge(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset is a merge changeset. | ||
Idan Kamara
|
r13915 | """ | ||
# i18n: "merge" is a keyword | ||||
getargs(x, 0, 0, _("merge takes no arguments")) | ||||
cl = repo.changelog | ||||
Yuya Nishihara
|
r28424 | return subset.filter(lambda r: cl.parentrevs(r)[1] != -1, | ||
condrepr='<merge>') | ||||
Idan Kamara
|
r13915 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('branchpoint()', safe=True) | ||
Ivan Andrus
|
r17753 | def branchpoint(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets with more than one child. | ||
Ivan Andrus
|
r17753 | """ | ||
# i18n: "branchpoint" is a keyword | ||||
getargs(x, 0, 0, _("branchpoint takes no arguments")) | ||||
cl = repo.changelog | ||||
if not subset: | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Pierre-Yves David
|
r25549 | # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset | ||
# (and if it is not, it should be.) | ||||
Ivan Andrus
|
r17753 | baserev = min(subset) | ||
parentscount = [0]*(len(repo) - baserev) | ||||
Pierre-Yves David
|
r17785 | for r in cl.revs(start=baserev + 1): | ||
Ivan Andrus
|
r17753 | for p in cl.parentrevs(r): | ||
if p >= baserev: | ||||
parentscount[p - baserev] += 1 | ||||
Yuya Nishihara
|
r28424 | return subset.filter(lambda r: parentscount[r - baserev] > 1, | ||
condrepr='<branchpoint>') | ||||
Ivan Andrus
|
r17753 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('min(set)', safe=True) | ||
Idan Kamara
|
r13915 | def minrev(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset with lowest revision number in set. | ||
Idan Kamara
|
r13915 | """ | ||
Yuya Nishihara
|
r24115 | os = getset(repo, fullreposet(repo), x) | ||
Durham Goode
|
r26305 | try: | ||
Lucas Moscovicz
|
r20754 | m = os.min() | ||
Idan Kamara
|
r13915 | if m in subset: | ||
Yuya Nishihara
|
r28427 | return baseset([m], datarepr=('<min %r, %r>', subset, os)) | ||
Durham Goode
|
r26305 | except ValueError: | ||
# os.min() throws a ValueError when the collection is empty. | ||||
# Same as python's min(). | ||||
pass | ||||
Yuya Nishihara
|
r28427 | return baseset(datarepr=('<min %r, %r>', subset, os)) | ||
Idan Kamara
|
r13915 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('modifies(pattern)', safe=True) | ||
Idan Kamara
|
r13915 | def modifies(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets modifying files matched by pattern. | ||
FUJIWARA Katsunori
|
r20289 | |||
The pattern without explicit kind like ``glob:`` is expected to be | ||||
relative to the current directory and match against a file or a | ||||
directory. | ||||
Idan Kamara
|
r13915 | """ | ||
# i18n: "modifies" is a keyword | ||||
pat = getstring(x, _("modifies requires a pattern")) | ||||
return checkstatus(repo, subset, pat, 0) | ||||
FUJIWARA Katsunori
|
r27584 | @predicate('named(namespace)') | ||
Sean Farley
|
r23836 | def named(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """The changesets in a given namespace. | ||
Sean Farley
|
r23836 | |||
Matt Harbison
|
r30784 | Pattern matching is supported for `namespace`. See | ||
Yuya Nishihara
|
r30799 | :hg:`help revisions.patterns`. | ||
Sean Farley
|
r23836 | """ | ||
# i18n: "named" is a keyword | ||||
args = getargs(x, 1, 1, _('named requires a namespace argument')) | ||||
ns = getstring(args[0], | ||||
# i18n: "named" is a keyword | ||||
_('the argument to named must be a string')) | ||||
Matt Harbison
|
r26481 | kind, pattern, matcher = util.stringmatcher(ns) | ||
Sean Farley
|
r23836 | namespaces = set() | ||
if kind == 'literal': | ||||
if pattern not in repo.names: | ||||
FUJIWARA Katsunori
|
r23978 | raise error.RepoLookupError(_("namespace '%s' does not exist") | ||
% ns) | ||||
Sean Farley
|
r23836 | namespaces.add(repo.names[pattern]) | ||
else: | ||||
for name, ns in repo.names.iteritems(): | ||||
if matcher(name): | ||||
namespaces.add(ns) | ||||
if not namespaces: | ||||
FUJIWARA Katsunori
|
r23978 | raise error.RepoLookupError(_("no namespace exists" | ||
" that match '%s'") % pattern) | ||||
Sean Farley
|
r23836 | |||
names = set() | ||||
for ns in namespaces: | ||||
for name in ns.listnames(repo): | ||||
FUJIWARA Katsunori
|
r24151 | if name not in ns.deprecated: | ||
names.update(repo[n].rev() for n in ns.nodes(repo, name)) | ||||
Sean Farley
|
r23836 | |||
Martin von Zweigbergk
|
r32291 | names -= {node.nullrev} | ||
Sean Farley
|
r23836 | return subset & names | ||
FUJIWARA Katsunori
|
r27587 | @predicate('id(string)', safe=True) | ||
Matt Mackall
|
r16417 | def node_(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Revision non-ambiguously specified by the given hex string prefix. | ||
Patrick Mezard
|
r12821 | """ | ||
Martin Geisler
|
r12815 | # i18n: "id" is a keyword | ||
Benoit Boissinot
|
r12736 | l = getargs(x, 1, 1, _("id requires one argument")) | ||
Martin Geisler
|
r12815 | # i18n: "id" is a keyword | ||
Benoit Boissinot
|
r12736 | n = getstring(l[0], _("id requires a string")) | ||
Augie Fackler
|
r12716 | if len(n) == 40: | ||
Alexander Drozdov
|
r24904 | try: | ||
rn = repo.changelog.rev(node.bin(n)) | ||||
Yuya Nishihara
|
r32659 | except error.WdirUnsupported: | ||
rn = node.wdirrev | ||||
Alexander Drozdov
|
r24904 | except (LookupError, TypeError): | ||
rn = None | ||||
Augie Fackler
|
r12716 | else: | ||
Matt Harbison
|
r16735 | rn = None | ||
Yuya Nishihara
|
r32684 | try: | ||
pm = repo.changelog._partialmatch(n) | ||||
if pm is not None: | ||||
Yuya Nishihara
|
r32659 | rn = repo.changelog.rev(pm) | ||
Yuya Nishihara
|
r32684 | except error.WdirUnsupported: | ||
rn = node.wdirrev | ||||
Matt Harbison
|
r16735 | |||
Pierre-Yves David
|
r23005 | if rn is None: | ||
return baseset() | ||||
result = baseset([rn]) | ||||
return result & subset | ||||
Augie Fackler
|
r12716 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('obsolete()', safe=True) | ||
Pierre-Yves David
|
r17170 | def obsolete(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Mutable changeset with a newer version.""" | ||
FUJIWARA Katsunori
|
r17259 | # i18n: "obsolete" is a keyword | ||
Pierre-Yves David
|
r17170 | getargs(x, 0, 0, _("obsolete takes no arguments")) | ||
Pierre-Yves David
|
r17825 | obsoletes = obsmod.getrevs(repo, 'obsolete') | ||
Lucas Moscovicz
|
r20367 | return subset & obsoletes | ||
Pierre-Yves David
|
r17170 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('only(set, [set])', safe=True) | ||
Yuya Nishihara
|
r23466 | def only(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets that are ancestors of the first set that are not ancestors | ||
Yuya Nishihara
|
r23466 | of any other head in the repo. If a second set is specified, the result | ||
is ancestors of the first set that are not ancestors of the second set | ||||
(i.e. ::<set1> - ::<set2>). | ||||
""" | ||||
cl = repo.changelog | ||||
# i18n: "only" is a keyword | ||||
args = getargs(x, 1, 2, _('only takes one or two arguments')) | ||||
Yuya Nishihara
|
r24115 | include = getset(repo, fullreposet(repo), args[0]) | ||
Yuya Nishihara
|
r23466 | if len(args) == 1: | ||
if not include: | ||||
return baseset() | ||||
descendants = set(_revdescendants(repo, include, False)) | ||||
exclude = [rev for rev in cl.headrevs() | ||||
if rev not in descendants and rev not in include] | ||||
else: | ||||
Yuya Nishihara
|
r24115 | exclude = getset(repo, fullreposet(repo), args[1]) | ||
Yuya Nishihara
|
r23466 | |||
results = set(cl.findmissingrevs(common=exclude, heads=include)) | ||||
Pierre-Yves David
|
r25554 | # XXX we should turn this into a baseset instead of a set, smartset may do | ||
Mads Kiilerich
|
r30332 | # some optimizations from the fact this is a baseset. | ||
Yuya Nishihara
|
r23466 | return subset & results | ||
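# A stand-alone sketch of the documented equivalence: with two arguments,
# only() behaves like '::<set1> - ::<set2>'. The ancestor walk below works on
# a toy {rev: parents} mapping; the helper name and the toy history in the
# comment are illustrative assumptions.
def _ancestorssketch(parents, revs):
    seen = set()
    stack = list(revs)
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(p for p in parents[r] if p >= 0)   # skip nullrev
    return seen
# toy history 0 <- 1 <- 2 and 0 <- 3, i.e. {0: [-1], 1: [0], 2: [1], 3: [0]}:
# only(2, 3) ~ ::2 - ::3
#   -> _ancestorssketch(parents, [2]) - _ancestorssketch(parents, [3]) == {1, 2}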
FUJIWARA Katsunori
|
r27587 | @predicate('origin([set])', safe=True) | ||
Matt Harbison
|
r17185 | def origin(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """ | ||
Matt Harbison
|
r17185 | Changesets that were specified as a source for the grafts, transplants or | ||
rebases that created the given revisions. Omitting the optional set is the | ||||
same as passing all(). If a changeset created by these operations is itself | ||||
specified as a source for one of these operations, only the source changeset | ||||
for the first operation is selected. | ||||
""" | ||||
if x is not None: | ||||
Yuya Nishihara
|
r24115 | dests = getset(repo, fullreposet(repo), x) | ||
Matt Harbison
|
r17185 | else: | ||
Yuya Nishihara
|
r24201 | dests = fullreposet(repo) | ||
Matt Harbison
|
r17185 | |||
def _firstsrc(rev): | ||||
src = _getrevsource(repo, rev) | ||||
if src is None: | ||||
return None | ||||
while True: | ||||
prev = _getrevsource(repo, src) | ||||
if prev is None: | ||||
return src | ||||
src = prev | ||||
Martin von Zweigbergk
|
r32291 | o = {_firstsrc(r) for r in dests} | ||
o -= {None} | ||||
Pierre-Yves David
|
r25554 | # XXX we should turn this into a baseset instead of a set, smartset may do | ||
Mads Kiilerich
|
r30332 | # some optimizations from the fact this is a baseset. | ||
Pierre-Yves David
|
r22536 | return subset & o | ||
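# A stand-alone sketch of the _firstsrc() chain walk above: follow the
# recorded graft/transplant/rebase source of a revision until the earliest
# one. The 'sources' mapping (rev -> recorded source, or absent) and the
# helper name are illustrative assumptions.
def _firstsrcsketch(sources, rev):
    src = sources.get(rev)
    if src is None:
        return None
    while True:
        prev = sources.get(src)
        if prev is None:
            return src
        src = prev
# if 5 was grafted from 3, which was itself grafted from 1:
# _firstsrcsketch({5: 3, 3: 1}, 5) -> 1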
Matt Harbison
|
r17185 | |||
Yuya Nishihara
|
r30850 | @predicate('outgoing([path])', safe=False) | ||
Idan Kamara
|
r13915 | def outgoing(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets not found in the specified destination repository, or the | ||
Idan Kamara
|
r13915 | default push location. | ||
Patrick Mezard
|
r12821 | """ | ||
Gregory Szorc
|
r24722 | # Avoid cycles. | ||
Gregory Szorc
|
r25971 | from . import ( | ||
discovery, | ||||
hg, | ||||
) | ||||
Idan Kamara
|
r13915 | # i18n: "outgoing" is a keyword | ||
Mads Kiilerich
|
r14717 | l = getargs(x, 0, 1, _("outgoing takes one or no arguments")) | ||
Idan Kamara
|
r13915 | # i18n: "outgoing" is a keyword | ||
dest = l and getstring(l[0], _("outgoing requires a repository path")) or '' | ||||
dest = repo.ui.expandpath(dest or 'default-push', dest or 'default') | ||||
dest, branches = hg.parseurl(dest) | ||||
revs, checkout = hg.addbranchrevs(repo, repo, branches, []) | ||||
if revs: | ||||
revs = [repo.lookup(rev) for rev in revs] | ||||
Matt Mackall
|
r14556 | other = hg.peer(repo, {}, dest) | ||
Idan Kamara
|
r13915 | repo.ui.pushbuffer() | ||
Pierre-Yves David
|
r15837 | outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) | ||
Idan Kamara
|
r13915 | repo.ui.popbuffer() | ||
cl = repo.changelog | ||||
Martin von Zweigbergk
|
r32291 | o = {cl.rev(r) for r in outgoing.missing} | ||
Pierre-Yves David
|
r22529 | return subset & o | ||
Augie Fackler
|
r12716 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('p1([set])', safe=True) | ||
Matt Mackall
|
r11275 | def p1(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """First parent of changesets in set, or the working directory. | ||
Patrick Mezard
|
r12821 | """ | ||
Kevin Bullock
|
r12928 | if x is None: | ||
Matt Mackall
|
r13878 | p = repo[x].p1().rev() | ||
Pierre-Yves David
|
r22538 | if p >= 0: | ||
return subset & baseset([p]) | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Kevin Bullock
|
r12928 | |||
Matt Mackall
|
r11275 | ps = set() | ||
cl = repo.changelog | ||||
Yuya Nishihara
|
r24115 | for r in getset(repo, fullreposet(repo), x): | ||
Pulkit Goyal
|
r32403 | try: | ||
ps.add(cl.parentrevs(r)[0]) | ||||
except error.WdirUnsupported: | ||||
ps.add(repo[r].parents()[0].rev()) | ||||
Martin von Zweigbergk
|
r32291 | ps -= {node.nullrev} | ||
Pierre-Yves David
|
r25554 | # XXX we should turn this into a baseset instead of a set, smartset may do | ||
Mads Kiilerich
|
r30332 | # some optimizations from the fact this is a baseset. | ||
Lucas Moscovicz
|
r20367 | return subset & ps | ||
Matt Mackall
|
r11275 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('p2([set])', safe=True) | ||
Matt Mackall
|
r11275 | def p2(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Second parent of changesets in set, or the working directory. | ||
Patrick Mezard
|
r12821 | """ | ||
Kevin Bullock
|
r12928 | if x is None: | ||
ps = repo[x].parents() | ||||
try: | ||||
Patrick Mezard
|
r12935 | p = ps[1].rev() | ||
Pierre-Yves David
|
r22539 | if p >= 0: | ||
return subset & baseset([p]) | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Kevin Bullock
|
r12928 | except IndexError: | ||
Pierre-Yves David
|
r22802 | return baseset() | ||
Kevin Bullock
|
r12928 | |||
Matt Mackall
|
r11275 | ps = set() | ||
cl = repo.changelog | ||||
Yuya Nishihara
|
r24115 | for r in getset(repo, fullreposet(repo), x): | ||
Pulkit Goyal
|
r32440 | try: | ||
ps.add(cl.parentrevs(r)[1]) | ||||
except error.WdirUnsupported: | ||||
parents = repo[r].parents() | ||||
if len(parents) == 2: | ||||
ps.add(parents[1].rev()) | ||||
Martin von Zweigbergk
|
r32291 | ps -= {node.nullrev} | ||
Pierre-Yves David
|
r25554 | # XXX we should turn this into a baseset instead of a set, smartset may do | ||
Mads Kiilerich
|
r30332 | # some optimizations from the fact this is a baseset. | ||
Lucas Moscovicz
|
r20367 | return subset & ps | ||
Matt Mackall
|
r11275 | |||
Yuya Nishihara
|
r29932 | def parentpost(repo, subset, x, order): | ||
Yuya Nishihara
|
r29931 | return p1(repo, subset, x) | ||
FUJIWARA Katsunori
|
r27587 | @predicate('parents([set])', safe=True) | ||
Matt Mackall
|
r11275 | def parents(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """ | ||
Kevin Bullock
|
r12929 | The set of all parents for all changesets in set, or the working directory. | ||
Patrick Mezard
|
r12821 | """ | ||
Kevin Bullock
|
r12929 | if x is None: | ||
Pierre-Yves David
|
r22496 | ps = set(p.rev() for p in repo[x].parents()) | ||
else: | ||||
ps = set() | ||||
cl = repo.changelog | ||||
Pierre-Yves David
|
r25716 | up = ps.update | ||
parentrevs = cl.parentrevs | ||||
Yuya Nishihara
|
r24115 | for r in getset(repo, fullreposet(repo), x): | ||
Pulkit Goyal
|
r32439 | try: | ||
up(parentrevs(r)) | ||||
except error.WdirUnsupported: | ||||
Pierre-Yves David
|
r25716 | up(p.rev() for p in repo[r].parents()) | ||
Martin von Zweigbergk
|
r32291 | ps -= {node.nullrev} | ||
Pierre-Yves David
|
r22712 | return subset & ps | ||
Matt Mackall
|
r11275 | |||
Jun Wu
|
r31017 | def _phase(repo, subset, *targets): | ||
"""helper to select all rev in <targets> phases""" | ||||
s = repo._phasecache.getrevset(repo, targets) | ||||
return subset & s | ||||
Pierre-Yves David
|
r25621 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('draft()', safe=True) | ||
Pierre-Yves David
|
r25621 | def draft(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset in draft phase.""" | ||
Pierre-Yves David
|
r25621 | # i18n: "draft" is a keyword | ||
getargs(x, 0, 0, _("draft takes no arguments")) | ||||
target = phases.draft | ||||
return _phase(repo, subset, target) | ||||
FUJIWARA Katsunori
|
r27587 | @predicate('secret()', safe=True) | ||
Pierre-Yves David
|
r25621 | def secret(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset in secret phase.""" | ||
Pierre-Yves David
|
r25621 | # i18n: "secret" is a keyword | ||
getargs(x, 0, 0, _("secret takes no arguments")) | ||||
target = phases.secret | ||||
return _phase(repo, subset, target) | ||||
Yuya Nishihara
|
r29932 | def parentspec(repo, subset, x, n, order): | ||
Kevin Gessner
|
r14070 | """``set^0`` | ||
The set. | ||||
``set^1`` (or ``set^``), ``set^2`` | ||||
First or second parent, respectively, of all changesets in set. | ||||
Patrick Mezard
|
r12821 | """ | ||
Brodie Rao
|
r12320 | try: | ||
Kevin Gessner
|
r14070 | n = int(n[1]) | ||
Kevin Gessner
|
r14072 | if n not in (0, 1, 2): | ||
Kevin Gessner
|
r14070 | raise ValueError | ||
Matt Mackall
|
r14851 | except (TypeError, ValueError): | ||
Kevin Gessner
|
r14070 | raise error.ParseError(_("^ expects a number 0, 1, or 2")) | ||
ps = set() | ||||
Matt Mackall
|
r11275 | cl = repo.changelog | ||
Pierre-Yves David
|
r23165 | for r in getset(repo, fullreposet(repo), x): | ||
Kevin Gessner
|
r14070 | if n == 0: | ||
ps.add(r) | ||||
elif n == 1: | ||||
Pulkit Goyal
|
r32436 | try: | ||
ps.add(cl.parentrevs(r)[0]) | ||||
except error.WdirUnsupported: | ||||
ps.add(repo[r].parents()[0].rev()) | ||||
Pulkit Goyal
|
r32438 | else: | ||
Pulkit Goyal
|
r32436 | try: | ||
parents = cl.parentrevs(r) | ||||
if parents[1] != node.nullrev: | ||||
ps.add(parents[1]) | ||||
except error.WdirUnsupported: | ||||
parents = repo[r].parents() | ||||
if len(parents) == 2: | ||||
ps.add(parents[1].rev()) | ||||
Lucas Moscovicz
|
r20367 | return subset & ps | ||
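# A stand-alone sketch of the '^n' semantics implemented above: n == 0 keeps
# the revision itself, n == 1 takes first parents, n == 2 takes second parents
# of merges only. The toy parentrevs table and the helper name are
# illustrative assumptions.
def _parentspecsketch(parentrevs, revs, n):
    ps = set()
    for r in revs:
        if n == 0:
            ps.add(r)
        elif n == 1:
            ps.add(parentrevs[r][0])
        elif parentrevs[r][1] != -1:     # only merges have a second parent
            ps.add(parentrevs[r][1])
    return ps
# with a merge {3: (1, 2)}: 3^0 -> {3}, 3^1 -> {1}, 3^2 -> {2}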
Matt Mackall
|
r11275 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('present(set)', safe=True) | ||
Wagner Bruna
|
r11944 | def present(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """An empty set, if any revision in set isn't found; otherwise, | ||
Patrick Mezard
|
r12821 | all revisions in set. | ||
FUJIWARA Katsunori
|
r16748 | |||
If any of the specified revisions is not present in the local repository, | ||||
the query is normally aborted. But this predicate allows the query | ||||
to continue even in such cases. | ||||
Patrick Mezard
|
r12821 | """ | ||
Wagner Bruna
|
r11944 | try: | ||
return getset(repo, subset, x) | ||||
except error.RepoLookupError: | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Wagner Bruna
|
r11944 | |||
Yuya Nishihara
|
r25224 | # for internal use | ||
FUJIWARA Katsunori
|
r27587 | @predicate('_notpublic', safe=True) | ||
Laurent Charignon
|
r25191 | def _notpublic(repo, subset, x): | ||
Yuya Nishihara
|
r25225 | getargs(x, 0, 0, "_notpublic takes no arguments") | ||
Jun Wu
|
r31017 | return _phase(repo, subset, phases.draft, phases.secret) | ||
Laurent Charignon
|
r25191 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('public()', safe=True) | ||
Pierre-Yves David
|
r15819 | def public(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changeset in public phase.""" | ||
FUJIWARA Katsunori
|
r17259 | # i18n: "public" is a keyword | ||
Pierre-Yves David
|
r15819 | getargs(x, 0, 0, _("public takes no arguments")) | ||
Pierre-Yves David
|
r23019 | phase = repo._phasecache.phase | ||
target = phases.public | ||||
condition = lambda r: phase(repo, r) == target | ||||
Yuya Nishihara
|
r28424 | return subset.filter(condition, condrepr=('<phase %r>', target), | ||
cache=False) | ||||
Pierre-Yves David
|
r15819 | |||
Yuya Nishihara
|
r30850 | @predicate('remote([id [,path]])', safe=False) | ||
Matt Mackall
|
r15936 | def remote(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Local revision that corresponds to the given identifier in a | ||
Matt Mackall
|
r15936 | remote repository, if present. Here, the '.' identifier is a | ||
synonym for the current local branch. | ||||
""" | ||||
Gregory Szorc
|
r25971 | from . import hg # avoid start-up nasties | ||
Matt Mackall
|
r15936 | # i18n: "remote" is a keyword | ||
timeless
|
r27293 | l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments")) | ||
Matt Mackall
|
r15936 | |||
q = '.' | ||||
if len(l) > 0: | ||||
# i18n: "remote" is a keyword | ||||
q = getstring(l[0], _("remote requires a string id")) | ||||
if q == '.': | ||||
q = repo['.'].branch() | ||||
dest = '' | ||||
if len(l) > 1: | ||||
# i18n: "remote" is a keyword | ||||
dest = getstring(l[1], _("remote requires a repository path")) | ||||
dest = repo.ui.expandpath(dest or 'default') | ||||
dest, branches = hg.parseurl(dest) | ||||
revs, checkout = hg.addbranchrevs(repo, repo, branches, []) | ||||
if revs: | ||||
revs = [repo.lookup(rev) for rev in revs] | ||||
other = hg.peer(repo, {}, dest) | ||||
n = other.lookup(q) | ||||
if n in repo: | ||||
r = repo[n].rev() | ||||
FUJIWARA Katsunori
|
r16006 | if r in subset: | ||
Lucas Moscovicz
|
r20364 | return baseset([r]) | ||
Pierre-Yves David
|
r22802 | return baseset() | ||
Matt Mackall
|
r15936 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('removes(pattern)', safe=True) | ||
Matt Mackall
|
r11275 | def removes(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets which remove files matching pattern. | ||
FUJIWARA Katsunori
|
r20289 | |||
The pattern without explicit kind like ``glob:`` is expected to be | ||||
relative to the current directory and match against a file or a | ||||
directory. | ||||
Patrick Mezard
|
r12821 | """ | ||
Martin Geisler
|
r12815 | # i18n: "removes" is a keyword | ||
Benoit Boissinot
|
r12736 | pat = getstring(x, _("removes requires a pattern")) | ||
Matt Mackall
|
r11275 | return checkstatus(repo, subset, pat, 2) | ||
FUJIWARA Katsunori
|
r27587 | @predicate('rev(number)', safe=True) | ||
Idan Kamara
|
r13915 | def rev(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Revision with the given numeric identifier. | ||
Patrick Mezard
|
r12821 | """ | ||
Idan Kamara
|
r13915 | # i18n: "rev" is a keyword | ||
l = getargs(x, 1, 1, _("rev requires one argument")) | ||||
try: | ||||
# i18n: "rev" is a keyword | ||||
l = int(getstring(l[0], _("rev requires a number"))) | ||||
Matt Mackall
|
r14851 | except (TypeError, ValueError): | ||
Idan Kamara
|
r13915 | # i18n: "rev" is a keyword | ||
raise error.ParseError(_("rev expects a number")) | ||||
Yuya Nishihara
|
r32659 | if l not in repo.changelog and l not in (node.nullrev, node.wdirrev): | ||
Yuya Nishihara
|
r23062 | return baseset() | ||
Pierre-Yves David
|
r22537 | return subset & baseset([l]) | ||
Matt Mackall
|
r11275 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('matching(revision [, field])', safe=True) | ||
Angel Ezquerra
|
r16402 | def matching(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets in which a given set of fields match the set of fields in the | ||
Angel Ezquerra
|
r16402 | selected revision or set. | ||
FUJIWARA Katsunori
|
r16528 | |||
Angel Ezquerra
|
r16402 | To match more than one field pass the list of fields to match separated | ||
FUJIWARA Katsunori
|
r16528 | by spaces (e.g. ``author description``). | ||
Valid fields are most regular revision fields and some special fields. | ||||
Regular revision fields are ``description``, ``author``, ``branch``, | ||||
Angel Ezquerra
|
r17102 | ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user`` | ||
and ``diff``. | ||||
Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the | ||||
contents of the revision. Two revisions matching their ``diff`` will | ||||
also match their ``files``. | ||||
FUJIWARA Katsunori
|
r16528 | |||
Special fields are ``summary`` and ``metadata``: | ||||
``summary`` matches the first line of the description. | ||||
Jesse Glick
|
r16639 | ``metadata`` is equivalent to matching ``description user date`` | ||
FUJIWARA Katsunori
|
r16528 | (i.e. it matches the main metadata fields). | ||
``metadata`` is the default field which is used when no fields are | ||||
specified. You can match more than one field at a time. | ||||
Angel Ezquerra
|
r16402 | """ | ||
FUJIWARA Katsunori
|
r17259 | # i18n: "matching" is a keyword | ||
Angel Ezquerra
|
r16402 | l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments")) | ||
Pierre-Yves David
|
r23166 | revs = getset(repo, fullreposet(repo), l[0]) | ||
Angel Ezquerra
|
r16402 | |||
fieldlist = ['metadata'] | ||||
if len(l) > 1: | ||||
fieldlist = getstring(l[1], | ||||
FUJIWARA Katsunori
|
r17259 | # i18n: "matching" is a keyword | ||
Angel Ezquerra
|
r16402 | _("matching requires a string " | ||
"as its second argument")).split() | ||||
Angel Ezquerra
|
r17102 | # Make sure that there are no repeated fields, | ||
# expand the 'special' 'metadata' field type | ||||
# and check the 'files' whenever we check the 'diff' | ||||
Angel Ezquerra
|
r16402 | fields = [] | ||
for field in fieldlist: | ||||
if field == 'metadata': | ||||
fields += ['user', 'description', 'date'] | ||||
Angel Ezquerra
|
r17102 | elif field == 'diff': | ||
# a revision matching the diff must also match the files | ||||
# since matching the diff is very costly, make sure to | ||||
# also match the files first | ||||
fields += ['files', 'diff'] | ||||
Angel Ezquerra
|
r16402 | else: | ||
if field == 'author': | ||||
field = 'user' | ||||
fields.append(field) | ||||
fields = set(fields) | ||||
Angel Ezquerra
|
r16444 | if 'summary' in fields and 'description' in fields: | ||
# If a revision matches its description it also matches its summary | ||||
fields.discard('summary') | ||||
Angel Ezquerra
|
r16402 | |||
# We may want to match more than one field | ||||
Angel Ezquerra
|
r16446 | # Not all fields take the same amount of time to be matched | ||
# Sort the selected fields in order of increasing matching cost | ||||
Patrick Mezard
|
r16453 | fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary', | ||
Angel Ezquerra
|
r17102 | 'files', 'description', 'substate', 'diff'] | ||
Angel Ezquerra
|
r16446 | def fieldkeyfunc(f): | ||
try: | ||||
return fieldorder.index(f) | ||||
except ValueError: | ||||
# assume an unknown field is very costly | ||||
return len(fieldorder) | ||||
fields = list(fields) | ||||
fields.sort(key=fieldkeyfunc) | ||||
Angel Ezquerra
|
r16402 | # Each field will be matched with its own "getfield" function | ||
# which will be added to the getfieldfuncs array of functions | ||||
getfieldfuncs = [] | ||||
_funcs = { | ||||
'user': lambda r: repo[r].user(), | ||||
'branch': lambda r: repo[r].branch(), | ||||
'date': lambda r: repo[r].date(), | ||||
'description': lambda r: repo[r].description(), | ||||
'files': lambda r: repo[r].files(), | ||||
'parents': lambda r: repo[r].parents(), | ||||
'phase': lambda r: repo[r].phase(), | ||||
'substate': lambda r: repo[r].substate, | ||||
'summary': lambda r: repo[r].description().splitlines()[0], | ||||
Angel Ezquerra
|
r17102 | 'diff': lambda r: list(repo[r].diff(git=True)), | ||
Angel Ezquerra
|
r16402 | } | ||
for info in fields: | ||||
getfield = _funcs.get(info, None) | ||||
if getfield is None: | ||||
raise error.ParseError( | ||||
FUJIWARA Katsunori
|
r17259 | # i18n: "matching" is a keyword | ||
Angel Ezquerra
|
r16402 | _("unexpected field name passed to matching: %s") % info) | ||
getfieldfuncs.append(getfield) | ||||
# convert the getfield array of functions into a "getinfo" function | ||||
# which returns an array of field values (or a single value if there | ||||
# is only one field to match) | ||||
Angel Ezquerra
|
r16445 | getinfo = lambda r: [f(r) for f in getfieldfuncs] | ||
Angel Ezquerra
|
r16402 | |||
Lucas Moscovicz
|
r20459 | def matches(x): | ||
for rev in revs: | ||||
target = getinfo(rev) | ||||
Angel Ezquerra
|
r16445 | match = True | ||
for n, f in enumerate(getfieldfuncs): | ||||
Lucas Moscovicz
|
r20459 | if target[n] != f(x): | ||
Angel Ezquerra
|
r16445 | match = False | ||
if match: | ||||
Lucas Moscovicz
|
r20459 | return True | ||
return False | ||||
Yuya Nishihara
|
r28424 | return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs)) | ||
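# A stand-alone sketch of the comparison performed by matches() above: a
# candidate revision is kept when, for some revision of the reference set,
# every selected field getter returns the same value. The toy getters and
# values are illustrative assumptions.
def _matchingsketch(getters, refs, candidates):
    refvalues = [[g(r) for g in getters] for r in refs]
    matched = []
    for cand in candidates:
        candvalues = [g(cand) for g in getters]
        if any(candvalues == target for target in refvalues):
            matched.append(cand)
    return matched
# e.g. _matchingsketch([len], ['abc'], ['de', 'xyz']) -> ['xyz']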
Angel Ezquerra
|
r16402 | |||
Yuya Nishihara
|
r29945 | @predicate('reverse(set)', safe=True, takeorder=True) | ||
def reverse(repo, subset, x, order): | ||||
FUJIWARA Katsunori
|
r27584 | """Reverse order of set. | ||
Patrick Mezard
|
r12821 | """ | ||
Matt Mackall
|
r11275 | l = getset(repo, subset, x) | ||
Yuya Nishihara
|
r29945 | if order == defineorder: | ||
l.reverse() | ||||
Matt Mackall
|
r11275 | return l | ||
FUJIWARA Katsunori
|
r27587 | @predicate('roots(set)', safe=True) | ||
Idan Kamara
|
r13915 | def roots(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets in set with no parent changeset in set. | ||
Patrick Mezard
|
r12821 | """ | ||
Yuya Nishihara
|
r24115 | s = getset(repo, fullreposet(repo), x) | ||
Pierre-Yves David
|
r25647 | parents = repo.changelog.parentrevs | ||
def filter(r): | ||||
for p in parents(r): | ||||
if 0 <= p and p in s: | ||||
return False | ||||
return True | ||||
Yuya Nishihara
|
r28424 | return subset & s.filter(filter, condrepr='<roots>') | ||
Wagner Bruna
|
r11944 | |||
Yuya Nishihara
|
r29265 | _sortkeyfuncs = { | ||
'rev': lambda c: c.rev(), | ||||
'branch': lambda c: c.branch(), | ||||
'desc': lambda c: c.description(), | ||||
'user': lambda c: c.user(), | ||||
'author': lambda c: c.user(), | ||||
'date': lambda c: c.date()[0], | ||||
} | ||||
Yuya Nishihara
|
r29365 | def _getsortargs(x): | ||
"""Parse sort options into (set, [(key, reverse)], opts)""" | ||||
Martijn Pieters
|
r29348 | args = getargsdict(x, 'sort', 'set keys topo.firstbranch') | ||
Martijn Pieters
|
r29238 | if 'set' not in args: | ||
# i18n: "sort" is a keyword | ||||
raise error.ParseError(_('sort requires one or two arguments')) | ||||
Matt Mackall
|
r11275 | keys = "rev" | ||
Martijn Pieters
|
r29238 | if 'keys' in args: | ||
FUJIWARA Katsunori
|
r17259 | # i18n: "sort" is a keyword | ||
Martijn Pieters
|
r29238 | keys = getstring(args['keys'], _("sort spec must be a string")) | ||
Yuya Nishihara
|
r29363 | keyflags = [] | ||
for k in keys.split(): | ||||
fk = k | ||||
reverse = (k[0] == '-') | ||||
if reverse: | ||||
k = k[1:] | ||||
if k not in _sortkeyfuncs and k != 'topo': | ||||
raise error.ParseError(_("unknown sort key %r") % fk) | ||||
keyflags.append((k, reverse)) | ||||
if len(keyflags) > 1 and any(k == 'topo' for k, reverse in keyflags): | ||||
Martijn Pieters
|
r29348 | # i18n: "topo" is a keyword | ||
FUJIWARA Katsunori
|
r29646 | raise error.ParseError(_('topo sort order cannot be combined ' | ||
'with other sort keys')) | ||||
Martijn Pieters
|
r29348 | |||
Yuya Nishihara
|
r29364 | opts = {} | ||
Martijn Pieters
|
r29348 | if 'topo.firstbranch' in args: | ||
Yuya Nishihara
|
r29363 | if any(k == 'topo' for k, reverse in keyflags): | ||
Yuya Nishihara
|
r29364 | opts['topo.firstbranch'] = args['topo.firstbranch'] | ||
Martijn Pieters
|
r29348 | else: | ||
# i18n: "topo" and "topo.firstbranch" are keywords | ||||
FUJIWARA Katsunori
|
r29646 | raise error.ParseError(_('topo.firstbranch can only be used ' | ||
'when using the topo sort key')) | ||||
Martijn Pieters
|
r29348 | |||
Yuya Nishihara
|
r29365 | return args['set'], keyflags, opts | ||
Yuya Nishihara
|
r29946 | @predicate('sort(set[, [-]key... [, ...]])', safe=True, takeorder=True) | ||
def sort(repo, subset, x, order): | ||||
Yuya Nishihara
|
r29365 | """Sort set by keys. The default sort order is ascending, specify a key | ||
as ``-key`` to sort in descending order. | ||||
The keys can be: | ||||
- ``rev`` for the revision number, | ||||
- ``branch`` for the branch name, | ||||
- ``desc`` for the commit message (description), | ||||
- ``user`` for user name (``author`` can be used as an alias), | ||||
- ``date`` for the commit date | ||||
- ``topo`` for a reverse topological sort | ||||
The ``topo`` sort order cannot be combined with other sort keys. This sort | ||||
takes one optional argument, ``topo.firstbranch``, which takes a revset that | ||||
specifies what topological branches to prioritize in the sort. | ||||
""" | ||||
s, keyflags, opts = _getsortargs(x) | ||||
Yuya Nishihara
|
r29364 | revs = getset(repo, subset, s) | ||
Yuya Nishihara
|
r29946 | if not keyflags or order != defineorder: | ||
Lucas Moscovicz
|
r20719 | return revs | ||
Yuya Nishihara
|
r29363 | if len(keyflags) == 1 and keyflags[0][0] == "rev": | ||
revs.sort(reverse=keyflags[0][1]) | ||||
Lucas Moscovicz
|
r20719 | return revs | ||
Yuya Nishihara
|
r29363 | elif keyflags[0][0] == "topo": | ||
Yuya Nishihara
|
r29364 | firstbranch = () | ||
if 'topo.firstbranch' in opts: | ||||
firstbranch = getset(repo, subset, opts['topo.firstbranch']) | ||||
Martijn Pieters
|
r29348 | revs = baseset(_toposort(revs, repo.changelog.parentrevs, firstbranch), | ||
istopo=True) | ||||
Yuya Nishihara
|
r29363 | if keyflags[0][1]: | ||
Martijn Pieters
|
r29348 | revs.reverse() | ||
return revs | ||||
Yuya Nishihara
|
r29001 | # sort() is guaranteed to be stable | ||
ctxs = [repo[r] for r in revs] | ||||
Yuya Nishihara
|
r29363 | for k, reverse in reversed(keyflags): | ||
ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse) | ||||
Yuya Nishihara
|
r29001 | return baseset([c.rev() for c in ctxs]) | ||
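# A stand-alone sketch of why the loop above applies the keys in reverse:
# list.sort() is stable, so sorting by the least significant key first and the
# most significant key last is equivalent to sorting by the composite key.
# The toy (branch, rev) records are illustrative assumptions.
def _stablesortsketch():
    records = [('stable', 2), ('default', 1), ('stable', 1)]
    bykey = sorted(records)                        # composite (branch, rev) sort
    stacked = sorted(records, key=lambda t: t[1])  # secondary key first
    stacked.sort(key=lambda t: t[0])               # then primary key (stable)
    return bykey == stacked                        # -> True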
Matt Mackall
|
r11275 | |||
Martijn Pieters
|
r29348 | def _toposort(revs, parentsfunc, firstbranch=()): | ||
Martijn Pieters
|
r29347 | """Yield revisions from heads to roots one (topo) branch at a time. | ||
This function aims to be used by a graph generator that wishes to minimize | ||||
the number of parallel branches and their interleaving. | ||||
Example iteration order (numbers show the "true" order in a changelog): | ||||
o 4 | ||||
| | ||||
o 1 | ||||
| | ||||
| o 3 | ||||
| | | ||||
| o 2 | ||||
|/ | ||||
o 0 | ||||
Note that the ancestors of merges are understood by the current | ||||
algorithm to be on the same branch. This means no reordering will | ||||
occur behind a merge. | ||||
""" | ||||
### Quick summary of the algorithm | ||||
# | ||||
# This function is based around a "retention" principle. We keep revisions | ||||
# in memory until we are ready to emit a whole branch that immediately | ||||
# "merges" into an existing one. This reduces the number of parallel | ||||
# branches with interleaved revisions. | ||||
# | ||||
# During iteration revs are split into two groups: | ||||
# A) revisions already emitted | ||||
# B) revisions in "retention". They are stored as different subgroups. | ||||
# | ||||
# for each REV, we do the following logic: | ||||
# | ||||
# 1) if REV is a parent of (A), we will emit it. If there is a | ||||
# retention group ((B) above) that is blocked on REV being | ||||
# available, we emit all the revisions out of that retention | ||||
# group first. | ||||
# | ||||
# 2) else, we'll search for a subgroup in (B) waiting for REV to be | ||||
# available; if such a subgroup exists, we add REV to it and the subgroup is | ||||
# now waiting for REV.parents() to be available. | ||||
# | ||||
# 3) finally if no such group existed in (B), we create a new subgroup. | ||||
# | ||||
# | ||||
# To bootstrap the algorithm, we emit the tipmost revision (which | ||||
# puts it in group (A) from above). | ||||
revs.sort(reverse=True) | ||||
# Set of parents of revisions that have been emitted. They can be considered | ||||
# unblocked as the graph generator is already aware of them so there is no | ||||
# need to delay the revisions that reference them. | ||||
# | ||||
# If someone wants to prioritize a branch over the others, pre-filling this | ||||
# set will force all other branches to wait until this branch is ready to be | ||||
# emitted. | ||||
unblocked = set(firstbranch) | ||||
# list of groups waiting to be displayed, each group is defined by: | ||||
# | ||||
# (revs: list of revs waiting to be displayed, | ||||
# blocked: set of revs that cannot be displayed before those in 'revs') | ||||
# | ||||
# The second value ('blocked') corresponds to parents of any revision in the | ||||
# group ('revs') that is not itself contained in the group. The main idea | ||||
# of this algorithm is to delay as much as possible the emission of any | ||||
# revision. This means waiting for the moment we are about to display | ||||
# these parents to display the revs in a group. | ||||
# | ||||
# This first implementation is smart until it encounters a merge: it will | ||||
# emit revs as soon as any parent is about to be emitted and can grow an | ||||
# arbitrary number of revs in 'blocked'. In practice this means we properly | ||||
# retain new branches but give up on any special ordering for ancestors | ||||
# of merges. The implementation can be improved to handle this better. | ||||
# | ||||
# The first subgroup is special. It corresponds to all the revisions that | ||||
# were already emitted. The 'revs' list is expected to be empty and the | ||||
# 'blocked' set contains the parent revisions of already emitted revisions. | ||||
# | ||||
# You could pre-seed the <parents> set of groups[0] with specific | ||||
# changesets to select what the first emitted branch should be. | ||||
groups = [([], unblocked)] | ||||
pendingheap = [] | ||||
pendingset = set() | ||||
heapq.heapify(pendingheap) | ||||
heappop = heapq.heappop | ||||
heappush = heapq.heappush | ||||
for currentrev in revs: | ||||
# Heap works with smallest element, we want highest so we invert | ||||
if currentrev not in pendingset: | ||||
heappush(pendingheap, -currentrev) | ||||
pendingset.add(currentrev) | ||||
# iterate on pending revs until after the current rev has been | ||||
# processed. | ||||
rev = None | ||||
while rev != currentrev: | ||||
rev = -heappop(pendingheap) | ||||
pendingset.remove(rev) | ||||
# Look for a blocked subgroup waiting for the current revision. | ||||
matching = [i for i, g in enumerate(groups) if rev in g[1]] | ||||
if matching: | ||||
# The main idea is to gather together all sets that are blocked | ||||
# on the same revision. | ||||
# | ||||
# Groups are merged when a common blocking ancestor is | ||||
# observed. For example, given two groups: | ||||
# | ||||
# revs [5, 4] waiting for 1 | ||||
# revs [3, 2] waiting for 1 | ||||
# | ||||
# These two groups will be merged when we process | ||||
# 1. In theory, we could have merged the groups when | ||||
# we added 2 to the group it is now in (we could have | ||||
# noticed the groups were both blocked on 1 then), but | ||||
# the way it works now makes the algorithm simpler. | ||||
# | ||||
# We also always keep the oldest subgroup first. We can | ||||
# probably improve the behavior by having the longest set | ||||
# first. That way, graph algorithms could minimise the length | ||||
# of parallel lines in their drawing. This is currently not done. | ||||
targetidx = matching.pop(0) | ||||
trevs, tparents = groups[targetidx] | ||||
for i in matching: | ||||
gr = groups[i] | ||||
trevs.extend(gr[0]) | ||||
tparents |= gr[1] | ||||
# delete all merged subgroups (except the one we kept) | ||||
# (starting from the last subgroup for performance and | ||||
# sanity reasons) | ||||
for i in reversed(matching): | ||||
del groups[i] | ||||
else: | ||||
# This is a new head. We create a new subgroup for it. | ||||
targetidx = len(groups) | ||||
Martin von Zweigbergk
|
r32291 | groups.append(([], {rev})) | ||
Martijn Pieters
|
r29347 | |||
gr = groups[targetidx] | ||||
# We now add the current nodes to this subgroup. This is done | ||||
# after the subgroup merging because all elements from a subgroup | ||||
# that relied on this rev must precede it. | ||||
# | ||||
# we also update the <parents> set to include the parents of the | ||||
# new nodes. | ||||
if rev == currentrev: # only display stuff in rev | ||||
gr[0].append(rev) | ||||
gr[1].remove(rev) | ||||
parents = [p for p in parentsfunc(rev) if p > node.nullrev] | ||||
gr[1].update(parents) | ||||
for p in parents: | ||||
if p not in pendingset: | ||||
pendingset.add(p) | ||||
heappush(pendingheap, -p) | ||||
# Look for a subgroup to display | ||||
# | ||||
# When unblocked is empty (if clause), we were not waiting for any | ||||
# revisions during the first iteration (if no priority was given) or | ||||
# if we emitted a whole disconnected set of the graph (reached a | ||||
# root). In that case we arbitrarily take the oldest known | ||||
# subgroup. The heuristic could probably be better. | ||||
# | ||||
# Otherwise (elif clause) if the subgroup is blocked on | ||||
# a revision we just emitted, we can safely emit it as | ||||
# well. | ||||
if not unblocked: | ||||
if len(groups) > 1: # display other subset | ||||
targetidx = 1 | ||||
gr = groups[1] | ||||
elif not gr[1] & unblocked: | ||||
gr = None | ||||
if gr is not None: | ||||
# update the set of awaited revisions with the one from the | ||||
# subgroup | ||||
unblocked |= gr[1] | ||||
# output all revisions in the subgroup | ||||
for r in gr[0]: | ||||
yield r | ||||
# delete the subgroup that you just output | ||||
# unless it is groups[0] in which case you just empty it. | ||||
if targetidx: | ||||
del groups[targetidx] | ||||
else: | ||||
gr[0][:] = [] | ||||
# Check if we have some subgroup waiting for revisions we are not going to | ||||
# iterate over | ||||
for g in groups: | ||||
for r in g[0]: | ||||
yield r | ||||
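# A hedged usage sketch of _toposort() on the small graph from its docstring
# (two branches 0-1-4 and 0-2-3). The parents table below is an assumption
# reconstructed from that drawing; per the docstring the emission order is
# expected to be [4, 1, 3, 2, 0].
def _toposortdemo():
    parents = {0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (2, -1), 4: (1, -1)}
    return list(_toposort([0, 1, 2, 3, 4], lambda r: parents[r]))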
FUJIWARA Katsunori
|
r27584 | @predicate('subrepo([pattern])') | ||
Matt Harbison
|
r24446 | def subrepo(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Changesets that add, modify or remove the given subrepo. If no subrepo | ||
Matt Harbison
|
r24446 | pattern is named, any subrepo changes are returned. | ||
""" | ||||
# i18n: "subrepo" is a keyword | ||||
args = getargs(x, 0, 1, _('subrepo takes at most one argument')) | ||||
Yuya Nishihara
|
r28272 | pat = None | ||
Matt Harbison
|
r24446 | if len(args) != 0: | ||
pat = getstring(args[0], _("subrepo requires a pattern")) | ||||
m = matchmod.exact(repo.root, repo.root, ['.hgsubstate']) | ||||
def submatches(names): | ||||
Matt Harbison
|
r26481 | k, p, m = util.stringmatcher(pat) | ||
Matt Harbison
|
r24446 | for name in names: | ||
if m(name): | ||||
yield name | ||||
def matches(x): | ||||
c = repo[x] | ||||
s = repo.status(c.p1().node(), c.node(), match=m) | ||||
Yuya Nishihara
|
r28272 | if pat is None: | ||
Matt Harbison
|
r24446 | return s.added or s.modified or s.removed | ||
if s.added: | ||||
Augie Fackler
|
r25149 | return any(submatches(c.substate.keys())) | ||
Matt Harbison
|
r24446 | |||
if s.modified: | ||||
subs = set(c.p1().substate.keys()) | ||||
subs.update(c.substate.keys()) | ||||
for path in submatches(subs): | ||||
if c.p1().substate.get(path) != c.substate.get(path): | ||||
return True | ||||
if s.removed: | ||||
Augie Fackler
|
r25149 | return any(submatches(c.p1().substate.keys())) | ||
Matt Harbison
|
r24446 | |||
return False | ||||
Yuya Nishihara
|
r28424 | return subset.filter(matches, condrepr=('<subrepo %r>', pat)) | ||
Matt Harbison
|
r24446 | |||
Matt Harbison
|
r30782 | def _substringmatcher(pattern, casesensitive=True): | ||
kind, pattern, matcher = util.stringmatcher(pattern, | ||||
casesensitive=casesensitive) | ||||
Simon King
|
r16823 | if kind == 'literal': | ||
Matt Harbison
|
r30782 | if not casesensitive: | ||
pattern = encoding.lower(pattern) | ||||
matcher = lambda s: pattern in encoding.lower(s) | ||||
else: | ||||
matcher = lambda s: pattern in s | ||||
Simon King
|
r16823 | return kind, pattern, matcher | ||
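# A stand-alone sketch of the literal case above: for 'literal' patterns the
# matcher reduces to a plain substring test, lowercasing both sides when the
# match is case-insensitive. The helper name is an illustrative assumption and
# str.lower() stands in for encoding.lower().
def _substringsketch(pattern, s, casesensitive=True):
    if not casesensitive:
        return pattern.lower() in s.lower()
    return pattern in s
# _substringsketch('Bug', 'fix Bugzilla id', casesensitive=False) -> True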
Simon King
|
r16819 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('tag([name])', safe=True) | ||
Augie Fackler
|
r12715 | def tag(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """The specified tag by name, or all tagged revisions if no name is given. | ||
Matt Harbison
|
r20824 | |||
Matt Harbison
|
r30784 | Pattern matching is supported for `name`. See | ||
Yuya Nishihara
|
r30799 | :hg:`help revisions.patterns`. | ||
Patrick Mezard
|
r12821 | """ | ||
Martin Geisler
|
r12815 | # i18n: "tag" is a keyword | ||
Augie Fackler
|
r12715 | args = getargs(x, 0, 1, _("tag takes one or no arguments")) | ||
Matt Mackall
|
r11280 | cl = repo.changelog | ||
Augie Fackler
|
r12715 | if args: | ||
Simon King
|
r16820 | pattern = getstring(args[0], | ||
# i18n: "tag" is a keyword | ||||
_('the argument to tag must be a string')) | ||||
Matt Harbison
|
r26481 | kind, pattern, matcher = util.stringmatcher(pattern) | ||
Simon King
|
r16820 | if kind == 'literal': | ||
Matt Mackall
|
r16825 | # avoid resolving all tags | ||
tn = repo._tagscache.tags.get(pattern, None) | ||||
if tn is None: | ||||
FUJIWARA Katsunori
|
r23978 | raise error.RepoLookupError(_("tag '%s' does not exist") | ||
% pattern) | ||||
Martin von Zweigbergk
|
r32291 | s = {repo[tn].rev()} | ||
Simon King
|
r16820 | else: | ||
Martin von Zweigbergk
|
r32291 | s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)} | ||
Augie Fackler
|
r12715 | else: | ||
Martin von Zweigbergk
|
r32291 | s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'} | ||
Lucas Moscovicz
|
r20367 | return subset & s | ||
Matt Mackall
|
r11280 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('tagged', safe=True) | ||
Patrick Mezard
|
r12821 | def tagged(repo, subset, x): | ||
return tag(repo, subset, x) | ||||
FUJIWARA Katsunori
|
r27587 | @predicate('unstable()', safe=True) | ||
Pierre-Yves David
|
r17171 | def unstable(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """Non-obsolete changesets with obsolete ancestors. | ||
Patrick Mezard
|
r17291 | """ | ||
FUJIWARA Katsunori
|
r17259 | # i18n: "unstable" is a keyword | ||
FUJIWARA Katsunori
|
r17258 | getargs(x, 0, 0, _("unstable takes no arguments")) | ||
Pierre-Yves David
|
r17825 | unstables = obsmod.getrevs(repo, 'unstable') | ||
Lucas Moscovicz
|
r20367 | return subset & unstables | ||
Pierre-Yves David
|
r17171 | |||
FUJIWARA Katsunori
|
r27587 | @predicate('user(string)', safe=True) | ||
Idan Kamara
|
r13915 | def user(repo, subset, x): | ||
FUJIWARA Katsunori
|
r27584 | """User name contains string. The match is case-insensitive. | ||
Simon King
|
r16823 | |||
Matt Harbison
|
r30784 | Pattern matching is supported for `string`. See | ||
Yuya Nishihara
|
r30799 | :hg:`help revisions.patterns`. | ||
Matt Mackall
|
r13359 | """ | ||
Idan Kamara
|
r13915 | return author(repo, subset, x) | ||
Matt Mackall
|
r13359 | |||
Pulkit Goyal
|
r32435 | @predicate('wdir()', safe=True) | ||
Yuya Nishihara
|
r24419 | def wdir(repo, subset, x): | ||
Yuya Nishihara
|
r30701 | """Working directory. (EXPERIMENTAL)""" | ||
Yuya Nishihara
|
r24419 | # i18n: "wdir" is a keyword | ||
getargs(x, 0, 0, _("wdir takes no arguments")) | ||||
Yuya Nishihara
|
r25765 | if node.wdirrev in subset or isinstance(subset, fullreposet): | ||
return baseset([node.wdirrev]) | ||||
Yuya Nishihara
|
r24419 | return baseset() | ||
Yuya Nishihara
|
r29935 | def _orderedlist(repo, subset, x): | ||
Matt Mackall
|
r15898 | s = getstring(x, "internal error") | ||
if not s: | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Yuya Nishihara
|
r25341 | # remove duplicates here. it's difficult for caller to deduplicate sets | ||
# because different symbols can point to the same rev. | ||||
Yuya Nishihara
|
r25344 | cl = repo.changelog | ||
Yuya Nishihara
|
r25341 | ls = [] | ||
seen = set() | ||||
for t in s.split('\0'): | ||||
Yuya Nishihara
|
r25344 | try: | ||
# fast path for integer revision | ||||
r = int(t) | ||||
if str(r) != t or r not in cl: | ||||
raise ValueError | ||||
Durham Goode
|
r26143 | revs = [r] | ||
Yuya Nishihara
|
r25344 | except ValueError: | ||
Durham Goode
|
r26143 | revs = stringset(repo, subset, t) | ||
for r in revs: | ||||
if r in seen: | ||||
continue | ||||
if (r in subset | ||||
or r == node.nullrev and isinstance(subset, fullreposet)): | ||||
ls.append(r) | ||||
seen.add(r) | ||||
Yuya Nishihara
|
r25341 | return baseset(ls) | ||
Matt Mackall
|
r15898 | |||
Lucas Moscovicz
|
r20566 | # for internal use | ||
Yuya Nishihara
|
r29935 | @predicate('_list', safe=True, takeorder=True) | ||
def _list(repo, subset, x, order): | ||||
if order == followorder: | ||||
# slow path to take the subset order | ||||
return subset & _orderedlist(repo, fullreposet(repo), x) | ||||
else: | ||||
return _orderedlist(repo, subset, x) | ||||
def _orderedintlist(repo, subset, x): | ||||
Lucas Moscovicz
|
r20566 | s = getstring(x, "internal error") | ||
if not s: | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Lucas Moscovicz
|
r20566 | ls = [int(r) for r in s.split('\0')] | ||
Pierre-Yves David
|
r22876 | s = subset | ||
Lucas Moscovicz
|
r20566 | return baseset([r for r in ls if r in s]) | ||
Lucas Moscovicz
|
r20569 | # for internal use | ||
Yuya Nishihara
|
r29935 | @predicate('_intlist', safe=True, takeorder=True) | ||
def _intlist(repo, subset, x, order): | ||||
if order == followorder: | ||||
# slow path to take the subset order | ||||
return subset & _orderedintlist(repo, fullreposet(repo), x) | ||||
else: | ||||
return _orderedintlist(repo, subset, x) | ||||
def _orderedhexlist(repo, subset, x): | ||||
Lucas Moscovicz
|
r20569 | s = getstring(x, "internal error") | ||
if not s: | ||||
Pierre-Yves David
|
r22802 | return baseset() | ||
Lucas Moscovicz
|
r20569 | cl = repo.changelog | ||
ls = [cl.rev(node.bin(r)) for r in s.split('\0')] | ||||
Pierre-Yves David
|
r22877 | s = subset | ||
Lucas Moscovicz
|
r20569 | return baseset([r for r in ls if r in s]) | ||
Matt Mackall
|
r15898 | |||
Yuya Nishihara
|
r29935 | # for internal use | ||
@predicate('_hexlist', safe=True, takeorder=True) | ||||
def _hexlist(repo, subset, x, order): | ||||
if order == followorder: | ||||
# slow path to take the subset order | ||||
return subset & _orderedhexlist(repo, fullreposet(repo), x) | ||||
else: | ||||
return _orderedhexlist(repo, subset, x) | ||||
Matt Mackall
|
r11275 | methods = { | ||
"range": rangeset, | ||||
Yuya Nishihara
|
r30803 | "rangeall": rangeall, | ||
Yuya Nishihara
|
r30044 | "rangepre": rangepre, | ||
Yuya Nishihara
|
r30803 | "rangepost": rangepost, | ||
Bryan O'Sullivan
|
r16860 | "dagrange": dagrange, | ||
Matt Mackall
|
r11275 | "string": stringset, | ||
Jordi Gutiérrez Hermoso
|
r24932 | "symbol": stringset, | ||
Matt Mackall
|
r11275 | "and": andset, | ||
"or": orset, | ||||
"not": notset, | ||||
Durham Goode
|
r28217 | "difference": differenceset, | ||
Matt Mackall
|
r11275 | "list": listset, | ||
Yuya Nishihara
|
r25704 | "keyvalue": keyvaluepair, | ||
Matt Mackall
|
r11275 | "func": func, | ||
Kevin Gessner
|
r14070 | "ancestor": ancestorspec, | ||
"parent": parentspec, | ||||
Yuya Nishihara
|
r29931 | "parentpost": parentpost, | ||
Matt Mackall
|
r11275 | } | ||
Laurent Charignon
|
r24518 | def posttreebuilthook(tree, repo): | ||
# hook for extensions to execute code on the optimized tree | ||||
pass | ||||
Yuya Nishihara
|
r29955 | def match(ui, spec, repo=None, order=defineorder): | ||
"""Create a matcher for a single revision spec | ||||
If order=followorder, a matcher takes the ordering specified by the input | ||||
set. | ||||
""" | ||||
return matchany(ui, [spec], repo=repo, order=order) | ||||
def matchany(ui, specs, repo=None, order=defineorder): | ||||
Yuya Nishihara
|
r25927 | """Create a matcher that will include any revisions matching one of the | ||
Yuya Nishihara
|
r29955 | given specs | ||
If order=followorder, a matcher takes the ordering specified by the input | ||||
set. | ||||
""" | ||||
Yuya Nishihara
|
r25927 | if not specs: | ||
def mfunc(repo, subset=None): | ||||
return baseset() | ||||
return mfunc | ||||
if not all(specs): | ||||
raise error.ParseError(_("empty query")) | ||||
lookup = None | ||||
if repo: | ||||
lookup = repo.__contains__ | ||||
if len(specs) == 1: | ||||
Yuya Nishihara
|
r31024 | tree = revsetlang.parse(specs[0], lookup) | ||
Yuya Nishihara
|
r25927 | else: | ||
Yuya Nishihara
|
r31024 | tree = ('or', | ||
('list',) + tuple(revsetlang.parse(s, lookup) for s in specs)) | ||||
Yuya Nishihara
|
r29906 | |||
Matt Mackall
|
r14900 | if ui: | ||
Yuya Nishihara
|
r31024 | tree = revsetlang.expandaliases(ui, tree) | ||
tree = revsetlang.foldconcat(tree) | ||||
tree = revsetlang.analyze(tree, order) | ||||
tree = revsetlang.optimize(tree) | ||||
Laurent Charignon
|
r24518 | posttreebuilthook(tree, repo) | ||
Yuya Nishihara
|
r29906 | return makematcher(tree) | ||
def makematcher(tree): | ||||
"""Create a matcher from an evaluatable tree""" | ||||
Yuya Nishihara
|
r24114 | def mfunc(repo, subset=None): | ||
if subset is None: | ||||
Yuya Nishihara
|
r24115 | subset = fullreposet(repo) | ||
Yuya Nishihara
|
r31810 | return getset(repo, subset, tree) | ||
Matt Mackall
|
r11275 | return mfunc | ||
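# A hedged usage sketch of the matcher returned above: callers typically build
# it once and apply it to a repo (and optionally a subset). 'ui' and 'repo'
# are assumed to be the usual Mercurial objects supplied by the caller; the
# function name and the revset string are illustrative.
def _draftheadssketch(ui, repo):
    m = match(ui, 'head() and draft()', repo=repo)
    return m(repo)          # smartset of matching revisions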
Patrick Mezard
|
r12821 | |||
FUJIWARA Katsunori
|
r28393 | def loadpredicate(ui, extname, registrarobj): | ||
"""Load revset predicates from specified registrarobj | ||||
""" | ||||
for name, func in registrarobj._table.iteritems(): | ||||
symbols[name] = func | ||||
if func._safe: | ||||
safesymbols.add(name) | ||||
FUJIWARA Katsunori
|
r28395 | # load built-in predicates explicitly to setup safesymbols | ||
loadpredicate(None, None, predicate) | ||||
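# A hedged sketch of where loadpredicate() input usually comes from: an
# extension declares its own registrar.revsetpredicate() table and decorates
# its functions, and the extension loader hands that table to loadpredicate().
# The predicate name 'mypredicate()' is an illustrative assumption.
#
#     revsetpredicate = registrar.revsetpredicate()
#
#     @revsetpredicate('mypredicate()')
#     def mypredicate(repo, subset, x):
#         getargs(x, 0, 0, _("mypredicate takes no arguments"))
#         return subset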
Patrick Mezard
|
r12823 | # tell hggettext to extract docstrings from these functions: | ||
i18nfunctions = symbols.values() | ||||