##// END OF EJS Templates
index: move more fields onto nodetree type...
index: move more fields onto nodetree type. The fields moved are the ones that are not related to how the nodetree is used in the index and that will make sense for the new nodetree instance for a subset of the index that I'll add later. Differential Revision: https://phab.mercurial-scm.org/D4109

File last commit:

r38892:a01200b2 default
r38949:c2c25355 default
Show More
scmutil.py
1753 lines | 61.1 KiB | text/x-python | PythonLexer
Adrian Buehlmann
add: introduce a warning message for non-portable filenames (issue2756) (BC)...
r13962 # scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
Gregory Szorc
scmutil: use absolute_import
r27482 from __future__ import absolute_import
import errno
import glob
Augie Fackler
cleanup: replace uses of util.(md5|sha1|sha256|sha512) with hashlib.\1...
r29341 import hashlib
Gregory Szorc
scmutil: use absolute_import
r27482 import os
import re
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 import socket
Yuya Nishihara
extdata: use subprocess so we don't have to chdir() manually
r34462 import subprocess
obsolete: reports the number of local changeset obsoleted when unbundling...
r33249 import weakref
Gregory Szorc
scmutil: use absolute_import
r27482
from .i18n import _
Yuya Nishihara
scmutil: introduce binnode(ctx) as paired function with intrev(ctx)...
r32656 from .node import (
Martin von Zweigbergk
scmutil: handle full hex nodeids in revsymbol()...
r37546 bin,
Jun Wu
scmutil: add a cleanupnodes method for developers...
r33088 hex,
nullid,
Yuya Nishihara
scmutil: extract helper functions that returns human-readable change id...
r34328 short,
Yuya Nishihara
scmutil: introduce binnode(ctx) as paired function with intrev(ctx)...
r32656 wdirid,
wdirrev,
)
Gregory Szorc
scmutil: use absolute_import
r27482 from . import (
encoding,
error,
match as matchmod,
Jun Wu
scmutil: add a cleanupnodes method for developers...
r33088 obsolete,
obsolete: reports the number of local changeset obsoleted when unbundling...
r33249 obsutil,
Gregory Szorc
scmutil: use absolute_import
r27482 pathutil,
phases,
Pulkit Goyal
py3: make scmutil.rcpath() return bytes...
r30305 pycompat,
Yuya Nishihara
revset: split language services to revsetlang module (API)...
r31024 revsetlang,
Gregory Szorc
scmutil: use absolute_import
r27482 similar,
Matt Mackall
extdata: add extdatasource reader...
r34457 url,
Gregory Szorc
scmutil: use absolute_import
r27482 util,
Mark Thomas
scmutil: handle conflicting files and dirs in origbackuppath...
r34544 vfs,
Gregory Szorc
scmutil: use absolute_import
r27482 )
Kevin Bullock
scmutil: split platform-specific bits into their own modules...
r18690
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 from .utils import (
Yuya Nishihara
procutil: bulk-replace function calls to point to new module
r37138 procutil,
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 stringutil,
)
Jun Wu
codemod: use pycompat.iswindows...
r34646 if pycompat.iswindows:
Gregory Szorc
scmutil: use absolute_import
r27482 from . import scmwindows as scmplatform
Kevin Bullock
scmutil: split platform-specific bits into their own modules...
r18690 else:
Gregory Szorc
scmutil: use absolute_import
r27482 from . import scmposix as scmplatform
Kevin Bullock
scmutil: split platform-specific bits into their own modules...
r18690
Yuya Nishihara
scmutil: extend termwidth() to return terminal height, renamed to termsize()...
r30314 termsize = scmplatform.termsize
Adrian Buehlmann
add: introduce a warning message for non-portable filenames (issue2756) (BC)...
r13962
Martin von Zweigbergk
status: create class for status lists...
r22913 class status(tuple):
'''Named tuple with a list of files per status. The 'deleted', 'unknown'
and 'ignored' properties are only relevant to the working copy.
'''
__slots__ = ()
def __new__(cls, modified, added, removed, deleted, unknown, ignored,
clean):
return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
ignored, clean))
@property
def modified(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files that have been modified'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[0]
@property
def added(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files that have been added'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[1]
@property
def removed(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files that have been removed'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[2]
@property
def deleted(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files that are in the dirstate, but have been deleted from the
working copy (aka "missing")
'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[3]
@property
def unknown(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files not in the dirstate that are not ignored'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[4]
@property
def ignored(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files not in the dirstate that are ignored (by _dirignore())'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[5]
@property
def clean(self):
Martin von Zweigbergk
status: update and move documentation of status types to status class...
r22915 '''files that have not been modified'''
Martin von Zweigbergk
status: create class for status lists...
r22913 return self[6]
def __repr__(self, *args, **kwargs):
Augie Fackler
scmutil: fix __repr__ of status tuple...
r37940 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
r'unknown=%s, ignored=%s, clean=%s>') %
Yuya Nishihara
stringutil: flip the default of pprint() to bprefix=False...
r37961 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
Martin von Zweigbergk
status: create class for status lists...
r22913
Augie Fackler
itersubrepos: move to scmutil to break a direct import cycle
r20392 def itersubrepos(ctx1, ctx2):
"""find subrepos in ctx1 or ctx2"""
# Create a (subpath, ctx) mapping where we prefer subpaths from
# ctx1. The subpaths from ctx2 are important when the .hgsub file
# has been modified (in ctx2) but not yet committed (in ctx1).
subpaths = dict.fromkeys(ctx2.substate, ctx2)
subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
Matt Harbison
scmutil: consistently return subrepos relative to ctx1 from itersubrepos()...
r25418
missing = set()
for subpath in ctx2.substate:
if subpath not in ctx1.substate:
del subpaths[subpath]
missing.add(subpath)
Augie Fackler
itersubrepos: move to scmutil to break a direct import cycle
r20392 for subpath, ctx in sorted(subpaths.iteritems()):
yield subpath, ctx.sub(subpath)
Matt Harbison
scmutil: consistently return subrepos relative to ctx1 from itersubrepos()...
r25418 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
# status and diff will have an accurate result when it does
# 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
# against itself.
for subpath in missing:
yield subpath, ctx2.nullsub(subpath, ctx1)
Patrick Mezard
discovery: add extinct changesets to outgoing.excluded...
r17248 def nochangesfound(ui, repo, excluded=None):
'''Report no changes for push/pull, excluded is None or a list of
nodes excluded from the push/pull.
'''
secretlist = []
if excluded:
for n in excluded:
ctx = repo[n]
if ctx.phase() >= phases.secret and not ctx.extinct():
secretlist.append(n)
Matt Mackall
scmutil: unify some 'no changes found' messages...
r15993 if secretlist:
ui.status(_("no changes found (ignored %d secret changesets)\n")
% len(secretlist))
else:
ui.status(_("no changes found\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 def callcatch(ui, func):
"""call func() with global exception handling
return func() if no exception happens. otherwise do some error handling
and return an exit code accordingly. does not handle all exceptions.
"""
try:
Yuya Nishihara
dispatch: print traceback in scmutil.callcatch() if --traceback specified...
r32041 try:
return func()
except: # re-raises
ui.traceback()
raise
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 # Global exception handling, alphabetically
# Mercurial-specific first, followed by built-in and library exceptions
except error.LockHeld as inst:
if inst.errno == errno.ETIMEDOUT:
FUJIWARA Katsunori
lock: avoid unintentional lock acquisition at failure of readlock...
r32089 reason = _('timed out waiting for lock held by %r') % inst.locker
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 else:
FUJIWARA Katsunori
lock: avoid unintentional lock acquisition at failure of readlock...
r32089 reason = _('lock held by %r') % inst.locker
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s: %s\n") % (
inst.desc or stringutil.forcebytestr(inst.filename), reason))
FUJIWARA Katsunori
lock: avoid unintentional lock acquisition at failure of readlock...
r32089 if not inst.locker:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(lock might be very busy)\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.LockUnavailable as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: could not lock %s: %s\n") %
(inst.desc or stringutil.forcebytestr(inst.filename),
encoding.strtolocal(inst.strerror)))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.OutOfBandError as inst:
if inst.args:
msg = _("abort: remote error:\n")
else:
msg = _("abort: remote error\n")
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(msg)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if inst.args:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(''.join(inst.args))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if inst.hint:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error('(%s)\n' % inst.hint)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.RepoError as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s!\n") % inst)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if inst.hint:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(%s)\n") % inst.hint)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.ResponseError as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s") % inst.args[0])
Augie Fackler
scmutil: avoid using basestring and add explicit handling of unicodes...
r36679 msg = inst.args[1]
if isinstance(msg, type(u'')):
msg = pycompat.sysbytes(msg)
Augie Fackler
scmutil: fix oversight in b76248e51605c6 where I forgot to use msg...
r36713 if not isinstance(msg, bytes):
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(" %r\n" % (msg,))
Augie Fackler
scmutil: fix oversight in b76248e51605c6 where I forgot to use msg...
r36713 elif not msg:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_(" empty string\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 else:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.CensoredNodeError as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: file censored %s!\n") % inst)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.RevlogError as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s!\n") % inst)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.InterventionRequired as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error("%s\n" % inst)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if inst.hint:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(%s)\n") % inst.hint)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 return 1
Yuya Nishihara
revlog: map rev(wdirid) to WdirUnsupported exception...
r32657 except error.WdirUnsupported:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: working directory revision cannot be specified\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except error.Abort as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s\n") % inst)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if inst.hint:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(%s)\n") % inst.hint)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except ImportError as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 m = stringutil.forcebytestr(inst).split()[-1]
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 if m in "mpatch bdiff".split():
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(did you forget to compile extensions?)\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 elif m in "zlib".split():
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("(is your Python install correct?)\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except IOError as inst:
if util.safehasattr(inst, "code"):
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 elif util.safehasattr(inst, "reason"):
try: # usually it is in the form (errno, strerror)
reason = inst.reason.args[1]
except (AttributeError, IndexError):
# it might be anything, for example a string
reason = inst.reason
Pulkit Goyal
py3: replace `unicode` with pycompat.unicode...
r38332 if isinstance(reason, pycompat.unicode):
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 # SSLError of Python 2.7.9 contains a unicode
Pulkit Goyal
py3: use encoding.unitolocal instead of .encode(encoding.encoding)
r32152 reason = encoding.unitolocal(reason)
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: error: %s\n") % reason)
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 elif (util.safehasattr(inst, "args")
and inst.args and inst.args[0] == errno.EPIPE):
pass
elif getattr(inst, "strerror", None):
if getattr(inst, "filename", None):
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s: %s\n") % (
Yuya Nishihara
py3: fix some unicode madness in global exception catcher
r36659 encoding.strtolocal(inst.strerror),
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 stringutil.forcebytestr(inst.filename)))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 else:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 else:
raise
except OSError as inst:
if getattr(inst, "filename", None) is not None:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s: '%s'\n") % (
Yuya Nishihara
py3: fix some unicode madness in global exception catcher
r36659 encoding.strtolocal(inst.strerror),
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 stringutil.forcebytestr(inst.filename)))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 else:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except MemoryError:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: out of memory\n"))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520 except SystemExit as inst:
# Commands shouldn't sys.exit directly, but give a return code.
# Just in case catch this and and pass exit code to caller.
return inst.code
except socket.error as inst:
Rodrigo Damazio Bovendorp
dispatch: making all hg abortions be output with a specific label...
r38791 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
Jun Wu
dispatch: move part of callcatch to scmutil...
r30520
return -1
Kevin Bullock
scmutil: add bad character checking to checknewlabel...
r17821 def checknewlabel(repo, lbl, kind):
Durham Goode
translations: change label integer error to not specify the kind of label...
r19070 # Do not use the "kind" parameter in ui output.
# It makes strings difficult to translate.
Kevin Bullock
scmutil: add function to validate new branch, tag, and bookmark names...
r17817 if lbl in ['tip', '.', 'null']:
Pierre-Yves David
error: get Abort from 'error' instead of 'util'...
r26587 raise error.Abort(_("the name '%s' is reserved") % lbl)
Kevin Bullock
scmutil: add bad character checking to checknewlabel...
r17821 for c in (':', '\0', '\n', '\r'):
if c in lbl:
Augie Fackler
scmutil: fix a repr in an error message on Python 3...
r36587 raise error.Abort(
_("%r cannot be used in a name") % pycompat.bytestr(c))
Durham Goode
bookmark: don't allow integers as bookmark/branch/tag names...
r18566 try:
int(lbl)
Pierre-Yves David
error: get Abort from 'error' instead of 'util'...
r26587 raise error.Abort(_("cannot use an integer as a name"))
Durham Goode
bookmark: don't allow integers as bookmark/branch/tag names...
r18566 except ValueError:
pass
Boris Feld
label: enforce the lack of leading or trailing white space...
r36162 if lbl.strip() != lbl:
raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
Kevin Bullock
scmutil: add function to validate new branch, tag, and bookmark names...
r17817
Adrian Buehlmann
move checkfilename from util to scmutil...
r13974 def checkfilename(f):
'''Check that the filename f is an acceptable filename for a tracked file'''
if '\r' in f or '\n' in f:
Yuya Nishihara
py3: remove b'' from error message of disallowed filename
r38353 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
% pycompat.bytestr(f))
Adrian Buehlmann
move checkfilename from util to scmutil...
r13974
Adrian Buehlmann
add: introduce a warning message for non-portable filenames (issue2756) (BC)...
r13962 def checkportable(ui, f):
'''Check if filename f is portable and warn or abort depending on config'''
Adrian Buehlmann
move checkfilename from util to scmutil...
r13974 checkfilename(f)
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 abort, warn = checkportabilityalert(ui)
if abort or warn:
Adrian Buehlmann
add: introduce a warning message for non-portable filenames (issue2756) (BC)...
r13962 msg = util.checkwinfilename(f)
if msg:
Yuya Nishihara
procutil: bulk-replace function calls to point to new module
r37138 msg = "%s: %s" % (msg, procutil.shellquote(f))
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 if abort:
Pierre-Yves David
error: get Abort from 'error' instead of 'util'...
r26587 raise error.Abort(msg)
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 ui.warn(_("warning: %s\n") % msg)
Kevin Gessner
add: notify when adding a file that would cause a case-folding collision...
r14068
Kevin Gessner
scmutil: refactor ui.portablefilenames processing...
r14067 def checkportabilityalert(ui):
'''check if the user's config requests nothing, a warning, or abort for
non-portable filenames'''
Jun Wu
codemod: register core configitems using a script...
r33499 val = ui.config('ui', 'portablefilenames')
Kevin Gessner
scmutil: refactor ui.portablefilenames processing...
r14067 lval = val.lower()
Yuya Nishihara
stringutil: bulk-replace call sites to point to new module...
r37102 bval = stringutil.parsebool(val)
Jun Wu
codemod: use pycompat.iswindows...
r34646 abort = pycompat.iswindows or lval == 'abort'
Kevin Gessner
scmutil: refactor ui.portablefilenames processing...
r14067 warn = bval or lval == 'warn'
if bval is None and not (warn or abort or lval == 'ignore'):
Adrian Buehlmann
add: introduce a warning message for non-portable filenames (issue2756) (BC)...
r13962 raise error.ConfigError(
_("ui.portablefilenames value is invalid ('%s')") % val)
Kevin Gessner
scmutil: refactor ui.portablefilenames processing...
r14067 return abort, warn
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 class casecollisionauditor(object):
Joshua Redstone
scmutil: 25% speedup in casecollisionauditor...
r17201 def __init__(self, ui, abort, dirstate):
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 self._ui = ui
self._abort = abort
Joshua Redstone
scmutil: 25% speedup in casecollisionauditor...
r17201 allfiles = '\0'.join(dirstate._map)
self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
self._dirstate = dirstate
# The purpose of _newfiles is so that we don't complain about
# case collisions if someone were to call this object with the
# same filename twice.
self._newfiles = set()
Kevin Gessner
scmutil: refactor ui.portablefilenames processing...
r14067
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 def __call__(self, f):
FUJIWARA Katsunori
scmutil: skip checks in "casecollisionauditor" if filename is already checked...
r20006 if f in self._newfiles:
return
FUJIWARA Katsunori
i18n: use UTF-8 string to lower filename for case collision check...
r14980 fl = encoding.lower(f)
FUJIWARA Katsunori
scmutil: skip checks in "casecollisionauditor" if filename is already checked...
r20006 if fl in self._loweredfiles and f not in self._dirstate:
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 msg = _('possible case-folding collision for %s') % f
if self._abort:
Pierre-Yves David
error: get Abort from 'error' instead of 'util'...
r26587 raise error.Abort(msg)
Adrian Buehlmann
scmutil: introduce casecollisionauditor...
r14138 self._ui.warn(_("warning: %s\n") % msg)
Joshua Redstone
scmutil: 25% speedup in casecollisionauditor...
r17201 self._loweredfiles.add(fl)
self._newfiles.add(f)
Adrian Buehlmann
move opener from util to scmutil
r13970
Gregory Szorc
repoview: move function for computing filtered hash...
r24723 def filteredhash(repo, maxrev):
"""build hash of filtered revisions in the current repoview.
Multiple caches perform up-to-date validation by checking that the
tiprev and tipnode stored in the cache file match the current repository.
However, this is not sufficient for validating repoviews because the set
of revisions in the view may change without the repository tiprev and
tipnode changing.
This function hashes all the revs filtered from the view and returns
that SHA-1 digest.
"""
cl = repo.changelog
if not cl.filteredrevs:
return None
key = None
revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
if revs:
Augie Fackler
cleanup: replace uses of util.(md5|sha1|sha256|sha512) with hashlib.\1...
r29341 s = hashlib.sha1()
Gregory Szorc
repoview: move function for computing filtered hash...
r24723 for rev in revs:
Augie Fackler
scmutil: fix key generation to portably bytestringify integer
r31349 s.update('%d;' % rev)
Gregory Szorc
repoview: move function for computing filtered hash...
r24723 key = s.digest()
return key
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
Mads Kiilerich
help: improve hgweb help...
r17104 '''yield every hg repository under path, always recursively.
The recurse flag will only control recursion into repo working dirs'''
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 def errhandler(err):
if err.filename == path:
raise err
Augie Fackler
walkrepos: use getattr instead of hasattr for samestat
r14961 samestat = getattr(os.path, 'samestat', None)
if followsym and samestat is not None:
Adrian Buehlmann
scmutil: rename local function _add_dir_if_not_there
r14227 def adddir(dirlst, dirname):
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 dirstat = os.stat(dirname)
Martin von Zweigbergk
walkrepos: don't reimplement any()...
r36356 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 if not match:
dirlst.append(dirstat)
return not match
else:
followsym = False
if (seen_dirs is None) and followsym:
seen_dirs = []
Adrian Buehlmann
scmutil: rename local function _add_dir_if_not_there
r14227 adddir(seen_dirs, path)
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
dirs.sort()
if '.hg' in dirs:
yield root # found a repository
qroot = os.path.join(root, '.hg', 'patches')
if os.path.isdir(os.path.join(qroot, '.hg')):
yield qroot # we have a patch queue repo here
if recurse:
# avoid recursing inside the .hg directory
dirs.remove('.hg')
else:
dirs[:] = [] # don't descend further
elif followsym:
newdirs = []
for d in dirs:
fname = os.path.join(root, d)
Adrian Buehlmann
scmutil: rename local function _add_dir_if_not_there
r14227 if adddir(seen_dirs, fname):
Adrian Buehlmann
move walkrepos from util to scmutil
r13975 if os.path.islink(fname):
for hgname in walkrepos(fname, True, seen_dirs):
yield hgname
else:
newdirs.append(d)
dirs[:] = newdirs
Adrian Buehlmann
move rcpath from util to scmutil
r13984
Yuya Nishihara
scmutil: introduce binnode(ctx) as paired function with intrev(ctx)...
r32656 def binnode(ctx):
"""Return binary node id for a given basectx"""
node = ctx.node()
if node is None:
return wdirid
return node
Yuya Nishihara
scmutil: pass ctx object to intrev()...
r32654 def intrev(ctx):
"""Return integer for a given basectx that can be used in comparison or
Yuya Nishihara
scmutil: add function to help handling workingctx in arithmetic operation...
r24582 arithmetic operation"""
Yuya Nishihara
scmutil: pass ctx object to intrev()...
r32654 rev = ctx.rev()
Yuya Nishihara
scmutil: add function to help handling workingctx in arithmetic operation...
r24582 if rev is None:
Yuya Nishihara
changeset_printer: use node.wdirrev to calculate meaningful parentrevs...
r25739 return wdirrev
Yuya Nishihara
scmutil: add function to help handling workingctx in arithmetic operation...
r24582 return rev
Yuya Nishihara
scmutil: extract helper functions that returns human-readable change id...
r34328 def formatchangeid(ctx):
"""Format changectx as '{rev}:{node|formatnode}', which is the default
Yuya Nishihara
cmdutil: drop aliases for logcmdutil functions (API)...
r35906 template provided by logcmdutil.changesettemplater"""
Yuya Nishihara
scmutil: extract helper functions that returns human-readable change id...
r34328 repo = ctx.repo()
return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # debug mode shows the full 40-digit hash, otherwise the short form
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
Martin von Zweigbergk
scmutil: rename resolvepartialhexnodeid() to resolvehexnodeidprefix()...
r37696 def resolvehexnodeidprefix(repo, prefix):
Martin von Zweigbergk
revisions: allow "x123" to refer to nodeid prefix "123"...
r38891 if (prefix.startswith('x') and
repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
prefix = prefix[1:]
Martin von Zweigbergk
lookup: add option to disambiguate prefix within revset...
r38878 try:
# Uses unfiltered repo because it's faster when prefix is ambiguous/
# This matches the shortesthexnodeidprefix() function below.
node = repo.unfiltered().changelog._partialmatch(prefix)
except error.AmbiguousPrefixLookupError:
revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
if revset:
# Clear config to avoid infinite recursion
configoverrides = {('experimental',
'revisions.disambiguatewithin'): None}
with repo.ui.configoverride(configoverrides):
revs = repo.anyrevs([revset], user=True)
matches = []
for rev in revs:
node = repo.changelog.node(rev)
if hex(node).startswith(prefix):
matches.append(node)
if len(matches) == 1:
return matches[0]
raise
Martin von Zweigbergk
context: extract partial nodeid lookup method to scmutil...
r37522 if node is None:
return
repo.changelog.rev(node) # make sure node isn't filtered
return node
Martin von Zweigbergk
shortest: make isrev() a top-level function...
r38890 def mayberevnum(repo, prefix):
"""Checks if the given prefix may be mistaken for a revision number"""
try:
i = int(prefix)
# if we are a pure int, then starting with zero will not be
# confused as a rev; or, obviously, if the int is larger
# than the value of the tip rev
if prefix[0:1] == b'0' or i > len(repo):
return False
return True
except ValueError:
return False
Martin von Zweigbergk
shortest: cache disambiguation revset...
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.

    Raises error.RepoLookupError if the node does not exist.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # Prefix with 'x' instead of lengthening the hex prefix when
            # it could be read as a revision number.
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # Otherwise, keep extending the prefix until it can no longer be
        # mistaken for a revision number.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # The (user-provided) disambiguation revset is expensive to
        # evaluate, so reuse it from "cache" when possible.
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            # Find the shortest prefix that is unique within the
            # configured revset only (not the whole repo).
            hexnode = hex(node)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
Martin von Zweigbergk
scmutil: introduce shortesthexnodeidprefix()...
r37698
Martin von Zweigbergk
revset: use revsymbol() for checking if a symbol is valid...
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
Martin von Zweigbergk
scmutil: add method for looking up a context given a revision symbol...
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            # Interpret the symbol as a revision number.  The stringified
            # integer must round-trip exactly (so "012" is rejected) and
            # must fall inside the changelog (after negative wraparound)
            # or be the working-directory pseudo-rev.
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            # Looks like a full hex nodeid.
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Last resort: an unambiguous hex nodeid prefix.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]
        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        # Generic message for non-visibility filters (e.g. "served").
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden; check whether it is obsolete so the
    # message can explain why it is not visible.
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid
    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
Martin von Zweigbergk
scmutil: add method for looking up a context given a revision symbol...
r37289
Jun Wu
rebase: initial support for multiple destinations...
def revsingle(repo, revspec, default='.', localalias=None):
    """Return the context of the last revision matched by revspec.

    Falls back to ``default`` when revspec is empty (but not when it is
    the integer 0).  Aborts if the revset evaluates to an empty set."""
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
Matt Mackall
scmutil: move revsingle/pair/range from cmdutil...
r14319
Yuya Nishihara
revpair: restrict odd-range handling to top-level x:y expression (issue4774)...
r26020 def _pairspec(revspec):
Yuya Nishihara
revset: split language services to revsetlang module (API)...
r31024 tree = revsetlang.parse(revspec)
Yuya Nishihara
revpair: restrict odd-range handling to top-level x:y expression (issue4774)...
r26020 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
Matt Mackall
scmutil: move revsingle/pair/range from cmdutil...
def revpair(repo, revs):
    """Resolve user revision specs into a pair of changectx objects.

    An empty ``revs`` yields (working parent, working directory).  A single
    non-range spec yields (rev, working directory); an explicit range always
    yields two changeset contexts."""
    if not revs:
        return repo['.'], repo[None]

    resolved = revrange(repo, revs)

    # Pick the endpoints cheaply when the smartset already knows its order.
    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
Matt Mackall
scmutil: move revsingle/pair/range from cmdutil...
r14319
Jun Wu
rebase: initial support for multiple destinations...
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped into a rev() expression; everything else
    # is passed through untouched.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
Matt Mackall
scmutil: fold in wdutil
r14320
Yuya Nishihara
changeset_printer: move _meaningful_parentrevs() to scmutil...
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # A real merge: both parents matter.
        return parents
    if repo.ui.debugflag:
        # Debug output always shows the first parent plus null.
        return [parents[0], repo['null']]
    # Suppress the parent when it is just the immediately preceding rev.
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents
Matt Mackall
scmutil: fold in wdutil
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Pattern carries an explicit kind prefix; leave it alone.
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # Nothing matched on disk: keep the original pattern.
            expanded.append(kindpat)
    return expanded
Pierre-Yves David
match: remove a mutable default argument...
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.

    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: 'm' is the matcher assigned below; the closure late-binds,
        # so by the time this callback can fire, 'm' exists.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)
    if m.always():
        # An always-matcher means the patterns were effectively empty.
        pats = []
    return m, pats
Pierre-Yves David
match: remove a mutable default argument...
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
Matt Mackall
scmutil: fold in wdutil
r14320
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
Matt Harbison
scmutil: add an optional parameter to matcher factories for a bad() override...
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
Matt Mackall
scmutil: fold in wdutil
r14320
Denis Laxalde
revset: extract a parsefollowlinespattern helper function...
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError with ``msg`` unless the pattern resolves to
    exactly one file in the ``rev`` changeset.
    """
    if not matchmod.patkind(pat):
        # Plain path: canonicalize it relative to the repo root.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
Siddharth Agarwal
origpath: move from cmdutil to scmutil...
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Side effects: when origbackuppath is configured, this may create the
    backup directory and remove files/directories that conflict with the
    backup file's path inside it.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

    # Remove any files that conflict with the backup file's path
    for f in reversed(list(util.finddirs(filepathfromroot))):
        if origvfs.isfileorlink(f):
            ui.note(_('removing conflicting file: %s\n')
                    % origvfs.join(f))
            origvfs.unlink(f)
            break

    origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
Siddharth Agarwal
origpath: move from cmdutil to scmutil...
r27651
Jun Wu
scmutil: make cleanupnodes delete divergent bookmarks...
r33331 class _containsnode(object):
"""proxy __contains__(node) to container.__contains__ which accepts revs"""
def __init__(self, repo, revcontainer):
self._torev = repo.changelog.rev
self._revcontains = revcontainer.__contains__
def __contains__(self, node):
return self._revcontains(self._torev(node))
Martin von Zweigbergk
scmutil: make cleanupnodes optionally also fix the phase...
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.

    If 'fixphase' is True, the phases of the new nodes are adjusted within
    the same transaction; 'targetphase' (which requires 'fixphase') is used
    as the desired phase, bounded below by the parents' phases.

    'backup' is forwarded to repair.delayedstrip() when obsolescence is not
    enabled and the old nodes are stripped instead of obsoleted.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # Caller-provided move takes precedence.
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # Invert the replacements mapping so each new node knows the old
        # nodes it came from.
        precursors = {}
        for oldnode, newnodes in replacements.items():
            for newnode in newnodes:
                precursors.setdefault(newnode, []).append(oldnode)

        # Process new nodes in revision order so a node's parents already
        # have their (possibly updated) phase recorded in 'newphases'.
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)

        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
Jun Wu
scmutil: add a cleanupnodes method for developers...
r33088
Sushil khanchi
addremove: remove dry_run, similarity from scmutil.addremove (API)...
def addremove(repo, matcher, prefix, opts=None):
    """Schedule new files for addition and missing files for removal.

    'matcher' limits which files are considered; 'prefix' is joined onto
    paths when reporting subrepo problems.  Recognized opts keys: 'dry_run',
    'similarity' (0-100, used for rename detection), 'subrepos'.

    Returns 1 if any explicitly named file could not be handled or a
    subrepo's addremove reported failure, 0 otherwise.  Raises error.Abort
    on an invalid similarity value.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)
    wctx = repo[None]
    # Recurse into subrepos that are explicitly requested or matched.
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # Only warn for explicitly named files; always record the failure.
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
Matt Mackall
addremove: return 1 if we failed to handle any explicit files
r16167
Siddharth Agarwal
scmutil: add a function to mark that files have been operated on...
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the given files could not be matched, 0 otherwise.'''
    # NOTE: the badfn lambda late-binds 'rejected', which is assigned on the
    # next line; it is bound before the matcher can ever invoke the callback.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
Siddharth Agarwal
scmutil.addremove: factor out dirstate walk into another function...
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    auditor = pathutil.pathauditor(repo.root, cached=True)

    dirstate = repo.dirstate
    wctx = repo[None]
    walkresults = dirstate.walk(matcher, subrepos=sorted(wctx.substate),
                                unknown=True, ignored=False, full=False)
    for fname, st in walkresults.iteritems():
        entry = dirstate[fname]
        if entry == '?':
            if auditor.check(fname):
                unknown.append(fname)
            elif not st:
                # untracked, unauditable and gone from disk
                deleted.append(fname)
        elif entry == 'r':
            # removed entries are kept for finding renames
            if st:
                forgotten.append(fname)
            else:
                removed.append(fname)
        elif not st:
            # tracked ('n', 'm' or 'a') but missing from disk
            deleted.append(fname)
        elif entry == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
Siddharth Agarwal
scmutil.addremove: factor out dirstate walk into another function...
r19150
Siddharth Agarwal
scmutil.addremove: factor out code to find renames...
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet about pairs the user named explicitly, unless verbose
        quiet = (matcher.exact(old) and matcher.exact(new)
                 and not repo.ui.verbose)
        if not quiet:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
Siddharth Agarwal
scmutil.addremove: factor out code to mark added/removed/renames...
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
Matt Mackall
scmutil: fold in wdutil
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record back to its original source, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is neither merged nor normal: re-mark it for a normal
            # state lookup, dropping the stale copy record
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added, not committed: there is no revision
            # to record copy data against, so warn instead
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                # ensure dst is at least tracked
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
Adrian Buehlmann
introduce new function scmutil.readrequires...
r14482
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric entry means the file itself is bogus
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
Idan Kamara
scmutil: introduce filecache...
r14928
Drew Gottlieb
requires: move requires file writing func from localrepo to scmutil...
def writerequires(opener, requirements):
    """Write the given requirements to the 'requires' file, sorted, one
    entry per line."""
    with opener('requires', 'w') as fp:
        for entry in sorted(requirements):
            fp.write("%s\n" % entry)
Drew Gottlieb
requires: move requires file writing func from localrepo to scmutil...
r24934
Siddharth Agarwal
scmutil: rename filecacheentry to filecachesubentry...
class filecachesubentry(object):
    """Tracks stat data for a single path backing a filecache entry."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once determined, None while unknown
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the path, unless it is known to be uncacheable."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat data can be trusted; optimistic while undetermined."""
        if self._cacheable is None:
            # we don't know yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        """True if the path appears to differ from the recorded stat data."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        curstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if curstat and self._cacheable is None:
            self._cacheable = curstat.cacheable()

        if not self._cacheable:
            return True

        if self.cachestat == curstat:
            return False
        self.cachestat = curstat
        return True

    @staticmethod
    def stat(path):
        """Return a util.cachestat for path, or None if it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            # a missing file maps to None; any other error propagates
            if e.errno != errno.ENOENT:
                raise
Siddharth Agarwal
scmutil: introduce a filecacheentry that can watch multiple paths
r20044 class filecacheentry(object):
def __init__(self, paths, stat=True):
self._entries = []
for path in paths:
self._entries.append(filecachesubentry(path, stat))
def changed(self):
'''true if any entry has changed'''
for entry in self._entries:
if entry.changed():
return True
return False
def refresh(self):
for entry in self._entries:
entry.refresh()
Idan Kamara
scmutil: introduce filecache...
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        # relative paths whose stat data guards the cached value
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name.
        # sname is the native-str attribute name; name is its bytes twin,
        # used as the key into obj._filecache.
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            # instance dict holds a cached copy; no stat() is done here
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            # mirror normal attribute semantics for missing values
            raise AttributeError(self.sname)
Siddharth Agarwal
scmutil: add a way for a subprocess to be run with an inheritable lock...
r26490
Matt Mackall
extdata: add extdatasource reader...
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """
    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # "<revspec>[ <value>]" per line; value defaults to ""
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # reap the child and close the pipe/URL even if parsing failed
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
Siddharth Agarwal
scmutil: add a way for a subprocess to be run with an inheritable lock...
r26490 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
if lock is None:
raise error.LockInheritanceContractViolation(
'lock can only be inherited while held')
if environ is None:
environ = {}
with lock.inherit() as locker:
environ[envvar] = locker
return repo.ui.system(cmd, environ=environ, *args, **kwargs)
Siddharth Agarwal
scmutil: add a way for a repo's wlock to be inherited by a subprocess...
r26491
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
Pierre-Yves David
scmutil: extract general delta config handling in a function...
r26906
Martin von Zweigbergk
progress: create helper class for incrementing progress...
class progress(object):
    """Helper tracking a position for a ui.progress() topic.

    Usable as a context manager; leaving the context marks the topic as
    complete."""

    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.topic = topic
        self.unit = unit
        self.total = total
        self.pos = 0

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move to an absolute position, optionally adjusting the total."""
        assert pos is not None
        self.pos = pos
        if total:
            self.total = total
        self._print(item)

    def increment(self, step=1, item="", total=None):
        """Advance the current position by step."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Signal that this topic is finished."""
        self.ui.progress(self.topic, None)

    def _print(self, item):
        # forward the current state to the ui layer
        self.ui.progress(self.topic, self.pos, item, self.unit, self.total)
Pierre-Yves David
scmutil: extract general delta config handling in a function...
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    for option in ('generaldelta', 'usegeneraldelta', 'sparse-revlog'):
        if ui.configbool('format', option):
            return True
    return False
Pierre-Yves David
scmutil: extract general delta config handling in a function...
r26906
Pierre-Yves David
format: introduce 'format.usegeneraldelta`...
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
Kostia Balytskyi
scmutil: add a simple key-value file helper...
r31553
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved key under which read() returns a free-form first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]
        try:
            # skip lines holding only a newline; 'if line' alone would
            # keep them and make the split below fail
            pairs = dict(line[:-1].split('=', 1)
                         for line in lines if line.strip())
            if self.firstlinekey in pairs:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(pairs)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            self._checkpair(k, v)
            lines.append("%s=%s\n" % (k, v))

        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

    def _checkpair(self, k, v):
        # validate a single key/value pair before serializing it
        if k == self.firstlinekey:
            e = "key name '%s' is reserved" % self.firstlinekey
            raise error.ProgrammingError(e)
        if not k[0:1].isalpha():
            e = "keys must start with a letter in a key-value file"
            raise error.ProgrammingError(e)
        if not k.isalnum():
            e = "invalid key name in a simple key-value file"
            raise error.ProgrammingError(e)
        if '\n' in v:
            e = "invalid value in a simple key-value file"
            raise error.ProgrammingError(e)
obsolete: reports the number of local changeset obsoleted when unbundling...
r33249
Boris Feld
transaction-summary: display the summary for all transactions...
# transaction name prefixes for which an "obsoleted N changesets" summary
# is issued (matched via startswith in registersummarycallback)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction name prefixes for which new changesets are summarized
_reportnewcssource = [
    'pull',
    'unbundle',
]
Matt Harbison
scmutil: teach the file prefetch hook to handle multiple commits...
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
# a list of (repo, revs, match) prefetch functions, invoked by
# prefetchfiles() with the (possibly badmatch-wrapped) matcher
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
Boris Feld
transaction-summary: display the summary for all transactions...
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # does the transaction name start with any of the given prefixes?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # 0-padded numeric prefix keeps the report callbacks ordered
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unfiltered revisions per instability type
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now, so the callback can report the delta only
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only changesets that already existed locally and became public
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev not in newrevs
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
Pulkit Goyal
scmutil: move construction of instability count message to separate fn...
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    # no new instabilities -> no message (caller treats None as "nothing
    # to warn about")
    if delta <= 0:
        return None
    return _('%i new %s changesets\n') % (delta, instability)
Boris Feld
scmutil: extra utility to display a reasonable amount of nodes...
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Summarize *nodes* as a space-separated list of short hashes.

    All nodes are shown when there are at most *maxnumnodes* of them or
    when the ui is verbose; otherwise the list is truncated and the
    number of elided nodes is appended.
    """
    total = len(nodes)
    if total <= maxnumnodes or repo.ui.verbose:
        return ' '.join(map(short, nodes))
    visible = ' '.join(map(short, nodes[:maxnumnodes]))
    return _("%s and %d others") % (visible, total - maxnumnodes)
Boris Feld
scmutil: extra utility to display a reasonable amount of nodes...
r35185
Boris Feld
server: introduce a 'experimental.single-head-per-branch' option...
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) < 2:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
Matt Harbison
convert: allow the sink object to be wrapped when the extension isn't loaded...
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions wrap this function
    # to interpose their own sink behavior
    return sink
Pulkit Goyal
scmutil: add utility fn to return repo object with user passed revs unhidden...
r35512
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access only applies to a filtered repo, and must be enabled
    if not repo.filtername:
        return repo
    if not repo.ui.configbool('experimental', 'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # gather every hash-like symbol mentioned in the user specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, symbols)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags cache until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try to interpret the symbol as a revision number
        try:
            n = int(s)
            # valid revision numbers are 0..tiprev-1; using '<' (instead of
            # the previous off-by-one '<=') keeps the nonexistent revision
            # 'tiprev' from being pinned, and lets such a symbol fall
            # through to hash-prefix resolution below
            if n < tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # present unfiltered but not filtered: it is hidden
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise, treat the symbol as a (possibly partial) hex node id
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
David Demelier
scmutil: move repair.stripbmrevset as scmutil.bookmarkrevs (API)
r38146
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # ancestors of the bookmark, minus anything reachable from other
    # heads or other bookmarks
    query = ("ancestors(bookmark(%s)) - "
             "ancestors(head() and not bookmark(%s)) - "
             "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(query, mark, mark, mark)