# __init__.py - remotefilelog extension
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""remotefilelog causes Mercurial to lazily fetch file contents (EXPERIMENTAL)

This extension is HIGHLY EXPERIMENTAL. There are NO BACKWARDS COMPATIBILITY
GUARANTEES. This means that repositories created with this extension may
only be usable with the exact version of this extension/Mercurial that was
used. The extension attempts to enforce this in order to prevent repository
corruption.

remotefilelog works by fetching file contents lazily and storing them
in a cache on the client rather than in revlogs. This allows enormous
histories to be transferred only partially, making them easier to
operate on.

Configs:

    ``packs.maxchainlen`` specifies the maximum delta chain length in pack files

    ``packs.maxpacksize`` specifies the maximum pack file size

    ``packs.maxpackfilecount`` specifies the maximum number of packs in the
      shared cache (trees only for now)

    ``remotefilelog.backgroundprefetch`` runs prefetch in background when True

    ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
      update, and on other commands that use them. Different from pullprefetch.

    ``remotefilelog.gcrepack`` does garbage collection during repack when True

    ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
      it is garbage collected

    ``remotefilelog.repackonhggc`` runs repack on hg gc when True

    ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
      days after which it is no longer prefetched.

    ``remotefilelog.prefetchdelay`` specifies delay between background
      prefetches in seconds after operations that change the working copy parent

    ``remotefilelog.data.gencountlimit`` constrains the minimum number of data
      pack files required to be considered part of a generation. In particular,
      minimum number of pack files > gencountlimit.

    ``remotefilelog.data.generations`` list for specifying the lower bound of
      each generation of the data pack files. For example, the list
      ['100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations:
      [0, 1MB), [1MB, 100MB) and [100MB, infinity).

    ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
      include in an incremental data repack.

    ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file
      for it to be considered for an incremental data repack.

    ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
      to include in an incremental data repack.

    ``remotefilelog.history.gencountlimit`` constrains the minimum number of
      history pack files required to be considered part of a generation. In
      particular, minimum number of pack files > gencountlimit.

    ``remotefilelog.history.generations`` list for specifying the lower bound
      of each generation of the history pack files. For example, the list
      ['100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations:
      [0, 1MB), [1MB, 100MB) and [100MB, infinity).

    ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files
      to include in an incremental history repack.

    ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
      for it to be considered for an incremental history repack.

    ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
      files to include in an incremental history repack.

    ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
      background

    ``remotefilelog.cachepath`` path to cache

    ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
      group

    ``remotefilelog.cacheprocess`` binary to invoke for fetching file data

    ``remotefilelog.debug`` turn on remotefilelog-specific debug output

    ``remotefilelog.excludepattern`` pattern of files to exclude from pulls

    ``remotefilelog.includepattern`` pattern of files to include in pulls

    ``remotefilelog.fetchwarning`` message to print when too many
      single-file fetches occur

    ``remotefilelog.getfilesstep`` number of files to request in a single RPC

    ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
      files, otherwise use optimistic fetching

    ``remotefilelog.pullprefetch`` revset for selecting files that should be
      eagerly downloaded rather than lazily

    ``remotefilelog.reponame`` name of the repo. If set, used to partition
      data from other repos in a shared store.

    ``remotefilelog.server`` if true, enable server-side functionality

    ``remotefilelog.servercachepath`` path for caching blobs on the server

    ``remotefilelog.serverexpiration`` number of days to keep cached server
      blobs

    ``remotefilelog.validatecache`` if set, check cache entries for corruption
      before returning blobs

    ``remotefilelog.validatecachelog`` if set, check cache entries for
      corruption before returning metadata
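
For example, a shallow client might enable the extension with a configuration
along these lines (the cache path and repo name below are illustrative values,
not defaults)::

    [extensions]
    remotefilelog =

    [remotefilelog]
    cachepath = /var/cache/hg-remotefilelog
    reponame = myrepo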
""" | ||||
from __future__ import absolute_import | ||||
import os | ||||
import time | ||||
import traceback | ||||
from mercurial.node import hex | ||||
from mercurial.i18n import _ | ||||
Gregory Szorc
|
r43355 | from mercurial.pycompat import open | ||
Augie Fackler
|
r40530 | from mercurial import ( | ||
changegroup, | ||||
changelog, | ||||
cmdutil, | ||||
commands, | ||||
configitems, | ||||
context, | ||||
copies, | ||||
debugcommands as hgdebugcommands, | ||||
dispatch, | ||||
error, | ||||
exchange, | ||||
extensions, | ||||
hg, | ||||
localrepo, | ||||
Rodrigo Damazio Bovendorp
|
r45632 | match as matchmod, | ||
Augie Fackler
|
r40530 | merge, | ||
Pulkit Goyal
|
r45851 | mergestate as mergestatemod, | ||
Augie Fackler
|
r40530 | node as nodemod, | ||
patch, | ||||
Pulkit Goyal
|
r40646 | pycompat, | ||
Augie Fackler
|
r40530 | registrar, | ||
repair, | ||||
repoview, | ||||
revset, | ||||
scmutil, | ||||
smartset, | ||||
Pulkit Goyal
|
r40548 | streamclone, | ||
Augie Fackler
|
r40530 | util, | ||
) | ||||
from . import ( | ||||
Augie Fackler
|
r40543 | constants, | ||
Augie Fackler
|
r40530 | debugcommands, | ||
fileserverclient, | ||||
remotefilectx, | ||||
remotefilelog, | ||||
remotefilelogserver, | ||||
repack as repackmod, | ||||
shallowbundle, | ||||
shallowrepo, | ||||
shallowstore, | ||||
shallowutil, | ||||
shallowverifier, | ||||
) | ||||
# ensures debug commands are registered | ||||
hgdebugcommands.command | ||||
cmdtable = {} | ||||
command = registrar.command(cmdtable) | ||||
configtable = {} | ||||
configitem = registrar.configitem(configtable) | ||||
Augie Fackler
|
configitem(b'remotefilelog', b'debug', default=False)

configitem(b'remotefilelog', b'reponame', default=b'')
configitem(b'remotefilelog', b'cachepath', default=None)
configitem(b'remotefilelog', b'cachegroup', default=None)
configitem(b'remotefilelog', b'cacheprocess', default=None)
configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")

configitem(
    b'remotefilelog',
    b'fallbackpath',
    default=configitems.dynamicdefault,
    alias=[(b'remotefilelog', b'fallbackrepo')],
)

configitem(b'remotefilelog', b'validatecachelog', default=None)
configitem(b'remotefilelog', b'validatecache', default=b'on')
configitem(b'remotefilelog', b'server', default=None)
configitem(b'remotefilelog', b'servercachepath', default=None)
configitem(b"remotefilelog", b"serverexpiration", default=30)
configitem(b'remotefilelog', b'backgroundrepack', default=False)
configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
configitem(b'remotefilelog', b'pullprefetch', default=None)
configitem(b'remotefilelog', b'backgroundprefetch', default=False)
configitem(b'remotefilelog', b'prefetchdelay', default=120)
configitem(b'remotefilelog', b'prefetchdays', default=14)

configitem(b'remotefilelog', b'getfilesstep', default=10000)
configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
configitem(b'remotefilelog', b'fetchwarning', default=b'')

configitem(b'remotefilelog', b'includepattern', default=None)
configitem(b'remotefilelog', b'excludepattern', default=None)

configitem(b'remotefilelog', b'gcrepack', default=False)
configitem(b'remotefilelog', b'repackonhggc', default=False)
configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)

configitem(b'packs', b'maxpacksize', default=0)
configitem(b'packs', b'maxchainlen', default=1000)

configitem(b'devel', b'remotefilelog.bg-wait', default=False)

# default TTL limit is 30 days
_defaultlimit = 60 * 60 * 24 * 30
configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)

configitem(b'remotefilelog', b'data.gencountlimit', default=2)
configitem(
    b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
)
configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')

configitem(b'remotefilelog', b'history.gencountlimit', default=2)
configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

repoclass = localrepo.localrepository
repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)

isenabled = shallowutil.isenabled

def uisetup(ui):
    """Wraps user facing Mercurial commands to swap them out with shallow
    versions.
    """
    hg.wirepeersetupfuncs.append(fileserverclient.peersetup)

    entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
    entry[1].append(
        (
            b'',
            b'shallow',
            None,
            _(b"create a shallow clone which uses remote file history"),
        )
    )

    extensions.wrapcommand(
        commands.table, b'debugindex', debugcommands.debugindex
    )
    extensions.wrapcommand(
        commands.table, b'debugindexdot', debugcommands.debugindexdot
    )
    extensions.wrapcommand(commands.table, b'log', log)
    extensions.wrapcommand(commands.table, b'pull', pull)

    # Prevent 'hg manifest --all'
    def _manifest(orig, ui, repo, *args, **opts):
        if isenabled(repo) and opts.get('all'):
            raise error.Abort(_(b"--all is not supported in a shallow repo"))

        return orig(ui, repo, *args, **opts)

    extensions.wrapcommand(commands.table, b"manifest", _manifest)

    # Wrap remotefilelog with lfs code
    def _lfsloaded(loaded=False):
        lfsmod = None
        try:
            lfsmod = extensions.find(b'lfs')
        except KeyError:
            pass
        if lfsmod:
            lfsmod.wrapfilelog(remotefilelog.remotefilelog)
            fileserverclient._lfsmod = lfsmod

    extensions.afterloaded(b'lfs', _lfsloaded)

    # debugdata needs remotefilelog.len to work
    extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)

    changegroup.cgpacker = shallowbundle.shallowcg1packer
    extensions.wrapfunction(
        changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles
    )
    extensions.wrapfunction(
        changegroup, b'makechangegroup', shallowbundle.makechangegroup
    )
    extensions.wrapfunction(localrepo, b'makestore', storewrapper)
    extensions.wrapfunction(exchange, b'pull', exchangepull)
    extensions.wrapfunction(merge, b'applyupdates', applyupdates)
    extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles)
    extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup)
    extensions.wrapfunction(scmutil, b'_findrenames', findrenames)
    extensions.wrapfunction(
        copies, b'_computeforwardmissing', computeforwardmissing
    )
    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
    extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets)
    extensions.wrapfunction(context.changectx, b'filectx', filectx)
    extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx)
    extensions.wrapfunction(patch, b'trydiff', trydiff)
    extensions.wrapfunction(hg, b'verify', _verify)
    scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)

    # disappointing hacks below
    extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn)
    extensions.wrapfunction(revset, b'filelog', filelogrevset)
    revset.symbols[b'filelog'] = revset.filelog
    extensions.wrapfunction(cmdutil, b'walkfilerevs', walkfilerevs)

def cloneshallow(orig, ui, repo, *args, **opts):
    if opts.get('shallow'):
        repos = []

        def pull_shallow(orig, self, *args, **kwargs):
            if not isenabled(self):
                repos.append(self.unfiltered())
                # set up the client hooks so the post-clone update works
                setupclient(self.ui, self.unfiltered())

                # setupclient fixed the class on the repo itself
                # but we also need to fix it on the repoview
                if isinstance(self, repoview.repoview):
                    self.__class__.__bases__ = (
                        self.__class__.__bases__[0],
                        self.unfiltered().__class__,
                    )
                self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
                with self.lock():
                    # acquire store lock before writing requirements as some
                    # requirements might be written to .hg/store/requires
                    scmutil.writereporequirements(self)

                # Since setupclient hadn't been called, exchange.pull was not
                # wrapped. So we need to manually invoke our version of it.
                return exchangepull(orig, self, *args, **kwargs)
            else:
                return orig(self, *args, **kwargs)

        extensions.wrapfunction(exchange, b'pull', pull_shallow)

        # Wrap the stream logic to add requirements and to pass include/exclude
        # patterns around.
        def setup_streamout(repo, remote):
            # Replace remote.stream_out with a version that sends file
            # patterns.
            def stream_out_shallow(orig):
                caps = remote.capabilities()
                if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                    opts = {}
                    if repo.includepattern:
                        opts['includepattern'] = b'\0'.join(repo.includepattern)
                    if repo.excludepattern:
                        opts['excludepattern'] = b'\0'.join(repo.excludepattern)
                    return remote._callstream(b'stream_out_shallow', **opts)
                else:
                    return orig()

            extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)

        def stream_wrap(orig, op):
            setup_streamout(op.repo, op.remote)
            return orig(op)

        extensions.wrapfunction(
            streamclone, b'maybeperformlegacystreamclone', stream_wrap
        )

        def canperformstreamclone(orig, pullop, bundle2=False):
            # remotefilelog is currently incompatible with the
            # bundle2 flavor of streamclones, so force us to use
            # v1 instead.
            if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
                pullop.remotebundle2caps[b'stream'] = [
                    c for c in pullop.remotebundle2caps[b'stream'] if c != b'v2'
                ]
            if bundle2:
                return False, None
            supported, requirements = orig(pullop, bundle2=bundle2)
            if requirements is not None:
                requirements.add(constants.SHALLOWREPO_REQUIREMENT)
            return supported, requirements

        extensions.wrapfunction(
            streamclone, b'canperformstreamclone', canperformstreamclone
        )

    try:
        orig(ui, repo, *args, **opts)
    finally:
        if opts.get('shallow'):
            for r in repos:
                if util.safehasattr(r, b'fileservice'):
                    r.fileservice.close()

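# debugdata needs len(filelog) to work (see the note in uisetup), but a
# remotefilelog cannot cheaply report a revision count. Pretend there is
# exactly one revision while the wrapped command runs, then restore the
# real __len__.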
def debugdatashallow(orig, *args, **kwds):
    oldlen = remotefilelog.remotefilelog.__len__
    try:
        remotefilelog.remotefilelog.__len__ = lambda x: 1
        return orig(*args, **kwds)
    finally:
        remotefilelog.remotefilelog.__len__ = oldlen

def reposetup(ui, repo):
    if not repo.local():
        return

    # put here intentionally because it doesn't work in uisetup
    ui.setconfig(b'hooks', b'update.prefetch', wcpprefetch)
    ui.setconfig(b'hooks', b'commit.prefetch', wcpprefetch)

    isserverenabled = ui.configbool(b'remotefilelog', b'server')
    isshallowclient = isenabled(repo)

    if isserverenabled and isshallowclient:
        raise RuntimeError(b"Cannot be both a server and shallow client.")

    if isshallowclient:
        setupclient(ui, repo)

    if isserverenabled:
        remotefilelogserver.setupserver(ui, repo)

def setupclient(ui, repo):
    if not isinstance(repo, localrepo.localrepository):
        return

    # Even clients get the server setup since they need to have the
    # wireprotocol endpoints registered.
    remotefilelogserver.onetimesetup(ui)
    onetimeclientsetup(ui)

    shallowrepo.wraprepo(repo)
    repo.store = shallowstore.wrapstore(repo.store)


def storewrapper(orig, requirements, path, vfstype):
    s = orig(requirements, path, vfstype)
    if constants.SHALLOWREPO_REQUIREMENT in requirements:
        s = shallowstore.wrapstore(s)

    return s


# prefetch files before update
def applyupdates(
    orig, repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts
):
    if isenabled(repo):
        manifest = mctx.manifest()
        files = []
        for f, args, msg in mresult.getactions([mergestatemod.ACTION_GET]):
            files.append((f, hex(manifest[f])))
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(files)
    return orig(repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts)

# Prefetch merge checkunknownfiles
def checkunknownfiles(orig, repo, wctx, mctx, force, mresult, *args, **kwargs):
    if isenabled(repo):
        files = []
        sparsematch = repo.maybesparsematch(mctx.rev())
        for f, (m, actionargs, msg) in mresult.filemap():
            if sparsematch and not sparsematch(f):
                continue
            if m in (
                mergestatemod.ACTION_CREATED,
                mergestatemod.ACTION_DELETED_CHANGED,
                mergestatemod.ACTION_CREATED_MERGE,
            ):
                files.append((f, hex(mctx.filenode(f))))
            elif m == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
                f2 = actionargs[0]
                files.append((f2, hex(mctx.filenode(f2))))
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(files)
    return orig(repo, wctx, mctx, force, mresult, *args, **kwargs)

# Prefetch files before status attempts to look at their size and contents
def checklookup(orig, self, files):
    repo = self._repo
    if isenabled(repo):
        prefetchfiles = []
        for parent in self._parents:
            for f in files:
                if f in parent:
                    prefetchfiles.append((f, hex(parent.filenode(f))))
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(prefetchfiles)
    return orig(self, files)

# Prefetch the logic that compares added and removed files for renames
def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
    if isenabled(repo):
        files = []
        pmf = repo[b'.'].manifest()
        for f in removed:
            if f in pmf:
                files.append((f, hex(pmf[f])))
        # batch fetch the needed files from the server
        repo.fileservice.prefetch(files)
    return orig(repo, matcher, added, removed, *args, **kwargs)

# prefetch files before pathcopies check
def computeforwardmissing(orig, a, b, match=None):
    missing = orig(a, b, match=match)
    repo = a._repo
    if isenabled(repo):
        mb = b.manifest()

        files = []
        sparsematch = repo.maybesparsematch(b.rev())
        if sparsematch:
            sparsemissing = set()
            for f in missing:
                if sparsematch(f):
                    files.append((f, hex(mb[f])))
                    sparsemissing.add(f)
            missing = sparsemissing

        # batch fetch the needed files from the server
        repo.fileservice.prefetch(files)
    return missing

# close cache miss server connection after the command has finished
def runcommand(orig, lui, repo, *args, **kwargs):
    fileservice = None
    # repo can be None when running in chg:
    # - at startup, reposetup was called because serve is not norepo
    # - a norepo command like "help" is called
    if repo and isenabled(repo):
        fileservice = repo.fileservice
    try:
        return orig(lui, repo, *args, **kwargs)
    finally:
        if fileservice:
            fileservice.close()

# prevent strip from stripping remotefilelogs
def _collectbrokencsets(orig, repo, files, striprev):
    if isenabled(repo):
        files = [f for f in files if not repo.shallowmatch(f)]
    return orig(repo, files, striprev)

# changectx wrappers
def filectx(orig, self, path, fileid=None, filelog=None):
    if fileid is None:
        fileid = self.filenode(path)
    if isenabled(self._repo) and self._repo.shallowmatch(path):
        return remotefilectx.remotefilectx(
            self._repo, path, fileid=fileid, changectx=self, filelog=filelog
        )
    return orig(self, path, fileid=fileid, filelog=filelog)


def workingfilectx(orig, self, path, filelog=None):
    if isenabled(self._repo) and self._repo.shallowmatch(path):
        return remotefilectx.remoteworkingfilectx(
            self._repo, path, workingctx=self, filelog=filelog
        )
    return orig(self, path, filelog=filelog)

# prefetch required revisions before a diff
def trydiff(
    orig,
    repo,
    revs,
    ctx1,
    ctx2,
    modified,
    added,
    removed,
    copy,
    getfilectx,
    *args,
    **kwargs
):
    if isenabled(repo):
        prefetch = []
        mf1 = ctx1.manifest()
        for fname in modified + added + removed:
            if fname in mf1:
                fnode = getfilectx(fname, ctx1).filenode()
                # fnode can be None if it's an edited working ctx file
                if fnode:
                    prefetch.append((fname, hex(fnode)))
            if fname not in removed:
                fnode = getfilectx(fname, ctx2).filenode()
                if fnode:
                    prefetch.append((fname, hex(fnode)))

        repo.fileservice.prefetch(prefetch)

    return orig(
        repo,
        revs,
        ctx1,
        ctx2,
        modified,
        added,
        removed,
        copy,
        getfilectx,
        *args,
        **kwargs
    )

# Prevent verify from processing files
# a stub for mercurial.hg.verify()
def _verify(orig, repo, level=None):
    lock = repo.lock()
    try:
        return shallowverifier.shallowverifier(repo).verify()
    finally:
        lock.release()


clientonetime = False

def onetimeclientsetup(ui):
    global clientonetime
    if clientonetime:
        return
    clientonetime = True

    # Don't commit filelogs until we know the commit hash, since the hash
    # is present in the filelog blob.
    # This violates Mercurial's filelog->manifest->changelog write order,
    # but is generally fine for client repos.
    pendingfilecommits = []

    def addrawrevision(
        orig,
        self,
        rawtext,
        transaction,
        link,
        p1,
        p2,
        node,
        flags,
        cachedelta=None,
        _metatuple=None,
    ):
        if isinstance(link, int):
            pendingfilecommits.append(
                (
                    self,
                    rawtext,
                    transaction,
                    link,
                    p1,
                    p2,
                    node,
                    flags,
                    cachedelta,
                    _metatuple,
                )
            )
            return node
        else:
            return orig(
                self,
                rawtext,
                transaction,
                link,
                p1,
                p2,
                node,
                flags,
                cachedelta,
                _metatuple=_metatuple,
            )

    extensions.wrapfunction(
        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
    )

    def changelogadd(orig, self, *args, **kwargs):
        oldlen = len(self)
        node = orig(self, *args, **kwargs)
        newlen = len(self)
        if oldlen != newlen:
            for oldargs in pendingfilecommits:
                log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
                linknode = self.node(link)
                if linknode == node:
                    log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                else:
                    raise error.ProgrammingError(
                        b'pending multiple integer revisions are not supported'
                    )
        else:
            # "link" is actually wrong here (it is set to len(changelog))
            # if changelog remains unchanged, skip writing file revisions
            # but still do a sanity check about pending multiple revisions
            if len({x[3] for x in pendingfilecommits}) > 1:
                raise error.ProgrammingError(
                    b'pending multiple integer revisions are not supported'
                )
        del pendingfilecommits[:]

        return node

    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)

def getrenamedfn(orig, repo, endrev=None):
    if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
        return orig(repo, endrev)

    rcache = {}

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if rev in rcache.setdefault(fn, {}):
            return rcache[fn][rev]

        try:
            fctx = repo[rev].filectx(fn)
            for ancestor in fctx.ancestors():
                if ancestor.path() == fn:
                    renamed = ancestor.renamed()
                    rcache[fn][ancestor.rev()] = renamed and renamed[0]

            renamed = fctx.renamed()
            return renamed and renamed[0]
        except error.LookupError:
            return None

    return getrenamed

def walkfilerevs(orig, repo, match, follow, revs, fncache):
    if not isenabled(repo):
        return orig(repo, match, follow, revs, fncache)

    # remotefilelog's can't be walked in rev order, so throw.
    # The caller will see the exception and walk the commit tree instead.
    if not follow:
        raise cmdutil.FileWalkError(b"Cannot walk via filelog")

    wanted = set()
    minrev, maxrev = min(revs), max(revs)

    pctx = repo[b'.']
    for filename in match.files():
        if filename not in pctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % filename
            )
        fctx = pctx[filename]

        linkrev = fctx.linkrev()
        if linkrev >= minrev and linkrev <= maxrev:
            fncache.setdefault(linkrev, []).append(filename)
            wanted.add(linkrev)

        for ancestor in fctx.ancestors():
            linkrev = ancestor.linkrev()
            if linkrev >= minrev and linkrev <= maxrev:
                fncache.setdefault(linkrev, []).append(ancestor.path())
                wanted.add(linkrev)

    return wanted

def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
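
    For example, ``filelog("path:mercurial/commands.py")`` selects changesets
    connected to that file's filelog (the path here is purely illustrative).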
""" | ||||
Pulkit Goyal
|
r40549 | if not isenabled(repo): | ||
Augie Fackler
|
r40530 | return orig(repo, subset, x) | ||
# i18n: "filelog" is a keyword | ||||
Augie Fackler
|
r43347 | pat = revset.getstring(x, _(b"filelog requires a pattern")) | ||
Rodrigo Damazio Bovendorp
|
r45632 | m = matchmod.match( | ||
Augie Fackler
|
r43347 | repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None] | ||
Augie Fackler
|
r43346 | ) | ||
Augie Fackler
|
r40530 | s = set() | ||
Rodrigo Damazio Bovendorp
|
r45632 | if not matchmod.patkind(pat): | ||
Augie Fackler
|
r40530 | # slow | ||
for r in subset: | ||||
ctx = repo[r] | ||||
cfiles = ctx.files() | ||||
for f in m.files(): | ||||
if f in cfiles: | ||||
s.add(ctx.rev()) | ||||
break | ||||
else: | ||||
# partial | ||||
files = (f for f in repo[None] if m(f)) | ||||
for f in files: | ||||
fctx = repo[None].filectx(f) | ||||
s.add(fctx.linkrev()) | ||||
for actx in fctx.ancestors(): | ||||
s.add(actx.linkrev()) | ||||
return smartset.baseset([r for r in subset if r in s]) | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
def gc(ui, *args, **opts):
    '''garbage collect the client and server filelog caches'''
    cachepaths = set()

    # get the system client cache
    systemcache = shallowutil.getcachepath(ui, allowempty=True)
    if systemcache:
        cachepaths.add(systemcache)

    # get repo client and server cache
    repopaths = []
    pwd = ui.environ.get(b'PWD')
    if pwd:
        repopaths.append(pwd)

    repopaths.extend(args)
    repos = []
    for repopath in repopaths:
        try:
            repo = hg.peer(ui, {}, repopath)
            repos.append(repo)

            repocache = shallowutil.getcachepath(repo.ui, allowempty=True)
            if repocache:
                cachepaths.add(repocache)
        except error.RepoError:
            pass

    # gc client cache
    for cachepath in cachepaths:
        gcclient(ui, cachepath)

    # gc server cache
    for repo in repos:
        remotefilelogserver.gcserver(ui, repo._repo)

def gcclient(ui, cachepath):
    # get list of repos that use this cache
    repospath = os.path.join(cachepath, b'repos')
    if not os.path.exists(repospath):
        ui.warn(_(b"no known cache at %s\n") % cachepath)
        return

    reposfile = open(repospath, b'rb')
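    # each line in the repos file is a repo path followed by a newline;
    # r[:-1] strips that trailing newline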
    repos = {r[:-1] for r in reposfile.readlines()}
    reposfile.close()

    # build list of useful files
    validrepos = []
    keepkeys = set()

    sharedcache = None
    filesrepacked = False

    count = 0
    progress = ui.makeprogress(
        _(b"analyzing repositories"), unit=b"repos", total=len(repos)
    )
    for path in repos:
        progress.update(count)
        count += 1
        try:
            path = ui.expandpath(os.path.normpath(path))
        except TypeError as e:
            ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
            traceback.print_exc()
            continue
        try:
            peer = hg.peer(ui, {}, path)
            repo = peer._repo
        except error.RepoError:
            continue

        validrepos.append(path)

        # Protect against any repo or config changes that have happened since
        # this repo was added to the repos file. We'd rather this loop succeed
        # and too much be deleted, than the loop fail and nothing gets deleted.
        if not isenabled(repo):
            continue

        if not util.safehasattr(repo, b'name'):
            ui.warn(
                _(b"repo %s is a misconfigured remotefilelog repo\n") % path
            )
            continue

        # If garbage collection on repack and repack on hg gc are enabled
        # then loose files are repacked and garbage collected.
        # Otherwise regular garbage collection is performed.
        repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
        gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
        if repackonhggc and gcrepack:
            try:
                repackmod.incrementalrepack(repo)
                filesrepacked = True
                continue
            except (IOError, repackmod.RepackAlreadyRunning):
                # If repack cannot be performed due to not enough disk space
                # continue doing garbage collection of loose files w/o repack
                pass

        reponame = repo.name
        if not sharedcache:
            sharedcache = repo.sharedstore

        # Compute a keepset which is not garbage collected
        def keyfn(fname, fnode):
            return fileserverclient.getcachekey(reponame, fname, hex(fnode))

        keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)

    progress.complete()

    # write list of valid repos back
    oldumask = os.umask(0o002)
    try:
        reposfile = open(repospath, b'wb')
        reposfile.writelines([(b"%s\n" % r) for r in validrepos])
        reposfile.close()
    finally:
        os.umask(oldumask)

    # prune cache
    if sharedcache is not None:
        sharedcache.gc(keepkeys)
    elif not filesrepacked:
        ui.warn(_(b"warning: no valid repos in repofile\n"))

def log(orig, ui, repo, *pats, **opts):
    if not isenabled(repo):
        return orig(ui, repo, *pats, **opts)

    follow = opts.get('follow')
    revs = opts.get('rev')
    if pats:
        # Force slowpath for non-follow patterns and follows that start from
        # non-working-copy-parent revs.
        if not follow or revs:
            # This forces the slowpath
            opts['removed'] = True

        # If this is a non-follow log without any revs specified, recommend that
        # the user add -f to speed it up.
        if not follow and not revs:
            match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
            isfile = not match.anypats()
            if isfile:
                for file in match.files():
                    if not os.path.isfile(repo.wjoin(file)):
                        isfile = False
                        break

            if isfile:
                ui.warn(
                    _(
                        b"warning: file log can be slow on large repos - "
                        + b"use -f to speed it up\n"
                    )
                )

    return orig(ui, repo, *pats, **opts)

def revdatelimit(ui, revset):
    """Update revset so that only changesets no older than 'prefetchdays' days
    are included. The default value is set to 14 days. If 'prefetchdays' is set
    to zero or negative value then date restriction is not applied.
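
    For example, with the default of 14 days::

        revdatelimit(ui, b'draft()') -> b'(draft()) & date(-14)'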
""" | ||||
Augie Fackler
|
r43347 | days = ui.configint(b'remotefilelog', b'prefetchdays') | ||
Augie Fackler
|
r40530 | if days > 0: | ||
Augie Fackler
|
r43347 | revset = b'(%s) & date(-%s)' % (revset, days) | ||
Augie Fackler
|
r40530 | return revset | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readytofetch(repo):
    """Check that enough time has passed since the last background prefetch.
    This only relates to prefetches after operations that change the working
    copy parent. Default delay between background prefetches is 2 minutes.
    """
    timeout = repo.ui.configint(b'remotefilelog', b'prefetchdelay')
    fname = repo.vfs.join(b'lastprefetch')

    ready = False
    with open(fname, b'a'):
        # the with construct above is used to avoid race conditions
        modtime = os.path.getmtime(fname)
        if (time.time() - modtime) > timeout:
            os.utime(fname, None)
            ready = True

    return ready

def wcpprefetch(ui, repo, **kwargs):
    """Prefetches in background revisions specified by bgprefetchrevs revset.
    Does background repack if backgroundrepack flag is set in config.
    """
    shallow = isenabled(repo)
    bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs')
    isready = readytofetch(repo)

    if not (shallow and bgprefetchrevs and isready):
        return

    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
    # update a revset with a date limit
    bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)

    def anon(unused_success):
        if util.safehasattr(repo, b'ranprefetch') and repo.ranprefetch:
            return
        repo.ranprefetch = True
        repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)

    repo._afterlock(anon)

def pull(orig, ui, repo, *pats, **opts):
    result = orig(ui, repo, *pats, **opts)

    if isenabled(repo):
        # prefetch if it's configured
        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch')
        bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
        bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch')

        if prefetchrevset:
            ui.status(_(b"prefetching file contents\n"))
            revs = scmutil.revrange(repo, [prefetchrevset])
            base = repo[b'.'].rev()
            if bgprefetch:
                repo.backgroundprefetch(prefetchrevset, repack=bgrepack)
            else:
                repo.prefetch(revs, base=base)
                if bgrepack:
                    repackmod.backgroundrepack(repo, incremental=True)
        elif bgrepack:
            repackmod.backgroundrepack(repo, incremental=True)

    return result

def exchangepull(orig, repo, remote, *args, **kwargs):
    # Hook into the callstream/getbundle to insert bundle capabilities
    # during a pull.
    def localgetbundle(
        orig, source, heads=None, common=None, bundlecaps=None, **kwargs
    ):
        if not bundlecaps:
            bundlecaps = set()
        bundlecaps.add(constants.BUNDLE2_CAPABLITY)
        return orig(
            source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
        )

    if util.safehasattr(remote, b'_callstream'):
        remote._localrepo = repo
    elif util.safehasattr(remote, b'getbundle'):
        extensions.wrapfunction(remote, b'getbundle', localgetbundle)

    return orig(repo, remote, *args, **kwargs)

def _fileprefetchhook(repo, revmatches):
    if isenabled(repo):
        allfiles = []
        for rev, match in revmatches:
            if rev == nodemod.wdirrev or rev is None:
                continue
            ctx = repo[rev]
            mf = ctx.manifest()
            sparsematch = repo.maybesparsematch(ctx.rev())
            for path in ctx.walk(match):
                if (not sparsematch or sparsematch(path)) and path in mf:
                    allfiles.append((path, hex(mf[path])))
        repo.fileservice.prefetch(allfiles)

@command(
    b'debugremotefilelog',
    [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
    _(b'hg debugremotefilelog <path>'),
    norepo=True,
)
def debugremotefilelog(ui, path, **opts):
    return debugcommands.debugremotefilelog(ui, path, **opts)


@command(
    b'verifyremotefilelog',
    [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
    _(b'hg verifyremotefilelogs <directory>'),
    norepo=True,
)
def verifyremotefilelog(ui, path, **opts):
    return debugcommands.verifyremotefilelog(ui, path, **opts)


@command(
    b'debugdatapack',
    [
        (b'', b'long', None, _(b'print the long hashes')),
        (b'', b'node', b'', _(b'dump the contents of node'), b'NODE'),
    ],
    _(b'hg debugdatapack <paths>'),
    norepo=True,
)
def debugdatapack(ui, *paths, **opts):
    return debugcommands.debugdatapack(ui, *paths, **opts)


@command(b'debughistorypack', [], _(b'hg debughistorypack <path>'), norepo=True)
def debughistorypack(ui, path, **opts):
    return debugcommands.debughistorypack(ui, path)


@command(b'debugkeepset', [], _(b'hg debugkeepset'))
def debugkeepset(ui, repo, **opts):
    # The command is used to measure keepset computation time
    def keyfn(fname, fnode):
        return fileserverclient.getcachekey(repo.name, fname, hex(fnode))

    repackmod.keepset(repo, keyfn)
    return


@command(b'debugwaitonrepack', [], _(b'hg debugwaitonrepack'))
def debugwaitonrepack(ui, repo, **opts):
    return debugcommands.debugwaitonrepack(repo)


@command(b'debugwaitonprefetch', [], _(b'hg debugwaitonprefetch'))
def debugwaitonprefetch(ui, repo, **opts):
    return debugcommands.debugwaitonprefetch(repo)

def resolveprefetchopts(ui, opts):
    if not opts.get(b'rev'):
        revset = [b'.', b'draft()']

        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch', None)
        if prefetchrevset:
            revset.append(b'(%s)' % prefetchrevset)
        bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs', None)
        if bgprefetchrevs:
            revset.append(b'(%s)' % bgprefetchrevs)
        revset = b'+'.join(revset)
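        # e.g. b'.+draft()+(<pullprefetch>)+(<bgprefetchrevs>)'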

        # update a revset with a date limit
        revset = revdatelimit(ui, revset)

        opts[b'rev'] = [revset]

    if not opts.get(b'base'):
        opts[b'base'] = None

    return opts

@command(
    b'prefetch',
    [
        (b'r', b'rev', [], _(b'prefetch the specified revisions'), _(b'REV')),
        (b'', b'repack', False, _(b'run repack after prefetch')),
        (b'b', b'base', b'', _(b"rev that is assumed to already be local")),
    ]
    + commands.walkopts,
    _(b'hg prefetch [OPTIONS] [FILE...]'),
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache. If no rev is specified, the default rev is
    used which is the union of dot, draft, pullprefetch and bgprefetchrev.
    File names or patterns can be used to limit which files are downloaded.

    Return 0 on success.
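
    For example, to prefetch the files touched by draft commits::

        hg prefetch -r 'draft()'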
""" | ||||
Pulkit Goyal
|
r40646 | opts = pycompat.byteskwargs(opts) | ||
Pulkit Goyal
|
r40549 | if not isenabled(repo): | ||
Augie Fackler
|
r43347 | raise error.Abort(_(b"repo is not shallow")) | ||
Augie Fackler
|
r40530 | |||
opts = resolveprefetchopts(ui, opts) | ||||
Augie Fackler
|
r43347 | revs = scmutil.revrange(repo, opts.get(b'rev')) | ||
repo.prefetch(revs, opts.get(b'base'), pats, opts) | ||||
Augie Fackler
|
r40530 | |||
# Run repack in background | ||||
Augie Fackler
|
r43347 | if opts.get(b'repack'): | ||
r44303 | repackmod.backgroundrepack(repo, incremental=True) | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | |||
Augie Fackler
|
@command(
    b'repack',
    [
        (b'', b'background', None, _(b'run in a background process'), None),
        (b'', b'incremental', None, _(b'do an incremental repack'), None),
        (
            b'',
            b'packsonly',
            None,
            _(b'only repack packs (skip loose objects)'),
            None,
        ),
    ],
    _(b'hg repack [OPTIONS]'),
)
def repack_(ui, repo, *pats, **opts):
    if opts.get('background'):
        repackmod.backgroundrepack(
            repo,
            incremental=opts.get('incremental'),
            packsonly=opts.get('packsonly', False),
        )
        return

    options = {b'packsonly': opts.get('packsonly')}

    try:
        if opts.get('incremental'):
            repackmod.incrementalrepack(repo, options=options)
        else:
            repackmod.fullrepack(repo, options=options)
    except repackmod.RepackAlreadyRunning as ex:
        # Don't propagate the exception if the repack is already in
        # progress, since we want the command to exit 0.
        repo.ui.warn(b'%s\n' % ex)