# shallowrepo.py - shallow repository that uses remote filelogs
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import

import os

from mercurial.i18n import _
from mercurial.node import hex, nullid, nullrev
from mercurial import (
    encoding,
    error,
    localrepo,
    match,
    scmutil,
    sparse,
    util,
)
from mercurial.utils import procutil

from . import (
    connectionpool,
    constants,
    contentstore,
    datapack,
    fileserverclient,
    historypack,
    metadatastore,
    remotefilectx,
    remotefilelog,
    shallowutil,
)
# These make*stores functions are global so that other extensions can replace
# them.
def makelocalstores(repo):
    """In-repo stores, like .hg/store/data; can not be discarded."""
    storepath = os.path.join(repo.svfs.vfs.base, b'data')
    if not os.path.exists(storepath):
        os.makedirs(storepath)

    # Build the repo-local (non-shared) content and metadata stores.
    content = contentstore.remotefilelogcontentstore(
        repo, storepath, repo.name, shared=False
    )
    metadata = metadatastore.remotefilelogmetadatastore(
        repo, storepath, repo.name, shared=False
    )
    return content, metadata


def makecachestores(repo):
    """Typically machine-wide, cache of remote data; can be discarded."""
    # Build the shared (cross-repo) cache stores rooted at the cache path.
    path = shallowutil.getcachepath(repo.ui)
    content = contentstore.remotefilelogcontentstore(
        repo, path, repo.name, shared=True
    )
    metadata = metadatastore.remotefilelogmetadatastore(
        repo, path, repo.name, shared=True
    )

    # Register the shared stores on the repo so other components can find
    # them.
    repo.sharedstore = content
    repo.shareddatastores.append(content)
    repo.sharedhistorystores.append(metadata)

    return content, metadata


def makeremotestores(repo, cachecontent, cachemetadata):
    """These stores fetch data from a remote server."""
    # The file service is the transport used to talk to the server; the
    # remote stores wrap it and write fetched data through the cache stores.
    repo.fileservice = fileserverclient.fileserverclient(repo)

    content = contentstore.remotecontentstore(
        repo.ui, repo.fileservice, cachecontent
    )
    metadata = metadatastore.remotemetadatastore(
        repo.ui, repo.fileservice, cachemetadata
    )
    return content, metadata


def makepackstores(repo):
    """Packs are more efficient (to read from) cache stores."""
    # Build pack-format stores from the file-pack cache directory.
    path = shallowutil.getcachepackpath(repo, constants.FILEPACK_CATEGORY)
    contentpacks = datapack.datapackstore(repo.ui, path)
    historypacks = historypack.historypackstore(repo.ui, path)

    repo.shareddatastores.append(contentpacks)
    repo.sharedhistorystores.append(historypacks)

    shallowutil.reportpackmetrics(
        repo.ui, b'filestore', contentpacks, historypacks
    )
    return contentpacks, historypacks


def makeunionstores(repo):
    """Union stores iterate the other stores and return the first result."""
    repo.shareddatastores = []
    repo.sharedhistorystores = []

    packcontent, packmetadata = makepackstores(repo)
    cachecontent, cachemetadata = makecachestores(repo)
    localcontent, localmetadata = makelocalstores(repo)
    remotecontent, remotemetadata = makeremotestores(
        repo, cachecontent, cachemetadata
    )

    # Lookup order is packs, then the shared cache, then local data, and
    # finally the remote server; all writes land in the local store.
    repo.contentstore = contentstore.unioncontentstore(
        packcontent,
        cachecontent,
        localcontent,
        remotecontent,
        writestore=localcontent,
    )
    repo.metadatastore = metadatastore.unionmetadatastore(
        packmetadata,
        cachemetadata,
        localmetadata,
        remotemetadata,
        writestore=localmetadata,
    )

    # The file service reads through the union stores and writes fetched
    # data into the shared cache stores.
    repo.fileservice.setstore(
        repo.contentstore,
        repo.metadatastore,
        cachecontent,
        cachemetadata,
    )
    shallowutil.reportpackmetrics(
        repo.ui, b'filestore', packcontent, packmetadata
    )


def wraprepo(repo):
    """Dynamically subclass ``repo`` into a shallow repository.

    Replaces ``repo.__class__`` with a ``shallowrepository`` subclass,
    instantiates the union stores, and attaches remotefilelog state
    (shallowmatch, include/exclude patterns, connection pool) to the repo.
    """

    class shallowrepository(repo.__class__):
        # NOTE: many methods below close over the module-level ``repo``
        # argument rather than using ``self``; they therefore always act on
        # the originally wrapped (unfiltered) repo object.

        @util.propertycache
        def name(self):
            # Repo name used to key the shared cache directories.
            return self.ui.config(b'remotefilelog', b'reponame')

        @util.propertycache
        def fallbackpath(self):
            # Server to fetch file contents from; falls back to the default
            # push/pull path when not explicitly configured.
            path = repo.ui.config(
                b"remotefilelog",
                b"fallbackpath",
                repo.ui.config(b'paths', b'default'),
            )
            if not path:
                raise error.Abort(
                    b"no remotefilelog server "
                    b"configured - is your .hg/hgrc trusted?"
                )

            return path

        def maybesparsematch(self, *revs, **kwargs):
            '''
            A wrapper that allows the remotefilelog to invoke sparsematch() if
            this is a sparse repository, or returns None if this is not a
            sparse repository.
            '''
            if revs:
                ret = sparse.matcher(repo, revs=revs)
            else:
                ret = sparse.matcher(repo)
            # An always-matcher means sparse is not in effect; signal that
            # with None so callers can skip filtering entirely.
            if ret.always():
                return None
            return ret

        def file(self, f):
            """Return a remotefilelog for shallow-tracked paths, else defer
            to the base class's filelog."""
            if f[0] == b'/':
                f = f[1:]

            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.svfs, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, *args, **kwargs):
            """Return a remotefilectx for shallow-tracked paths, else defer
            to the base class."""
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, *args, **kwargs)
            else:
                return super(shallowrepository, self).filectx(
                    path, *args, **kwargs
                )

        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False, origctx=None):
            """Add a new revision to current repository.
            Revision information is passed via the context argument.
            """
            # some contexts already have manifest nodes, they don't need any
            # prefetching (for example if we're just editing a commit message
            # we can reuse manifest
            if not ctx.manifestnode():
                # prefetch files that will likely be compared
                m1 = ctx.p1().manifest()
                files = []
                for f in ctx.modified() + ctx.added():
                    # p1 parent of the file; nullid means the file is new
                    # and has nothing to prefetch.
                    fparent1 = m1.get(f, nullid)
                    if fparent1 != nullid:
                        files.append((f, hex(fparent1)))

                self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(
                ctx, error=error, origctx=origctx
            )

        def backgroundprefetch(
            self,
            revs,
            base=None,
            repack=False,
            pats=None,
            opts=None,
            ensurestart=False,
        ):
            """Runs prefetch in background with optional repack
            """
            # Spawn ``hg -R <root> prefetch`` as a detached subprocess so
            # the current command is not blocked by the download.
            cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
            if repack:
                cmd.append(b'--repack')
            if revs:
                cmd += [b'-r', revs]
            # We know this command will find a binary, so don't block
            # on it starting.
            procutil.runbgcommand(
                cmd, encoding.environ, ensurestart=ensurestart
            )

        def prefetch(self, revs, base=None, pats=None, opts=None):
            """Prefetches all the necessary file revisions for the given revs
            Optionally runs repack in background
            """
            # Serialize prefetches per-repo with a store-level lock.
            with repo._lock(
                repo.svfs,
                b'prefetchlock',
                True,
                None,
                None,
                _(b'prefetching in %s') % repo.origroot,
            ):
                self._prefetch(revs, base, pats, opts)

        def _prefetch(self, revs, base=None, pats=None, opts=None):
            """Collect (path, filenode) pairs for ``revs`` and fetch them.

            Splits the work into files known to exist on the server (fetched
            with force=True) and files that may be local-only.
            """
            fallbackpath = self.fallbackpath
            if fallbackpath:
                # If we know a rev is on the server, we should fetch the server
                # version of those files, since our local file versions might
                # become obsolete if the local commits are stripped.
                localrevs = repo.revs(b'outgoing(%s)', fallbackpath)
                if base is not None and base != nullrev:
                    serverbase = list(
                        repo.revs(
                            b'first(reverse(::%s) - %ld)', base, localrevs
                        )
                    )
                    if serverbase:
                        base = serverbase[0]
            else:
                # No server: treat every rev as local ("rev in repo" is
                # always true below).
                localrevs = repo

            mfl = repo.manifestlog
            mfrevlog = mfl.getstorage(b'')
            if base is not None:
                # Entries already present in the base manifest need not be
                # fetched again.
                mfdict = mfl[repo[base].manifestnode()].read()
                skip = set(mfdict.iteritems())
            else:
                skip = set()

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = skip.copy()
            serverfiles = skip.copy()
            visited = set()
            visited.add(nullrev)
            revcount = len(revs)
            progress = self.ui.makeprogress(_(b'prefetching'), total=revcount)
            progress.update(0)
            for rev in sorted(revs):
                ctx = repo[rev]
                if pats:
                    m = scmutil.match(ctx, pats, opts)
                sparsematch = repo.maybesparsematch(rev)

                mfnode = ctx.manifestnode()
                mfrev = mfrevlog.rev(mfnode)

                # Decompressing manifests is expensive.
                # When possible, only read the deltas.
                p1, p2 = mfrevlog.parentrevs(mfrev)
                if p1 in visited and p2 in visited:
                    mfdict = mfl[mfnode].readfast()
                else:
                    mfdict = mfl[mfnode].read()

                diff = mfdict.iteritems()
                if pats:
                    diff = (pf for pf in diff if m(pf[0]))
                if sparsematch:
                    diff = (pf for pf in diff if sparsematch(pf[0]))
                if rev not in localrevs:
                    serverfiles.update(diff)
                else:
                    files.update(diff)
                visited.add(mfrev)
                progress.increment()

            files.difference_update(skip)
            serverfiles.difference_update(skip)
            progress.complete()

            # Fetch files known to be on the server
            if serverfiles:
                results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                repo.fileservice.prefetch(results, force=True)

            # Fetch files that may or may not be on the server
            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                repo.fileservice.prefetch(results)

        def close(self):
            # Release pooled server connections along with the base repo
            # resources.
            super(shallowrepository, self).close()
            self.connectionpool.close()

    repo.__class__ = shallowrepository

    # Default: every path is shallow until include/exclude patterns say
    # otherwise (narrowed below).
    repo.shallowmatch = match.always()

    makeunionstores(repo)

    repo.includepattern = repo.ui.configlist(
        b"remotefilelog", b"includepattern", None
    )
    repo.excludepattern = repo.ui.configlist(
        b"remotefilelog", b"excludepattern", None
    )
    if not util.safehasattr(repo, b'connectionpool'):
        repo.connectionpool = connectionpool.connectionpool(repo)

    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(
            repo.root, b'', None, repo.includepattern, repo.excludepattern
        )