lfcommands.py
673 lines
| 22.1 KiB
| text/x-python
|
PythonLexer
various
|
r15168 | # Copyright 2009-2010 Gregory P. Ward | ||
# Copyright 2009-2010 Intelerad Medical Systems Incorporated | ||||
# Copyright 2010-2011 Fog Creek Software | ||||
# Copyright 2010-2011 Unity Technologies | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
Greg Ward
|
r15252 | '''High-level command function for lfconvert, plus the cmdtable.''' | ||
various
|
r15168 | |||
Manuel Jacob
|
r50143 | import binascii | ||
liscju
|
r29308 | import os | ||
various
|
r15168 | import shutil | ||
from mercurial.i18n import _ | ||||
Joerg Sonnenberger
|
r46729 | from mercurial.node import ( | ||
bin, | ||||
hex, | ||||
) | ||||
various
|
r15168 | |||
liscju
|
r29308 | from mercurial import ( | ||
cmdutil, | ||||
context, | ||||
error, | ||||
Matt Harbison
|
r41091 | exthelper, | ||
liscju
|
r29308 | hg, | ||
lock, | ||||
Martin von Zweigbergk
|
r48928 | logcmdutil, | ||
liscju
|
r29317 | match as matchmod, | ||
Pulkit Goyal
|
r35349 | pycompat, | ||
liscju
|
r29308 | scmutil, | ||
util, | ||||
) | ||||
Augie Fackler
|
r44519 | from mercurial.utils import hashutil | ||
Matt Harbison
|
r25325 | |||
liscju
|
r29308 | from ..convert import ( | ||
convcmd, | ||||
filemap, | ||||
) | ||||
Augie Fackler
|
r43346 | from . import lfutil, storefactory | ||
liscju
|
# Shorthand for lock.release; used in lfconvert's cleanup to release the
# destination repo's wlock/lock pair.
release = lock.release

# -- Commands ----------------------------------------------------------

# Extension helper instance; the @eh.command decorators below register the
# lfconvert/lfpull/debuglfput commands through it.
eh = exthelper.exthelper()
r21242 | |||
Augie Fackler
|
@eh.command(
    b'lfconvert',
    [
        (
            b's',
            b'size',
            b'',
            _(b'minimum size (MB) for files to be converted as largefiles'),
            b'SIZE',
        ),
        (
            b'',
            b'to-normal',
            False,
            _(b'convert from a largefiles repo to a normal repo'),
        ),
    ],
    _(b'hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True,
)
def lfconvert(ui, src, dest, *pats, **opts):
    """convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all."""

    opts = pycompat.byteskwargs(opts)
    if opts[b'to_normal']:
        tolfile = False
    else:
        tolfile = True
        # size threshold (MB); only needed when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)

    # both ends must be local: we walk the source history directly and
    # create the destination on disk
    if not hg.islocal(src):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_(b'initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (
            rsrc[ctx]
            for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
        )
        # maps source node -> destination node; seeded with the null revision
        revmap = {rsrc.nullid: rdst.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, b'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, b'', list(pats))
            else:
                matcher = None
            lfiletohash = {}
            with ui.makeprogress(
                _(b'converting revisions'),
                unit=_(b'revisions'),
                total=rsrc[b'tip'].rev(),
            ) as progress:
                for ctx in ctxs:
                    progress.update(ctx.rev())
                    _lfconvert_addchangeset(
                        rsrc,
                        rdst,
                        ctx,
                        revmap,
                        lfiles,
                        normalfiles,
                        matcher,
                        size,
                        lfiletohash,
                    )

            # clean up working-directory leftovers (standins directory and
            # the largefiles themselves) written during conversion
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add(b'largefiles')
                scmutil.writereporequirements(rdst)
        else:

            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    # map the standin directory back to the repo root so
                    # standins turn into normal files in DEST
                    self.filemapper.rename[lfutil.shortname] = b'.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (
                        not realname.startswith(lfutil.shortnameslash)
                        or f[0] is None
                    ):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(
                            _(b"missing largefile for '%s' in %s")
                            % (realname, realrev)
                        )
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(
                        ui, src, dest, revmapfile, opts
                    )

            # every largefile must be available locally so its contents
            # can be inlined into the converted history
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_(b"all largefiles must be present locally"))

            # temporarily swap in our converter subclass so the convert
            # extension substitutes largefile contents for the standins
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(
                    ui, src, dest, source_type=b'hg', dest_type=b'hg'
                )
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
Augie Fackler
|
def _lfconvert_addchangeset(
    rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
):
    """Convert one source changeset ``ctx`` and commit it into ``rdst``.

    ``lfiles`` and ``normalfiles`` are the sets of files classified so far
    and are updated in place.  ``revmap`` maps source nodes to destination
    nodes (a new entry is added for this changeset).  ``lfiletohash``
    caches the content hash of each largefile already written to the
    destination working directory, avoiding redundant writes.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.copysource()
                if renamed is None:
                    # the code below assumes renamed to be a boolean or a list
                    # and won't quite work with the value None
                    renamed = False
                renamedlfile = renamed and renamed in lfiles
                islfile |= renamedlfile
                # symlinks can never be largefiles
                if b'l' in fctx.flags():
                    if renamedlfile:
                        raise error.Abort(
                            _(b'renamed/copied largefile %s becomes symlink')
                            % f
                        )
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            # the destination tracks the standin, not the file itself
            fstandin = lfutil.standin(f)
            dstfiles.append(fstandin)
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if b'l' in fctx.flags():
                    renamed = fctx.copysource()
                    if renamed and renamed in lfiles:
                        raise error.Abort(
                            _(b'largefile %s becomes symlink') % f
                        )

                # largefile was modified, update standins
                m = hashutil.sha1(b'')
                m.update(ctx[f].data())
                hash = hex(m.digest())
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = b'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, fstandin, hash, executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # callback handed to memctx: produce file contents for ``f`` in
        # the destination changeset
        srcfname = lfutil.splitstandin(f)
        if srcfname is not None:
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.copysource()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed)

            return context.memfilectx(
                repo,
                memctx,
                f,
                lfiletohash[srcfname] + b'\n',
                b'l' in fctx.flags(),
                b'x' in fctx.flags(),
                renamed,
            )
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
Levi Bard
|
r15811 | def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap): | ||
Augie Fackler
|
r43346 | mctx = context.memctx( | ||
rdst, | ||||
parents, | ||||
ctx.description(), | ||||
dstfiles, | ||||
getfilectx, | ||||
ctx.user(), | ||||
ctx.date(), | ||||
ctx.extra(), | ||||
) | ||||
various
|
r15168 | ret = rdst.commitctx(mctx) | ||
FUJIWARA Katsunori
|
r23276 | lfutil.copyalltostore(rdst, ret) | ||
Patrick Mezard
|
r16551 | rdst.setparents(ret) | ||
various
|
r15168 | revmap[ctx.node()] = rdst.changelog.tip() | ||
Augie Fackler
|
r43346 | |||
Levi Bard
|
r15811 | # Generate list of changed files | ||
def _getchangedfiles(ctx, parents): | ||||
files = set(ctx.files()) | ||||
Joerg Sonnenberger
|
r47771 | if ctx.repo().nullid not in parents: | ||
Levi Bard
|
r15811 | mc = ctx.manifest() | ||
Martin von Zweigbergk
|
r41445 | for pctx in ctx.parents(): | ||
for fn in pctx.manifest().diff(mc): | ||||
files.add(fn) | ||||
Levi Bard
|
r15811 | return files | ||
Augie Fackler
|
r43346 | |||
Levi Bard
|
r15811 | # Convert src parents to dst parents | ||
def _convertparents(ctx, revmap): | ||||
parents = [] | ||||
for p in ctx.parents(): | ||||
parents.append(revmap[p.node()]) | ||||
while len(parents) < 2: | ||||
Joerg Sonnenberger
|
r47771 | parents.append(ctx.repo().nullid) | ||
Levi Bard
|
r15811 | return parents | ||
Augie Fackler
|
r43346 | |||
Levi Bard
|
r15811 | # Get memfilectx for a normal file | ||
Sean Farley
|
r21689 | def _getnormalcontext(repo, ctx, f, revmap): | ||
Levi Bard
|
r15811 | try: | ||
fctx = ctx.filectx(f) | ||||
except error.LookupError: | ||||
Mads Kiilerich
|
r22296 | return None | ||
Martin von Zweigbergk
|
r41941 | renamed = fctx.copysource() | ||
Levi Bard
|
r15811 | |||
data = fctx.data() | ||||
Augie Fackler
|
r43347 | if f == b'.hgtags': | ||
Augie Fackler
|
r43346 | data = _converttags(repo.ui, revmap, data) | ||
return context.memfilectx( | ||||
Augie Fackler
|
r43347 | repo, ctx, f, data, b'l' in fctx.flags(), b'x' in fctx.flags(), renamed | ||
Augie Fackler
|
r43346 | ) | ||
Levi Bard
|
r15811 | |||
# Remap tag data using a revision map | ||||
def _converttags(ui, revmap, data): | ||||
newdata = [] | ||||
for line in data.splitlines(): | ||||
try: | ||||
Augie Fackler
|
r43347 | id, name = line.split(b' ', 1) | ||
Levi Bard
|
r15811 | except ValueError: | ||
Augie Fackler
|
r43347 | ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line) | ||
Levi Bard
|
r15811 | continue | ||
try: | ||||
Joerg Sonnenberger
|
r46729 | newid = bin(id) | ||
Manuel Jacob
|
r50143 | except binascii.Error: | ||
Augie Fackler
|
r43347 | ui.warn(_(b'skipping incorrectly formatted id %s\n') % id) | ||
Levi Bard
|
r15811 | continue | ||
try: | ||||
Joerg Sonnenberger
|
r46729 | newdata.append(b'%s %s\n' % (hex(revmap[newid]), name)) | ||
Levi Bard
|
r15811 | except KeyError: | ||
Augie Fackler
|
r43347 | ui.warn(_(b'no mapping for id %s\n') % id) | ||
Levi Bard
|
r15811 | continue | ||
Augie Fackler
|
r43347 | return b''.join(newdata) | ||
Levi Bard
|
r15811 | |||
Augie Fackler
|
r43346 | |||
various
|
r15168 | def _islfile(file, ctx, matcher, size): | ||
Augie Fackler
|
r46554 | """Return true if file should be considered a largefile, i.e. | ||
matcher matches it or it is larger than size.""" | ||||
Greg Ward
|
r15252 | # never store special .hg* files as largefiles | ||
Augie Fackler
|
r43347 | if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs': | ||
various
|
r15168 | return False | ||
if matcher and matcher(file): | ||||
return True | ||||
try: | ||||
return ctx.filectx(file).size() >= size * 1024 * 1024 | ||||
except error.LookupError: | ||||
return False | ||||
Augie Fackler
|
r43346 | |||
various
|
r15168 | def uploadlfiles(ui, rsrc, rdst, files): | ||
'''upload largefiles to the central store''' | ||||
Benjamin Pollack
|
r15317 | if not files: | ||
various
|
r15168 | return | ||
liscju
|
r29355 | store = storefactory.openstore(rsrc, rdst, put=True) | ||
various
|
r15168 | |||
at = 0 | ||||
Augie Fackler
|
r43347 | ui.debug(b"sending statlfile command for %d largefiles\n" % len(files)) | ||
Na'Tosha Bard
|
r17127 | retval = store.exists(files) | ||
Augie Fackler
|
r36329 | files = [h for h in files if not retval[h]] | ||
Augie Fackler
|
r43347 | ui.debug(b"%d largefiles need to be uploaded\n" % len(files)) | ||
Na'Tosha Bard
|
r17127 | |||
Augie Fackler
|
r43346 | with ui.makeprogress( | ||
Augie Fackler
|
r43347 | _(b'uploading largefiles'), unit=_(b'files'), total=len(files) | ||
Augie Fackler
|
r43346 | ) as progress: | ||
Matt Harbison
|
r39427 | for hash in files: | ||
progress.update(at) | ||||
source = lfutil.findfile(rsrc, hash) | ||||
if not source: | ||||
Augie Fackler
|
r43346 | raise error.Abort( | ||
_( | ||||
Augie Fackler
|
r43347 | b'largefile %s missing from store' | ||
b' (needs to be uploaded)' | ||||
Augie Fackler
|
r43346 | ) | ||
% hash | ||||
) | ||||
Matt Harbison
|
r39427 | # XXX check for errors here | ||
store.put(source, hash) | ||||
at += 1 | ||||
various
|
r15168 | |||
Augie Fackler
|
r43346 | |||
various
|
r15168 | def verifylfiles(ui, repo, all=False, contents=False): | ||
Augie Fackler
|
r46554 | """Verify that every largefile revision in the current changeset | ||
various
|
r15168 | exists in the central store. With --contents, also verify that | ||
Mads Kiilerich
|
r18574 | the contents of each local largefile file revision are correct (SHA-1 hash | ||
various
|
r15168 | matches the revision ID). With --all, check every changeset in | ||
Augie Fackler
|
r46554 | this repository.""" | ||
various
|
r15168 | if all: | ||
Augie Fackler
|
r43347 | revs = repo.revs(b'all()') | ||
various
|
r15168 | else: | ||
Augie Fackler
|
r43347 | revs = [b'.'] | ||
various
|
r15168 | |||
liscju
|
r29355 | store = storefactory.openstore(repo) | ||
various
|
r15168 | return store.verify(revs, contents=contents) | ||
Augie Fackler
|
r43346 | |||
Na'Tosha Bard
|
r16700 | def cachelfiles(ui, repo, node, filelist=None): | ||
Augie Fackler
|
r46554 | """cachelfiles ensures that all largefiles needed by the specified revision | ||
various
|
r15168 | are present in the repository's largefile cache. | ||
returns a tuple (cached, missing). cached is the list of files downloaded | ||||
by this operation; missing is the list of files that were needed but could | ||||
Augie Fackler
|
r46554 | not be found.""" | ||
various
|
r15168 | lfiles = lfutil.listlfiles(repo, node) | ||
Na'Tosha Bard
|
r16700 | if filelist: | ||
lfiles = set(lfiles) & set(filelist) | ||||
various
|
r15168 | toget = [] | ||
FUJIWARA Katsunori
|
r31654 | ctx = repo[node] | ||
various
|
r15168 | for lfile in lfiles: | ||
Mads Kiilerich
|
r18728 | try: | ||
FUJIWARA Katsunori
|
r31740 | expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)]) | ||
Manuel Jacob
|
r50201 | except FileNotFoundError: | ||
continue # node must be None and standin wasn't found in wctx | ||||
Mads Kiilerich
|
r18728 | if not lfutil.findfile(repo, expectedhash): | ||
various
|
r15168 | toget.append((lfile, expectedhash)) | ||
if toget: | ||||
liscju
|
r29355 | store = storefactory.openstore(repo) | ||
various
|
r15168 | ret = store.get(toget) | ||
return ret | ||||
return ([], []) | ||||
Augie Fackler
|
r43346 | |||
Yuya Nishihara
|
r46025 | def downloadlfiles(ui, repo): | ||
Yuya Nishihara
|
r46026 | tonode = repo.changelog.node | ||
Na'Tosha Bard
|
r16691 | totalsuccess = 0 | ||
totalmissing = 0 | ||||
Yuya Nishihara
|
r46027 | for rev in repo.revs(b'file(%s)', b'path:' + lfutil.shortname): | ||
Yuya Nishihara
|
r46026 | success, missing = cachelfiles(ui, repo, tonode(rev)) | ||
Yuya Nishihara
|
r46025 | totalsuccess += len(success) | ||
totalmissing += len(missing) | ||||
Augie Fackler
|
r43347 | ui.status(_(b"%d additional largefiles cached\n") % totalsuccess) | ||
Na'Tosha Bard
|
r16691 | if totalmissing > 0: | ||
Augie Fackler
|
r43347 | ui.status(_(b"%d largefiles failed to download\n") % totalmissing) | ||
Na'Tosha Bard
|
r16691 | return totalsuccess, totalmissing | ||
Augie Fackler
|
r43346 | |||
def updatelfiles( | ||||
ui, repo, filelist=None, printmessage=None, normallookup=False | ||||
): | ||||
Augie Fackler
|
r46554 | """Update largefiles according to standins in the working directory | ||
FUJIWARA Katsunori
|
r23189 | |||
If ``printmessage`` is other than ``None``, it means "print (or | ||||
ignore, for false) message forcibly". | ||||
Augie Fackler
|
r46554 | """ | ||
FUJIWARA Katsunori
|
r23189 | statuswriter = lfutil.getstatuswriter(ui, repo, printmessage) | ||
Bryan O'Sullivan
|
r27820 | with repo.wlock(): | ||
various
|
r15168 | lfdirstate = lfutil.openlfdirstate(ui, repo) | ||
lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate) | ||||
if filelist is not None: | ||||
FUJIWARA Katsunori
|
r22197 | filelist = set(filelist) | ||
various
|
r15168 | lfiles = [f for f in lfiles if f in filelist] | ||
r50913 | update = {} | |||
dropped = set() | ||||
updated, removed = 0, 0 | ||||
wvfs = repo.wvfs | ||||
wctx = repo[None] | ||||
for lfile in lfiles: | ||||
lfileorig = os.path.relpath( | ||||
scmutil.backuppath(ui, repo, lfile), start=repo.root | ||||
) | ||||
standin = lfutil.standin(lfile) | ||||
standinorig = os.path.relpath( | ||||
scmutil.backuppath(ui, repo, standin), start=repo.root | ||||
) | ||||
if wvfs.exists(standin): | ||||
if wvfs.exists(standinorig) and wvfs.exists(lfile): | ||||
shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig)) | ||||
wvfs.unlinkpath(standinorig) | ||||
expecthash = lfutil.readasstandin(wctx[standin]) | ||||
if expecthash != b'': | ||||
if lfile not in wctx: # not switched to normal file | ||||
if repo.dirstate.get_entry(standin).any_tracked: | ||||
wvfs.unlinkpath(lfile, ignoremissing=True) | ||||
else: | ||||
dropped.add(lfile) | ||||
Matt Harbison
|
r35175 | |||
r50913 | # allocate an entry in largefiles dirstate to prevent | |||
# lfilesrepo.status() from reporting missing files as | ||||
# removed. | ||||
lfdirstate.hacky_extension_update_file( | ||||
lfile, | ||||
p1_tracked=True, | ||||
wc_tracked=True, | ||||
possibly_dirty=True, | ||||
) | ||||
update[lfile] = expecthash | ||||
else: | ||||
# Remove lfiles for which the standin is deleted, unless the | ||||
# lfile is added to the repository again. This happens when a | ||||
# largefile is converted back to a normal file: the standin | ||||
# disappears, but a new (normal) file appears as the lfile. | ||||
if ( | ||||
wvfs.exists(lfile) | ||||
and repo.dirstate.normalize(lfile) not in wctx | ||||
): | ||||
wvfs.unlinkpath(lfile) | ||||
removed += 1 | ||||
Mads Kiilerich
|
r20063 | |||
# largefile processing might be slow and be interrupted - be prepared | ||||
Pulkit Goyal
|
r48982 | lfdirstate.write(repo.currenttransaction()) | ||
Mads Kiilerich
|
r20063 | |||
Pulkit Goyal
|
r48458 | if lfiles: | ||
lfiles = [f for f in lfiles if f not in dropped] | ||||
Matt Harbison
|
r35175 | |||
Pulkit Goyal
|
r48458 | for f in dropped: | ||
repo.wvfs.unlinkpath(lfutil.standin(f)) | ||||
# This needs to happen for dropped files, otherwise they stay in | ||||
# the M state. | ||||
r48815 | lfdirstate._map.reset_state(f) | |||
Mads Kiilerich
|
r20063 | |||
Pulkit Goyal
|
r48458 | statuswriter(_(b'getting changed largefiles\n')) | ||
cachelfiles(ui, repo, None, lfiles) | ||||
r48451 | ||||
r50914 | for lfile in lfiles: | |||
update1 = 0 | ||||
Mads Kiilerich
|
r20063 | |||
r50914 | expecthash = update.get(lfile) | |||
if expecthash: | ||||
if not lfutil.copyfromcache(repo, expecthash, lfile): | ||||
# failed ... but already removed and set to normallookup | ||||
continue | ||||
# Synchronize largefile dirstate to the last modified | ||||
# time of the file | ||||
lfdirstate.hacky_extension_update_file( | ||||
lfile, | ||||
p1_tracked=True, | ||||
wc_tracked=True, | ||||
) | ||||
update1 = 1 | ||||
# copy the exec mode of largefile standin from the repository's | ||||
# dirstate to its state in the lfdirstate. | ||||
standin = lfutil.standin(lfile) | ||||
if wvfs.exists(standin): | ||||
# exec is decided by the users permissions using mask 0o100 | ||||
standinexec = wvfs.stat(standin).st_mode & 0o100 | ||||
st = wvfs.stat(lfile) | ||||
mode = st.st_mode | ||||
if standinexec != mode & 0o100: | ||||
# first remove all X bits, then shift all R bits to X | ||||
mode &= ~0o111 | ||||
if standinexec: | ||||
mode |= (mode >> 2) & 0o111 & ~util.umask | ||||
wvfs.chmod(lfile, mode) | ||||
Mads Kiilerich
|
r20063 | update1 = 1 | ||
r50914 | updated += update1 | |||
Mads Kiilerich
|
r20063 | |||
r50914 | lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup) | |||
various
|
r15168 | |||
Pulkit Goyal
|
r48982 | lfdirstate.write(repo.currenttransaction()) | ||
FUJIWARA Katsunori
|
r23189 | if lfiles: | ||
Augie Fackler
|
r43346 | statuswriter( | ||
Augie Fackler
|
r43347 | _(b'%d largefiles updated, %d removed\n') % (updated, removed) | ||
Augie Fackler
|
r43346 | ) | ||
various
|
r15168 | |||
Augie Fackler
|
@eh.command(
    b'lfpull',
    [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
    + cmdutil.remoteopts,
    _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
)
def lfpull(ui, repo, source=b"default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull from for the store machinery
    repo.lfpullsource = source

    revspec = opts.get('rev', [])
    if not revspec:
        raise error.Abort(_(b'no revisions specified'))
    revs = logcmdutil.revrange(repo, revspec)

    numcached = 0
    for rev in revs:
        ui.note(_(b'pulling largefiles for revision %d\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_(b"%d largefiles cached\n") % numcached)
@eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
def debuglfput(ui, repo, filepath, **kwargs):
    """upload the file at FILEPATH to the largefiles store and print its hash"""
    filehash = lfutil.hashfile(filepath)
    storefactory.openstore(repo).put(filepath, filehash)
    ui.write(b'%s\n' % filehash)
    return 0