lfcommands.py
605 lines
| 21.6 KiB
| text/x-python
|
PythonLexer
various
|
r15168 | # Copyright 2009-2010 Gregory P. Ward | ||
# Copyright 2009-2010 Intelerad Medical Systems Incorporated | ||||
# Copyright 2010-2011 Fog Creek Software | ||||
# Copyright 2010-2011 Unity Technologies | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
Greg Ward
|
r15252 | '''High-level command function for lfconvert, plus the cmdtable.''' | ||
liscju
|
r29308 | from __future__ import absolute_import | ||
various
|
r15168 | |||
liscju
|
r29308 | import errno | ||
Augie Fackler
|
r29341 | import hashlib | ||
liscju
|
r29308 | import os | ||
various
|
r15168 | import shutil | ||
from mercurial.i18n import _ | ||||
liscju
|
r29308 | from mercurial import ( | ||
cmdutil, | ||||
context, | ||||
error, | ||||
Matt Harbison
|
r41091 | exthelper, | ||
liscju
|
r29308 | hg, | ||
lock, | ||||
liscju
|
r29317 | match as matchmod, | ||
liscju
|
r29308 | node, | ||
Pulkit Goyal
|
r35349 | pycompat, | ||
liscju
|
r29308 | scmutil, | ||
util, | ||||
) | ||||
Matt Harbison
|
r25325 | |||
liscju
|
r29308 | from ..convert import ( | ||
convcmd, | ||||
filemap, | ||||
) | ||||
from . import ( | ||||
lfutil, | ||||
storefactory | ||||
) | ||||
release = lock.release | ||||
various
|
r15168 | |||
# -- Commands ----------------------------------------------------------

# Extension helper used to register the commands defined in this module.
eh = exthelper.exthelper()
Matt Harbison
|
@eh.command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
     ('', 'to-normal', False,
      _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''
    opts = pycompat.byteskwargs(opts)
    tolfile = not opts['to_normal']
    if tolfile:
        # only needed in the normal->largefiles direction
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(
            None, rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            with ui.makeprogress(_('converting revisions'),
                                 unit=_('revisions'),
                                 total=rsrc['tip'].rev()) as progress:
                for ctx in ctxs:
                    progress.update(ctx.rev())
                    _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                        lfiles, normalfiles, matcher, size, lfiletohash)

            # drop the working-copy largefile directory left behind
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash:
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    # directory not empty / already gone - best effort cleanup
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            # temporarily swap in our converter subclass for the duration of
            # the convert run, then restore the original
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest, source_type='hg', dest_type='hg')
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
various
|
r15168 | |||
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert one source changeset ctx into rdst, replacing largefiles
    with standins.

    Mutates lfiles/normalfiles (classification caches), lfiletohash
    (largefile path -> content hash of the version last written to the
    destination working directory) and revmap (source node -> dest node,
    updated via _commitcontext).'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                if renamed is None:
                    # the code below assumes renamed to be a boolean or a list
                    # and won't quite work with the value None
                    renamed = False
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    # a symlink can never be a largefile
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            fstandin = lfutil.standin(f)
            dstfiles.append(fstandin)
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                # (fix: hash the data directly instead of seeding sha1 with
                # '', which is a no-op on Python 2 and a TypeError on
                # Python 3)
                hash = node.hex(hashlib.sha1(ctx[f].data()).digest())
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, fstandin, hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        srcfname = lfutil.splitstandin(f)
        if srcfname is not None:
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, memctx, f,
                                      lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit the converted changeset into rdst and record the
    source-node -> destination-node mapping in revmap.'''
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    # make sure every converted largefile also lands in the local store
    lfutil.copyalltostore(rdst, ret)
    rdst.setparents(ret)
    # children of ctx look their converted parent up through revmap
    revmap[ctx.node()] = rdst.changelog.tip()
Levi Bard
|
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files touched by ctx, including files that differ
    from either parent (skipped when a parent is nullid, i.e. a root).'''
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        for pctx in ctx.parents():
            # manifest.diff() yields the paths that differ between the two
            files.update(pctx.manifest().diff(mc))
    return files
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    '''Map ctx's parent nodes through revmap, padding the result with
    nullid so it always has exactly two entries.'''
    parents = [revmap[p.node()] for p in ctx.parents()]
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    '''Build a memfilectx for a non-largefile f in ctx, or None if f was
    removed/renamed (lookup fails).'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        # tag entries reference source-repo nodes; rewrite them for dest
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(repo, ctx, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    '''Rewrite .hgtags content so each node id is mapped through revmap.

    Malformed lines, unparsable ids and ids with no mapping are skipped
    with a warning.'''
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n') % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n') % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]), name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
    return ''.join(newdata)
various
|
r15168 | def _islfile(file, ctx, matcher, size): | ||
Greg Ward
|
r15252 | '''Return true if file should be considered a largefile, i.e. | ||
matcher matches it or it is larger than size.''' | ||||
# never store special .hg* files as largefiles | ||||
various
|
r15168 | if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs': | ||
return False | ||||
if matcher and matcher(file): | ||||
return True | ||||
try: | ||||
return ctx.filectx(file).size() >= size * 1024 * 1024 | ||||
except error.LookupError: | ||||
return False | ||||
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = storefactory.openstore(rsrc, rdst, put=True)

    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # only upload hashes the remote store does not already have
    files = [h for h in files if not retval[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    with ui.makeprogress(_('uploading largefiles'), unit=_('files'),
                         total=len(files)) as progress:
        for at, hash in enumerate(files):
            progress.update(at)
            source = lfutil.findfile(rsrc, hash)
            if not source:
                raise error.Abort(_('largefile %s missing from store'
                                    ' (needs to be uploaded)') % hash)
            # XXX check for errors here
            store.put(source, hash)
various
|
r15168 | |||
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    revs = repo.revs('all()') if all else ['.']
    store = storefactory.openstore(repo)
    return store.verify(revs, contents=contents)
Na'Tosha Bard
|
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    ctx = repo[node]
    for lfile in lfiles:
        try:
            expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            continue # node must be None and standin wasn't found in wctx
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = storefactory.openstore(repo)
        return store.get(toget)
    return ([], [])
Na'Tosha Bard
|
def downloadlfiles(ui, repo, rev=None):
    '''Cache the largefiles referenced by the given revisions (all
    revisions when rev is None) and report the totals.

    Returns a (totalsuccess, totalmissing) pair of counts.'''
    match = scmutil.match(repo[None], [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass

    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, match, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
FUJIWARA Katsunori
|
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}        # lfile -> expected hash, to be fetched below
        dropped = set()
        updated, removed = 0, 0
        wvfs = repo.wvfs
        wctx = repo[None]
        for lfile in lfiles:
            rellfile = lfile
            rellfileorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(rellfile)),
                start=repo.root)
            relstandin = lfutil.standin(lfile)
            relstandinorig = os.path.relpath(
                scmutil.origpath(ui, repo, wvfs.join(relstandin)),
                start=repo.root)
            if wvfs.exists(relstandin):
                if (wvfs.exists(relstandinorig) and
                    wvfs.exists(rellfile)):
                    # preserve the backed-up largefile alongside the standin's
                    # .orig, then drop the standin backup
                    shutil.copyfile(wvfs.join(rellfile),
                                    wvfs.join(rellfileorig))
                    wvfs.unlinkpath(relstandinorig)
                expecthash = lfutil.readasstandin(wctx[relstandin])
                if expecthash != '':
                    if lfile not in wctx: # not switched to normal file
                        if repo.dirstate[relstandin] != '?':
                            wvfs.unlinkpath(rellfile, ignoremissing=True)
                        else:
                            dropped.add(rellfile)

                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (wvfs.exists(rellfile) and
                    repo.dirstate.normalize(lfile) not in wctx):
                    wvfs.unlinkpath(rellfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()
        if lfiles:
            lfiles = [f for f in lfiles if f not in dropped]

            for f in dropped:
                repo.wvfs.unlinkpath(lfutil.standin(f))

                # This needs to happen for dropped files, otherwise they stay in
                # the M state.
                lfutil.synclfdirstate(repo, lfdirstate, f, normallookup)

            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

            for lfile in lfiles:
                update1 = 0

                expecthash = update.get(lfile)
                if expecthash:
                    if not lfutil.copyfromcache(repo, expecthash, lfile):
                        # failed ... but already removed and set to normallookup
                        continue
                    # Synchronize largefile dirstate to the last modified
                    # time of the file
                    lfdirstate.normal(lfile)
                    update1 = 1

                # copy the exec mode of largefile standin from the repository's
                # dirstate to its state in the lfdirstate.
                rellfile = lfile
                relstandin = lfutil.standin(lfile)
                if wvfs.exists(relstandin):
                    # exec is decided by the users permissions using mask 0o100
                    standinexec = wvfs.stat(relstandin).st_mode & 0o100
                    st = wvfs.stat(rellfile)
                    mode = st.st_mode
                    if standinexec != mode & 0o100:
                        # first remove all X bits, then shift all R bits to X
                        mode &= ~0o111
                        if standinexec:
                            mode |= (mode >> 2) & 0o111 & ~util.umask
                        wvfs.chmod(rellfile, mode)
                        update1 = 1

                updated += update1

                lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
Matt Harbison
|
@eh.command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + cmdutil.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    repo.lfpullsource = source

    revs = opts.get(r'rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %d\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
Boris Feld
|
r35579 | |||
Matt Harbison
|
@eh.command('debuglfput',
    [] + cmdutil.remoteopts,
    _('FILE'))
def debuglfput(ui, repo, filepath, **kwargs):
    '''upload the given file to the store and print its content hash'''
    hash = lfutil.hashfile(filepath)
    storefactory.openstore(repo).put(filepath, hash)
    ui.write('%s\n' % hash)
    return 0