# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os, errno
import shutil

from mercurial import util, match as match_, hg, node, context, error, \
    cmdutil, scmutil, commands
from mercurial.i18n import _
from mercurial.lock import release

from hgext.convert import convcmd
from hgext.convert import filemap

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
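        # revmap records which destination node each converted source node
        # became; nullid maps to nullid so parentless changesets resolve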
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None
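
            # hash of each largefile as last written to DEST; unchanged
            # revisions are skipped, and the keys are used afterwards to
            # clean the largefile copies out of DEST's working directory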
            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
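            # Converting back to normal files: feed the convert extension a
            # source that renames standins back to their original paths and
            # substitutes each largefile's real contents for the stored hash.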
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    fp = open(path, 'rb')

                    try:
                        return (fp.read(), f[1])
                    finally:
                        fp.close()
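
            # converter subclass that simply wraps its source in lfsource so
            # the regular convert machinery sees ordinary file contents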
            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)

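# Convert a single source changeset: classify each changed file as a
# largefile or a normal file, write standins (and largefile contents) for
# the former, and commit the corresponding changeset in the destination.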
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink')
                                          % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)
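
    # memctx callback: return a memfilectx for each destination file
    # (standin contents for largefiles, original data for normal files),
    # or None when the file is not present in this changeset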
    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
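
# Commit the collected files to the destination as a memory changeset and
# record which destination node the source changeset was converted to.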
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    lfutil.copyalltostore(rdst, ret)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

# Generate list of changed files
def _getchangedfiles(ctx, parents):
    files = set(ctx.files())
    if node.nullid not in parents:
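        # merge changeset: also include files the merge dropped and files
        # whose merged content differs from either parent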
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)
    return files

# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents

# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)

# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                    % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                    % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                                        name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise error.Abort(_('largefile %s missing from store'
                                ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''

    if all:
        revs = repo.revs('all()')
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno == errno.ENOENT:
                continue # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret
    return ([], [])
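
# Walk the given revisions (every revision when rev is None) and make sure
# all largefiles they reference are present in the local cache; returns the
# number of files newly cached and the number that could not be found.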
def downloadlfiles(ui, repo, rev=None):
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing

def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)

    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]
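
        # largefiles whose contents still need to be copied in from the
        # cache, keyed by filename with the expected content hash as value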
        update = {}
        updated, removed = 0, 0
        for lfile in lfiles:
            abslfile = repo.wjoin(lfile)
            abslfileorig = scmutil.origpath(ui, repo, abslfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            absstandinorig = scmutil.origpath(ui, repo, absstandin)
            if os.path.exists(absstandin):
                if (os.path.exists(absstandinorig) and
                    os.path.exists(abslfile)):
                    shutil.copyfile(abslfile, abslfileorig)
                    util.unlinkpath(absstandinorig)
                expecthash = lfutil.readstandin(repo, lfile)
                if expecthash != '':
                    if lfile not in repo[None]: # not switched to normal file
                        util.unlinkpath(abslfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                    repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the state of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                mode = os.stat(absstandin).st_mode
                if mode != os.stat(abslfile).st_mode:
                    os.chmod(abslfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                         removed))
    finally:
        wlock.release()

@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)