lfcommands.py
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os, errno
import shutil

from mercurial import util, match as match_, hg, node, context, error, \
    cmdutil, scmutil, commands
from mercurial.i18n import _
from mercurial.lock import release

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

cmdtable = {}
command = cmdutil.command(cmdtable)
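
# each @command decorator below registers its function in cmdtable, which
# Mercurial's dispatcher reads when the largefiles extension is loaded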
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''
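
    # hypothetical example: convert ~/proj into ~/proj-lf, treating every
    # '*.bin' file and any file larger than 10 MB as a largefile:
    #   hg lfconvert --size 10 ~/proj ~/proj-lf '*.bin'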

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
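        # revmap records, for every source changeset already converted, the
        # node of its counterpart in the destination; it is consulted below
        # when rewriting the parent pointers of later changesets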
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
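    # used for 'lfconvert --to-normal': replay one source changeset in the
    # destination, expanding largefile standins back into real file contents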
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile \'%s\' from revision %s")
                        % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
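    # the forward (normal -> largefiles) direction: replay one source
    # changeset, replacing every file that qualifies as a largefile with a
    # standin that records the SHA-1 hash of its content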
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

# Generate list of changed files
def _getchangedfiles(ctx, parents):
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)
    return files

# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents

# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)

# Remap tag data using a revision map
def _converttags(ui, revmap, data):
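    # .hgtags content is lines of "<changeset hex> <tag name>"; the changeset
    # hashes change during conversion, so each id is rewritten through revmap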
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                    % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                    % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                                        name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''

    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
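    # ask the remote store, in one batched statlfile request, which hashes it
    # already has, and upload only the ones that are missing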
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)
    toget = []

    for lfile in lfiles:
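        # a standin's content is the SHA-1 hash of the largefile it stands
        # for; collect the hashes that are not yet in the local cache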
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError, err:
            if err.errno == errno.ENOENT:
                continue # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def downloadlfiles(ui, repo, rev=None):
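    # walk the changesets matched by 'rev' (restricted to those touching
    # standins under .hglf) and make sure every largefile they reference is
    # present in the local cache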
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing

def updatelfiles(ui, repo, filelist=None, printmessage=True,
                 normallookup=False):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}
        updated, removed = 0, 0
        for lfile in lfiles:
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                if (os.path.exists(absstandin + '.orig') and
                    os.path.exists(abslfile)):
                    shutil.copyfile(abslfile, abslfile + '.orig')
                    util.unlinkpath(absstandin + '.orig')
                expecthash = lfutil.readstandin(repo, lfile)
                if (expecthash != '' and
                    (not os.path.exists(abslfile) or
                     expecthash != lfutil.hashfile(abslfile))):
                    if lfile not in repo[None]: # not switched to normal file
                        util.unlinkpath(abslfile, ignoremissing=True)
                    # use normallookup() to allocate entry in largefiles
                    # dirstate, because lack of it misleads
                    # lfilesrepo.status() into recognition that such cache
                    # missing files are REMOVED.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                    repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            if printmessage:
                ui.status(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)
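
            # the needed largefiles are now in the local cache; copy each one
            # into the working directory and record its state in lfdirstate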
            for lfile in lfiles:
                update1 = 0

                expecthash = update.get(lfile)
                if expecthash:
                    if not lfutil.copyfromcache(repo, expecthash, lfile):
                        # failed ... but already removed and set to normallookup
                        continue
                    # Synchronize largefile dirstate to the last modified
                    # time of the file
                    lfdirstate.normal(lfile)
                    update1 = 1

                # copy the state of largefile standin from the repository's
                # dirstate to its state in the lfdirstate.
                abslfile = repo.wjoin(lfile)
                absstandin = repo.wjoin(lfutil.standin(lfile))
                if os.path.exists(absstandin):
                    mode = os.stat(absstandin).st_mode
                    if mode != os.stat(abslfile).st_mode:
                        os.chmod(abslfile, mode)
                        update1 = 1

                updated += update1

                lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        if filelist is not None:
            # If "local largefile" is chosen at file merging, it is
            # not listed in "filelist" (= dirstate syncing is
            # omitted), because the standin file is not changed before and
            # after merging.
            # But the status of such files may have to be changed by
            # merging. For example, a locally modified ("M") largefile
            # has to become re-added ("A") if it is a "normal" file in
            # the target revision of a linear merge.
            for lfile in lfdirstate:
                if lfile not in filelist:
                    lfutil.synclfdirstate(repo, lfdirstate, lfile, True)

        lfdirstate.write()
        if printmessage and lfiles:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
"""pull largefiles for the specified revisions from the specified source
Pull largefiles that are referenced from local changesets but missing
locally, pulling from a remote repository to the local cache.
If SOURCE is omitted, the 'default' path will be used.
See :hg:`help urls` for more information.
.. container:: verbose
Some examples:
- pull largefiles for all branch heads::
hg lfpull -r "head() and not closed()"
- pull largefiles on the default branch::
hg lfpull -r "branch(default)"
"""
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise util.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)