lfcommands.py
541 lines
| 20.0 KiB
| text/x-python
|
PythonLexer
various
|
r15168 | # Copyright 2009-2010 Gregory P. Ward | ||
# Copyright 2009-2010 Intelerad Medical Systems Incorporated | ||||
# Copyright 2010-2011 Fog Creek Software | ||||
# Copyright 2010-2011 Unity Technologies | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
Greg Ward
|
r15252 | '''High-level command function for lfconvert, plus the cmdtable.''' | ||
various
|
r15168 | |||
import os | ||||
import shutil | ||||
Na'Tosha Bard
|
r16691 | from mercurial import util, match as match_, hg, node, context, error, \ | ||
cmdutil, scmutil | ||||
various
|
r15168 | from mercurial.i18n import _ | ||
Mads Kiilerich
|
r16717 | from mercurial.lock import release | ||
various
|
r15168 | |||
import lfutil | ||||
import basestore | ||||
# -- Commands ---------------------------------------------------------- | ||||
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # size threshold (MB) only matters when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        # NOTE: wlock is taken before lock, matching Mercurial's required
        # lock-acquisition order.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        # revmap: source node -> converted destination node
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # remove the largefiles written to the destination working dir
            # during conversion; only the standins belong in history
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    # prune any directories left empty by the unlink above
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)
            ui.progress(_('converting revisions'), None)

        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
various
|
r15168 | |||
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Commit one source changeset into rdst, replacing each standin
    with the real largefile content (used when converting back to a
    normal repository).'''
    # Map the source changeset's parents to their converted counterparts.
    parents = _convertparents(ctx, revmap)

    # Files touched by this changeset (including merge discrepancies).
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) not in files:
            # plain file: pass it through unchanged
            return _getnormalcontext(repo.ui, ctx, f, revmap)
        # a standin changed: substitute the actual largefile content.
        # If the standin isn't in the manifest it was removed or
        # renamed; IOError tells memctx to drop the file.
        try:
            fctx = ctx.filectx(lfutil.standin(f))
        except error.LookupError:
            raise IOError
        renamed = fctx.renamed()
        if renamed:
            renamed = lfutil.splitstandin(renamed[0])

        lfhash = fctx.data().strip()
        path = lfutil.findfile(rsrc, lfhash)
        ### TODO: What if the file is not cached?
        data = ''
        fd = None
        try:
            fd = open(path, 'rb')
            data = fd.read()
        finally:
            if fd:
                fd.close()
        return context.memfilectx(f, data, 'l' in fctx.flags(),
                                  'x' in fctx.flags(), renamed)

    # Record standins under their real (split) names in the destination.
    dstfiles = []
    for name in files:
        if lfutil.isstandin(name):
            name = lfutil.splitstandin(name)
        dstfiles.append(name)
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
various
|
r15168 | |||
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert one source changeset into rdst, turning files that match
    *matcher* or exceed the *size* threshold into largefiles tracked by
    standins.

    lfiles and normalfiles are updated in place so a file's
    classification persists across the whole conversion; lfiletohash
    caches the sha1 of each largefile as last written to the
    destination working directory.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    # symlinks are never tracked as largefiles
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                util.makedirs(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # BUGFIX: initialize fd before the try block so that a
                    # failed open() does not raise NameError in the finally
                    # clause, masking the real error (matches _addchangeset).
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            # standin content is the largefile's hash plus a newline
            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit the converted changeset into rdst and record the mapping
    from the source node to the newly created destination node.'''
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(memctx)
    # keep the destination working-copy parent in sync with the commit
    rdst.setparents(newnode)
    revmap[ctx.node()] = rdst.changelog.tip()
Levi Bard
|
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files affected by ctx relative to *parents*.

    For merges (no null parent) this also includes files present in
    either parent manifest but absent from the merge result, plus
    files whose recorded revision differs from at least one parent.'''
    changed = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        # files removed by the merge relative to either parent
        changed |= (set(mp1) | set(mp2)) - set(mc)
        # files whose content differs from at least one parent
        for fn in mc:
            if mc[fn] != mp1.get(fn, None) or mc[fn] != mp2.get(fn, None):
                changed.add(fn)
    return changed
# Convert src parents to dst parents | ||||
def _convertparents(ctx, revmap): | ||||
parents = [] | ||||
for p in ctx.parents(): | ||||
parents.append(revmap[p.node()]) | ||||
while len(parents) < 2: | ||||
parents.append(node.nullid) | ||||
return parents | ||||
# Get memfilectx for a normal file
def _getnormalcontext(ui, ctx, f, revmap):
    '''Build a memfilectx for a non-largefile.  Raises IOError when the
    file is absent from the manifest (removed or renamed), which tells
    memctx to drop it.'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        # the tags file embeds changeset ids that must be remapped
        data = _converttags(ui, revmap, data)
    return context.memfilectx(f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
# Remap tag data using a revision map | ||||
def _converttags(ui, revmap, data): | ||||
newdata = [] | ||||
for line in data.splitlines(): | ||||
try: | ||||
id, name = line.split(' ', 1) | ||||
except ValueError: | ||||
ui.warn(_('skipping incorrectly formatted tag %s\n' | ||||
% line)) | ||||
continue | ||||
try: | ||||
newid = node.bin(id) | ||||
except TypeError: | ||||
ui.warn(_('skipping incorrectly formatted id %s\n' | ||||
% id)) | ||||
continue | ||||
try: | ||||
newdata.append('%s %s\n' % (node.hex(revmap[newid]), | ||||
name)) | ||||
except KeyError: | ||||
Matt Mackall
|
r16231 | ui.warn(_('no mapping for id %s\n') % id) | ||
Levi Bard
|
r15811 | continue | ||
return ''.join(newdata) | ||||
various
|
r15168 | def _islfile(file, ctx, matcher, size): | ||
Greg Ward
|
r15252 | '''Return true if file should be considered a largefile, i.e. | ||
matcher matches it or it is larger than size.''' | ||||
# never store special .hg* files as largefiles | ||||
various
|
r15168 | if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs': | ||
return False | ||||
if matcher and matcher(file): | ||||
return True | ||||
try: | ||||
return ctx.filectx(file).size() >= size * 1024 * 1024 | ||||
except error.LookupError: | ||||
return False | ||||
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    # only push the hashes the central store doesn't already have
    files = filter(lambda h: not store.exists(h), files)
    total = len(files)
    for at, hash in enumerate(files):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=total)
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
    ui.progress(_('uploading largefiles'), None)
various
|
r15168 | |||
def verifylfiles(ui, repo, all=False, contents=False):
    '''Check that every big file revision in the current changeset is
    available in the central store.  With --contents, additionally
    verify each revision's data (its SHA-1 must match the revision
    id).  With --all, examine every changeset in the repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    return basestore._openstore(repo).verify(revs, contents=contents)
Na'Tosha Bard
|
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    # optionally restrict the work to an explicit subset of largefiles
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    for lfile in lfiles:
        # If we are mid-merge, then we have to trust the standin that is in the
        # working copy to have the correct hashvalue.  This is because the
        # original hg.merge() already updated the standin as part of the normal
        # merge process -- we just have to update the largefile to match.
        if (getattr(repo, "_ismerging", False) and
             os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
            expectedhash = lfutil.readstandin(repo, lfile)
        else:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()

        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'.  in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    # nothing needed fetching: report no downloads and no misses
    return ([], [])
Na'Tosha Bard
|
def downloadlfiles(ui, repo, rev=None):
    '''Fetch into the local cache every largefile referenced by the
    revisions selected by *rev*, reporting per-run totals.

    Returns a (totalsuccess, totalmissing) pair summed over all
    visited changesets.'''
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass

    totalsuccess = 0
    totalmissing = 0
    for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                      prepare):
        success, missing = cachelfiles(ui, repo, ctx.node())
        totalsuccess += len(success)
        totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
various
|
def updatelfiles(ui, repo, filelist=None, printmessage=True):
    '''Bring working-copy largefiles in sync with their standins.

    When *filelist* is given, only those largefiles are considered.
    Status messages are written unless printmessage is false.'''
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.', lfiles)

        updated, removed = 0, 0
        for lfile in lfiles:
            # _updatelfile returns 1 (modified), -1 (removed), 0
            # (unchanged) or None (largefile missing from the cache);
            # the truthiness guard keeps None out of the arithmetic
            change = _updatelfile(repo, lfdirstate, lfile)
            if change:
                if change > 0:
                    updated += change
                else:
                    removed -= change

        if printmessage and (removed or updated) and not printed:
            ui.status(_('getting changed largefiles\n'))
            printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()
def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        # a standin.orig exists, so the standin was saved before being
        # overwritten; preserve the current largefile alongside it too
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                # use normallookup() to allocate entry in largefiles dirstate,
                # because lack of it misleads lfilesrepo.status() into
                # recognition that such cache missing files are REMOVED.
                lfdirstate.normallookup(lfile)
                return None # don't try to set the mode
            ret = 1
        # keep the largefile's mode (e.g. executable bit) in sync with
        # the standin's
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        # Remove lfiles for which the standin is deleted, unless the
        # lfile is added to the repository again. This happens when a
        # largefile is converted back to a normal file: the standin
        # disappears, but a new (normal) file appears as the lfile.
        if os.path.exists(abslfile) and lfile not in repo[None]:
            util.unlinkpath(abslfile)
            ret = -1
    # mirror the standin's dirstate status into the largefiles dirstate
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        # When rebasing, we need to synchronize the standin and the largefile,
        # because otherwise the largefile will get reverted. But for commit's
        # sake, we have to mark the file as unclean.
        if getattr(repo, "_isrebasing", False):
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret
Na'Tosha Bard
|
def catlfile(repo, lfile, rev, filename):
    '''Write the contents of largefile *lfile* at revision *rev* to
    *filename* (as resolved by cmdutil.makefileobj), downloading it
    into the user cache first when necessary.

    Returns 0 on success; raises util.Abort when the largefile is not
    cached and cannot be downloaded.'''
    hash = lfutil.readstandin(repo, lfile, rev)
    if not lfutil.inusercache(repo.ui, hash):
        store = basestore._openstore(repo)
        success, missing = store.get([(lfile, hash)])
        if len(success) != 1:
            raise util.Abort(
                _('largefile %s is not in cache and could not be downloaded')
                    % lfile)
    path = lfutil.usercachepath(repo.ui, hash)
    # BUGFIX: close both file objects even if the copy raises, instead
    # of leaking them on error
    fpout = cmdutil.makefileobj(repo, filename)
    try:
        fpin = open(path, "rb")
        try:
            fpout.write(fpin.read())
        finally:
            fpin.close()
    finally:
        fpout.close()
    return 0
various
|
# -- hg commands declarations ------------------------------------------------

# Command table consumed by hg's extension loader: maps the command
# name to a (function, options, synopsis) tuple.  Each option entry is
# (short flag, long flag, default value, help text[, value placeholder]).
cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                  ('', 'to-normal', False,
                   _('convert from a largefiles repo to a normal repo')),
                  ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }