##// END OF EJS Templates
largefile: use `update_file` instead of `normallookup` in `updatelfiles`...
marmoute -
r48522:47dce5a9 default
parent child Browse files
Show More
@@ -1,670 +1,675 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import errno
12 import errno
13 import os
13 import os
14 import shutil
14 import shutil
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.node import (
17 from mercurial.node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 )
20 )
21
21
22 from mercurial import (
22 from mercurial import (
23 cmdutil,
23 cmdutil,
24 context,
24 context,
25 error,
25 error,
26 exthelper,
26 exthelper,
27 hg,
27 hg,
28 lock,
28 lock,
29 match as matchmod,
29 match as matchmod,
30 pycompat,
30 pycompat,
31 scmutil,
31 scmutil,
32 util,
32 util,
33 )
33 )
34 from mercurial.utils import hashutil
34 from mercurial.utils import hashutil
35
35
36 from ..convert import (
36 from ..convert import (
37 convcmd,
37 convcmd,
38 filemap,
38 filemap,
39 )
39 )
40
40
41 from . import lfutil, storefactory
41 from . import lfutil, storefactory
42
42
43 release = lock.release
43 release = lock.release
44
44
45 # -- Commands ----------------------------------------------------------
45 # -- Commands ----------------------------------------------------------
46
46
47 eh = exthelper.exthelper()
47 eh = exthelper.exthelper()
48
48
49
49
@eh.command(
    b'lfconvert',
    [
        (
            b's',
            b'size',
            b'',
            _(b'minimum size (MB) for files to be converted as largefiles'),
            b'SIZE',
        ),
        (
            b'',
            b'to-normal',
            False,
            _(b'convert from a largefiles repo to a normal repo'),
        ),
    ],
    _(b'hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True,
)
def lfconvert(ui, src, dest, *pats, **opts):
    """convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all."""

    opts = pycompat.byteskwargs(opts)
    if opts[b'to_normal']:
        tolfile = False
    else:
        tolfile = True
        # the size threshold only matters in the to-largefile direction
        size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_(b'initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (
            rsrc[ctx]
            for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
        )
        # maps source changeset node -> destination changeset node
        revmap = {rsrc.nullid: rdst.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, b'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, b'', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            with ui.makeprogress(
                _(b'converting revisions'),
                unit=_(b'revisions'),
                total=rsrc[b'tip'].rev(),
            ) as progress:
                for ctx in ctxs:
                    progress.update(ctx.rev())
                    _lfconvert_addchangeset(
                        rsrc,
                        rdst,
                        ctx,
                        revmap,
                        lfiles,
                        normalfiles,
                        matcher,
                        size,
                        lfiletohash,
                    )

            # drop working-copy leftovers produced while writing standins
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add(b'largefiles')
                scmutil.writereporequirements(rdst)
        else:
            # to-normal direction: reuse the generic convert machinery,
            # substituting real largefile contents for standin hashes.

            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    # map the .hglf/ prefix away so standins land at the
                    # real file paths in the destination
                    self.filemapper.rename[lfutil.shortname] = b'.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (
                        not realname.startswith(lfutil.shortnameslash)
                        or f[0] is None
                    ):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(
                            _(b"missing largefile for '%s' in %s")
                            % (realname, realrev)
                        )
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(
                        ui, src, dest, revmapfile, opts
                    )

            # every referenced largefile must be available before converting
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_(b"all largefiles must be present locally"))

            # temporarily monkey-patch the converter class used by convcmd
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(
                    ui, src, dest, source_type=b'hg', dest_type=b'hg'
                )
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
227
227
228
228
def _lfconvert_addchangeset(
    rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
):
    """Convert one source changeset ``ctx`` into ``rdst``.

    Classifies each changed file as largefile or normal (recording the
    decision in the ``lfiles``/``normalfiles`` sets so it is made only
    once per file), writes/updates standins for largefiles, and commits
    the converted changeset via :func:`_commitcontext`.  ``revmap`` and
    ``lfiletohash`` are mutated as shared conversion state.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.copysource()
                if renamed is None:
                    # the code below assumes renamed to be a boolean or a list
                    # and won't quite work with the value None
                    renamed = False
                renamedlfile = renamed and renamed in lfiles
                islfile |= renamedlfile
                if b'l' in fctx.flags():
                    # a symlink can never be stored as a largefile
                    if renamedlfile:
                        raise error.Abort(
                            _(b'renamed/copied largefile %s becomes symlink')
                            % f
                        )
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            fstandin = lfutil.standin(f)
            dstfiles.append(fstandin)
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if b'l' in fctx.flags():
                    renamed = fctx.copysource()
                    if renamed and renamed in lfiles:
                        raise error.Abort(
                            _(b'largefile %s becomes symlink') % f
                        )

                # largefile was modified, update standins
                m = hashutil.sha1(b'')
                m.update(ctx[f].data())
                hash = hex(m.digest())
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = b'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, fstandin, hash, executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # Supply file contents for the memctx commit below.  Standins get
        # the current content hash; everything else is handled generically.
        srcfname = lfutil.splitstandin(f)
        if srcfname is not None:
            # if the file isn't in the manifest then it was removed
            # or renamed, return None to indicate this
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.copysource()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed)

            return context.memfilectx(
                repo,
                memctx,
                f,
                lfiletohash[srcfname] + b'\n',
                b'l' in fctx.flags(),
                b'x' in fctx.flags(),
                renamed,
            )
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
320
320
321
321
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit the converted changeset into ``rdst``.

    Builds an in-memory changeset mirroring ``ctx``'s metadata
    (description, user, date, extras) over ``dstfiles``, commits it,
    copies any new largefile contents into the store, and records the
    source-node -> destination-node mapping in ``revmap``.
    """
    mctx = context.memctx(
        rdst,
        parents,
        ctx.description(),
        dstfiles,
        getfilectx,
        ctx.user(),
        ctx.date(),
        ctx.extra(),
    )
    ret = rdst.commitctx(mctx)
    # make sure the committed largefile contents are present in the store
    lfutil.copyalltostore(rdst, ret)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()
337
337
338
338
339 # Generate list of changed files
339 # Generate list of changed files
340 def _getchangedfiles(ctx, parents):
340 def _getchangedfiles(ctx, parents):
341 files = set(ctx.files())
341 files = set(ctx.files())
342 if ctx.repo().nullid not in parents:
342 if ctx.repo().nullid not in parents:
343 mc = ctx.manifest()
343 mc = ctx.manifest()
344 for pctx in ctx.parents():
344 for pctx in ctx.parents():
345 for fn in pctx.manifest().diff(mc):
345 for fn in pctx.manifest().diff(mc):
346 files.add(fn)
346 files.add(fn)
347 return files
347 return files
348
348
349
349
350 # Convert src parents to dst parents
350 # Convert src parents to dst parents
351 def _convertparents(ctx, revmap):
351 def _convertparents(ctx, revmap):
352 parents = []
352 parents = []
353 for p in ctx.parents():
353 for p in ctx.parents():
354 parents.append(revmap[p.node()])
354 parents.append(revmap[p.node()])
355 while len(parents) < 2:
355 while len(parents) < 2:
356 parents.append(ctx.repo().nullid)
356 parents.append(ctx.repo().nullid)
357 return parents
357 return parents
358
358
359
359
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    """Return a memfilectx for normal (non-standin) file ``f`` in ``ctx``.

    Returns None when the file is not present (removed/renamed).  The
    special ``.hgtags`` file has its tag ids rewritten through ``revmap``
    so tags point at the converted changesets.
    """
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    renamed = fctx.copysource()

    data = fctx.data()
    if f == b'.hgtags':
        # tag ids reference source nodes; remap them to destination nodes
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(
        repo, ctx, f, data, b'l' in fctx.flags(), b'x' in fctx.flags(), renamed
    )
374
374
375
375
def _converttags(ui, revmap, data):
    """Rewrite ``.hgtags`` content so ids point at converted revisions.

    Lines that are malformed, contain an unparsable id, or reference a
    node missing from ``revmap`` are warned about and dropped.
    """
    converted = []
    for tagline in data.splitlines():
        try:
            oldid, tagname = tagline.split(b' ', 1)
        except ValueError:
            ui.warn(_(b'skipping incorrectly formatted tag %s\n') % tagline)
            continue
        try:
            oldnode = bin(oldid)
        except TypeError:
            ui.warn(_(b'skipping incorrectly formatted id %s\n') % oldid)
            continue
        try:
            converted.append(b'%s %s\n' % (hex(revmap[oldnode]), tagname))
        except KeyError:
            ui.warn(_(b'no mapping for id %s\n') % oldid)
            continue
    return b''.join(converted)
396
396
397
397
398 def _islfile(file, ctx, matcher, size):
398 def _islfile(file, ctx, matcher, size):
399 """Return true if file should be considered a largefile, i.e.
399 """Return true if file should be considered a largefile, i.e.
400 matcher matches it or it is larger than size."""
400 matcher matches it or it is larger than size."""
401 # never store special .hg* files as largefiles
401 # never store special .hg* files as largefiles
402 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
402 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
403 return False
403 return False
404 if matcher and matcher(file):
404 if matcher and matcher(file):
405 return True
405 return True
406 try:
406 try:
407 return ctx.filectx(file).size() >= size * 1024 * 1024
407 return ctx.filectx(file).size() >= size * 1024 * 1024
408 except error.LookupError:
408 except error.LookupError:
409 return False
409 return False
410
410
411
411
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = storefactory.openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug(b"sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # only upload the hashes the remote store does not already have
    files = [h for h in files if not retval[h]]
    ui.debug(b"%d largefiles need to be uploaded\n" % len(files))

    with ui.makeprogress(
        _(b'uploading largefiles'), unit=_(b'files'), total=len(files)
    ) as progress:
        for hash in files:
            progress.update(at)
            source = lfutil.findfile(rsrc, hash)
            if not source:
                raise error.Abort(
                    _(
                        b'largefile %s missing from store'
                        b' (needs to be uploaded)'
                    )
                    % hash
                )
            # XXX check for errors here
            store.put(source, hash)
            at += 1
443
443
444
444
def verifylfiles(ui, repo, all=False, contents=False):
    """Verify that every largefile revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository."""
    revs = repo.revs(b'all()') if all else [b'.']
    store = storefactory.openstore(repo)
    return store.verify(revs, contents=contents)
458
458
459
459
def cachelfiles(ui, repo, node, filelist=None):
    """cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found."""
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        # restrict to the requested subset
        lfiles = set(lfiles) & set(filelist)
    toget = []

    ctx = repo[node]
    for lfile in lfiles:
        try:
            expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
        except IOError as err:
            if err.errno == errno.ENOENT:
                continue  # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            # not available locally; needs to be fetched from a store
            toget.append((lfile, expectedhash))

    if toget:
        store = storefactory.openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
489
489
490
490
def downloadlfiles(ui, repo):
    """Cache every largefile referenced anywhere in the repository's history.

    Walks all revisions touching the standin directory, caching the
    largefiles each one needs.  Returns ``(totalsuccess, totalmissing)``
    counts over all revisions.
    """
    tonode = repo.changelog.node
    totalsuccess = 0
    totalmissing = 0
    # any revision touching a path under the standin directory may
    # reference largefiles that are not cached yet
    for rev in repo.revs(b'file(%s)', b'path:' + lfutil.shortname):
        fetched, absent = cachelfiles(ui, repo, tonode(rev))
        totalsuccess += len(fetched)
        totalmissing += len(absent)
    ui.status(_(b"%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_(b"%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
503
503
504
504
505 def updatelfiles(
505 def updatelfiles(
506 ui, repo, filelist=None, printmessage=None, normallookup=False
506 ui, repo, filelist=None, printmessage=None, normallookup=False
507 ):
507 ):
508 """Update largefiles according to standins in the working directory
508 """Update largefiles according to standins in the working directory
509
509
510 If ``printmessage`` is other than ``None``, it means "print (or
510 If ``printmessage`` is other than ``None``, it means "print (or
511 ignore, for false) message forcibly".
511 ignore, for false) message forcibly".
512 """
512 """
513 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
513 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
514 with repo.wlock():
514 with repo.wlock():
515 lfdirstate = lfutil.openlfdirstate(ui, repo)
515 lfdirstate = lfutil.openlfdirstate(ui, repo)
516 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
516 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
517
517
518 if filelist is not None:
518 if filelist is not None:
519 filelist = set(filelist)
519 filelist = set(filelist)
520 lfiles = [f for f in lfiles if f in filelist]
520 lfiles = [f for f in lfiles if f in filelist]
521
521
522 with lfdirstate.parentchange():
522 with lfdirstate.parentchange():
523 update = {}
523 update = {}
524 dropped = set()
524 dropped = set()
525 updated, removed = 0, 0
525 updated, removed = 0, 0
526 wvfs = repo.wvfs
526 wvfs = repo.wvfs
527 wctx = repo[None]
527 wctx = repo[None]
528 for lfile in lfiles:
528 for lfile in lfiles:
529 lfileorig = os.path.relpath(
529 lfileorig = os.path.relpath(
530 scmutil.backuppath(ui, repo, lfile), start=repo.root
530 scmutil.backuppath(ui, repo, lfile), start=repo.root
531 )
531 )
532 standin = lfutil.standin(lfile)
532 standin = lfutil.standin(lfile)
533 standinorig = os.path.relpath(
533 standinorig = os.path.relpath(
534 scmutil.backuppath(ui, repo, standin), start=repo.root
534 scmutil.backuppath(ui, repo, standin), start=repo.root
535 )
535 )
536 if wvfs.exists(standin):
536 if wvfs.exists(standin):
537 if wvfs.exists(standinorig) and wvfs.exists(lfile):
537 if wvfs.exists(standinorig) and wvfs.exists(lfile):
538 shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
538 shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
539 wvfs.unlinkpath(standinorig)
539 wvfs.unlinkpath(standinorig)
540 expecthash = lfutil.readasstandin(wctx[standin])
540 expecthash = lfutil.readasstandin(wctx[standin])
541 if expecthash != b'':
541 if expecthash != b'':
542 if lfile not in wctx: # not switched to normal file
542 if lfile not in wctx: # not switched to normal file
543 if repo.dirstate[standin] != b'?':
543 if repo.dirstate[standin] != b'?':
544 wvfs.unlinkpath(lfile, ignoremissing=True)
544 wvfs.unlinkpath(lfile, ignoremissing=True)
545 else:
545 else:
546 dropped.add(lfile)
546 dropped.add(lfile)
547
547
548 # use normallookup() to allocate an entry in largefiles
548 # use normallookup() to allocate an entry in largefiles
549 # dirstate to prevent lfilesrepo.status() from reporting
549 # dirstate to prevent lfilesrepo.status() from reporting
550 # missing files as removed.
550 # missing files as removed.
551 lfdirstate.normallookup(lfile)
551 lfdirstate.update_file(
552 lfile,
553 p1_tracked=True,
554 wc_tracked=True,
555 possibly_dirty=True,
556 )
552 update[lfile] = expecthash
557 update[lfile] = expecthash
553 else:
558 else:
554 # Remove lfiles for which the standin is deleted, unless the
559 # Remove lfiles for which the standin is deleted, unless the
555 # lfile is added to the repository again. This happens when a
560 # lfile is added to the repository again. This happens when a
556 # largefile is converted back to a normal file: the standin
561 # largefile is converted back to a normal file: the standin
557 # disappears, but a new (normal) file appears as the lfile.
562 # disappears, but a new (normal) file appears as the lfile.
558 if (
563 if (
559 wvfs.exists(lfile)
564 wvfs.exists(lfile)
560 and repo.dirstate.normalize(lfile) not in wctx
565 and repo.dirstate.normalize(lfile) not in wctx
561 ):
566 ):
562 wvfs.unlinkpath(lfile)
567 wvfs.unlinkpath(lfile)
563 removed += 1
568 removed += 1
564
569
565 # largefile processing might be slow and be interrupted - be prepared
570 # largefile processing might be slow and be interrupted - be prepared
566 lfdirstate.write()
571 lfdirstate.write()
567
572
568 if lfiles:
573 if lfiles:
569 lfiles = [f for f in lfiles if f not in dropped]
574 lfiles = [f for f in lfiles if f not in dropped]
570
575
571 for f in dropped:
576 for f in dropped:
572 repo.wvfs.unlinkpath(lfutil.standin(f))
577 repo.wvfs.unlinkpath(lfutil.standin(f))
573 # This needs to happen for dropped files, otherwise they stay in
578 # This needs to happen for dropped files, otherwise they stay in
574 # the M state.
579 # the M state.
575 lfdirstate._drop(f)
580 lfdirstate._drop(f)
576
581
577 statuswriter(_(b'getting changed largefiles\n'))
582 statuswriter(_(b'getting changed largefiles\n'))
578 cachelfiles(ui, repo, None, lfiles)
583 cachelfiles(ui, repo, None, lfiles)
579
584
580 with lfdirstate.parentchange():
585 with lfdirstate.parentchange():
581 for lfile in lfiles:
586 for lfile in lfiles:
582 update1 = 0
587 update1 = 0
583
588
584 expecthash = update.get(lfile)
589 expecthash = update.get(lfile)
585 if expecthash:
590 if expecthash:
586 if not lfutil.copyfromcache(repo, expecthash, lfile):
591 if not lfutil.copyfromcache(repo, expecthash, lfile):
587 # failed ... but already removed and set to normallookup
592 # failed ... but already removed and set to normallookup
588 continue
593 continue
589 # Synchronize largefile dirstate to the last modified
594 # Synchronize largefile dirstate to the last modified
590 # time of the file
595 # time of the file
591 lfdirstate.update_file(
596 lfdirstate.update_file(
592 lfile, p1_tracked=True, wc_tracked=True
597 lfile, p1_tracked=True, wc_tracked=True
593 )
598 )
594 update1 = 1
599 update1 = 1
595
600
596 # copy the exec mode of largefile standin from the repository's
601 # copy the exec mode of largefile standin from the repository's
597 # dirstate to its state in the lfdirstate.
602 # dirstate to its state in the lfdirstate.
598 standin = lfutil.standin(lfile)
603 standin = lfutil.standin(lfile)
599 if wvfs.exists(standin):
604 if wvfs.exists(standin):
600 # exec is decided by the users permissions using mask 0o100
605 # exec is decided by the users permissions using mask 0o100
601 standinexec = wvfs.stat(standin).st_mode & 0o100
606 standinexec = wvfs.stat(standin).st_mode & 0o100
602 st = wvfs.stat(lfile)
607 st = wvfs.stat(lfile)
603 mode = st.st_mode
608 mode = st.st_mode
604 if standinexec != mode & 0o100:
609 if standinexec != mode & 0o100:
605 # first remove all X bits, then shift all R bits to X
610 # first remove all X bits, then shift all R bits to X
606 mode &= ~0o111
611 mode &= ~0o111
607 if standinexec:
612 if standinexec:
608 mode |= (mode >> 2) & 0o111 & ~util.umask
613 mode |= (mode >> 2) & 0o111 & ~util.umask
609 wvfs.chmod(lfile, mode)
614 wvfs.chmod(lfile, mode)
610 update1 = 1
615 update1 = 1
611
616
612 updated += update1
617 updated += update1
613
618
614 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
619 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
615
620
616 lfdirstate.write()
621 lfdirstate.write()
617 if lfiles:
622 if lfiles:
618 statuswriter(
623 statuswriter(
619 _(b'%d largefiles updated, %d removed\n') % (updated, removed)
624 _(b'%d largefiles updated, %d removed\n') % (updated, removed)
620 )
625 )
621
626
622
627
@eh.command(
    b'lfpull',
    [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
    + cmdutil.remoteopts,
    _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
)
def lfpull(ui, repo, source=b"default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # Remember where to pull from so the store machinery can find it.
    repo.lfpullsource = source

    # At least one revision is mandatory; resolve the revset(s) to revs.
    revspecs = opts.get('rev', [])
    if not revspecs:
        raise error.Abort(_(b'no revisions specified'))
    revisions = scmutil.revrange(repo, revspecs)

    # Fetch the largefiles referenced by each requested revision and
    # keep a running total of how many ended up in the local cache.
    total_cached = 0
    for revision in revisions:
        ui.note(_(b'pulling largefiles for revision %d\n') % revision)
        fetched, _missing = cachelfiles(ui, repo, revision)
        total_cached += len(fetched)
    ui.status(_(b"%d largefiles cached\n") % total_cached)
663
668
664
669
@eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
def debuglfput(ui, repo, filepath, **kwargs):
    # Hash the file contents, upload it to the configured largefile
    # store under that hash, and echo the hash so callers can record it.
    sha = lfutil.hashfile(filepath)
    store = storefactory.openstore(repo)
    store.put(filepath, sha)
    ui.write(b'%s\n' % sha)
    return 0
General Comments 0
You need to be logged in to leave comments. Login now