##// END OF EJS Templates
largefiles: use util.readfile in lfconvert
Bryan O'Sullivan -
r27774:8ceaaf63 default
parent child Browse files
Show More
@@ -1,552 +1,547 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 from hgext.convert import convcmd
19 from hgext.convert import convcmd
20 from hgext.convert import filemap
20 from hgext.convert import filemap
21
21
22 import lfutil
22 import lfutil
23 import basestore
23 import basestore
24
24
# -- Commands ----------------------------------------------------------

# Command table populated by the @command decorator below; the largefiles
# extension merges this into Mercurial's global command table.
cmdtable = {}
command = cmdutil.command(cmdtable)
29
29
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    # --to-normal converts largefiles back; otherwise we convert toward
    # largefiles and need the size threshold up front.
    tolfile = not opts['to_normal']
    if tolfile:
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Walk every changeset in the source via changelog.nodesbetween().
        # A generator (not a list) keeps memory usage down on big repos.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted
            # to. No lock is needed on src: we only read its immutable
            # history.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            matcher = match_.match(rsrc.root, '', list(pats)) if pats else None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # Drop the working-copy largefile store left behind, if any.
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            # Remove the converted files (and now-empty dirs) from the
            # destination working directory; only their standins remain.
            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If anything became a largefile, record the 'largefiles'
            # requirement in the destination repository.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    # Map the largefile store directory back to the root.
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            # All largefiles must be available locally before conversion.
            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            # Temporarily swap in our converter subclass for convcmd.
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    """Convert one source changeset ``ctx`` into the destination repo.

    Classifies each changed file as largefile or normal (updating the
    ``lfiles``/``normalfiles`` sets and ``lfiletohash`` in place), writes
    standins for largefiles, and commits the converted changeset, recording
    the new node in ``revmap``.
    """
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink')
                                          % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # Resolve file content for the memctx commit below.
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit a converted changeset into ``rdst`` and record its node.

    Builds an in-memory changeset mirroring ``ctx``'s metadata, commits it,
    copies any new largefiles to the store, and maps the source node to the
    new destination tip in ``revmap``.
    """
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    lfutil.copyalltostore(rdst, ret)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    """Return the set of files changed in ``ctx`` relative to ``parents``.

    For merges (two real parents), also include files that differ between
    the merged manifest and either parent, since ctx.files() alone does not
    capture everything a merge touched.
    """
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        # files present in a parent but gone from the merge result
        files |= (set(mp1) | set(mp2)) - set(mc)
        # files whose content differs from either parent
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)
    return files
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    """Map ``ctx``'s parent nodes through ``revmap``, padded to two entries.

    Missing parents are filled with nullid so the result always has exactly
    two elements, as memctx expects.
    """
    parents = [revmap[p.node()] for p in ctx.parents()]
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    """Build a memfilectx for non-largefile ``f`` in ``ctx``.

    Returns None when the file was removed/renamed (lookup fails). Tag data
    in .hgtags is rewritten through ``revmap`` so tags point at converted
    nodes.
    """
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags (repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    """Rewrite .hgtags ``data`` so tag nodes are mapped through ``revmap``.

    Malformed lines, unparsable ids, and ids absent from the map are warned
    about and skipped rather than aborting the conversion.
    """
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)
326 def _islfile(file, ctx, matcher, size):
321 def _islfile(file, ctx, matcher, size):
327 '''Return true if file should be considered a largefile, i.e.
322 '''Return true if file should be considered a largefile, i.e.
328 matcher matches it or it is larger than size.'''
323 matcher matches it or it is larger than size.'''
329 # never store special .hg* files as largefiles
324 # never store special .hg* files as largefiles
330 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
325 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
331 return False
326 return False
332 if matcher and matcher(file):
327 if matcher and matcher(file):
333 return True
328 return True
334 try:
329 try:
335 return ctx.filectx(file).size() >= size * 1024 * 1024
330 return ctx.filectx(file).size() >= size * 1024 * 1024
336 except error.LookupError:
331 except error.LookupError:
337 return False
332 return False
338
333
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store

    ``files`` is a list of largefile hashes; hashes the remote store
    already has are skipped. No-op when ``files`` is empty.
    '''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # Use a list comprehension rather than filter(): under Python 3
    # filter() returns a lazy iterator, which would break the len()
    # calls below. Identical behavior on Python 2.
    files = [h for h in files if not retval[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise error.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    # Scope of the check: every revision, or just the working parent.
    revs = repo.revs('all()') if all else ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    # Collect (lfile, hash) pairs that are not yet in the local store.
    toget = []
    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            continue # node must be None and standin wasn't found in wctx
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        return ([], [])

    store = basestore._openstore(repo)
    return store.get(toget)
def downloadlfiles(ui, repo, rev=None):
    """Cache largefiles for the given revisions (all revs when rev is None).

    Returns (totalsuccess, totalmissing): counts of largefiles newly cached
    and of largefiles that could not be found.
    """
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here.
        pass

    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        # Pass 1: decide per-lfile what to do based on its standin.
        update = {}          # lfile -> expected hash to restore
        updated, removed = 0, 0
        for lfile in lfiles:
            abslfile = repo.wjoin(lfile)
            abslfileorig = scmutil.origpath(ui, repo, abslfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            absstandinorig = scmutil.origpath(ui, repo, absstandin)
            if os.path.exists(absstandin):
                # Preserve a conflict backup of the largefile alongside the
                # standin's .orig, then drop the standin's .orig.
                if (os.path.exists(absstandinorig) and
                        os.path.exists(abslfile)):
                    shutil.copyfile(abslfile, abslfileorig)
                    util.unlinkpath(absstandinorig)
                expecthash = lfutil.readstandin(repo, lfile)
                if expecthash != '':
                    if lfile not in repo[None]: # not switched to normal file
                        util.unlinkpath(abslfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                        repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        # Pass 2: materialize contents from the cache and sync dirstates.
        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the state of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                mode = os.stat(absstandin).st_mode
                if mode != os.stat(abslfile).st_mode:
                    os.chmod(abslfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                         removed))
    finally:
        wlock.release()
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # Remember where to pull from so the store machinery can reach it.
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
General Comments 0
You need to be logged in to leave comments. Login now