##// END OF EJS Templates
with: use context manager for wlock in updatelfiles
Bryan O'Sullivan -
r27820:d2e9cc9e default
parent child Browse files
Show More
@@ -1,547 +1,544 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 from hgext.convert import convcmd
19 from hgext.convert import convcmd
20 from hgext.convert import filemap
20 from hgext.convert import filemap
21
21
22 import lfutil
22 import lfutil
23 import basestore
23 import basestore
24
24
# -- Commands ----------------------------------------------------------

# Command table populated by the @command decorator below and picked up
# by the extension loader.
cmdtable = {}
command = cmdutil.command(cmdtable)
29
29
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    # Direction of the conversion; default is normal -> largefiles.
    tolfile = not opts['to_normal']
    if tolfile:
        # the size threshold only matters when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Use a generator instead of a list to decrease memory usage.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock the destination to prevent modification while it is
            # converted to.  No lock is needed on src because we are only
            # reading from its immutable history.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            matcher = match_.match(rsrc.root, '', list(pats)) if pats else None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # Clean the converted largefiles and the standin directory out
            # of the destination working directory.
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash:
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    # prune now-empty parent directories, best effort
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            # temporarily swap in our converter subclass
            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
172
172
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert one source changeset ``ctx`` into ``rdst``.

    ``lfiles``/``normalfiles`` are running sets of files already classified
    as large/normal (updated in place); ``lfiletohash`` caches the hash of
    each largefile's last-written content; ``revmap`` maps source nodes to
    destination nodes and gains an entry for ``ctx`` on success.
    '''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise error.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise error.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                hash = util.sha1(ctx[f].data()).hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # Callback handed to memctx: produce file content for the commit.
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed; returning None signals removal
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
250
250
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit ``dstfiles`` to ``rdst`` as a memctx mirroring ``ctx``'s
    metadata, then record the src -> dst node mapping in ``revmap``.'''
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(mctx)
    lfutil.copyalltostore(rdst, newnode)
    rdst.setparents(newnode)
    revmap[ctx.node()] = rdst.changelog.tip()
258
258
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files changed in ``ctx``.

    For merges (neither parent is null) also include files that were
    present in a parent but dropped from the merge, and files whose
    merged version differs from either parent's version.'''
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        # present in a parent but gone from the merge result
        files |= (set(mp1) | set(mp2)) - set(mc)
        # differs from at least one parent
        files.update(f for f in mc
                     if mc[f] != mp1.get(f) or mc[f] != mp2.get(f))
    return files
271
271
272 # Convert src parents to dst parents
272 # Convert src parents to dst parents
273 def _convertparents(ctx, revmap):
273 def _convertparents(ctx, revmap):
274 parents = []
274 parents = []
275 for p in ctx.parents():
275 for p in ctx.parents():
276 parents.append(revmap[p.node()])
276 parents.append(revmap[p.node()])
277 while len(parents) < 2:
277 while len(parents) < 2:
278 parents.append(node.nullid)
278 parents.append(node.nullid)
279 return parents
279 return parents
280
280
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    '''Build a memfilectx for the non-largefile ``f`` in ``ctx``.

    Returns None if the file is not present (removed/renamed).  The
    ``.hgtags`` file has its revision ids remapped through ``revmap``.'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        return None
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
296
296
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    '''Rewrite ``.hgtags`` content, mapping each node id through ``revmap``.

    Lines that are malformed, have an unparsable id, or reference a node
    with no mapping are skipped with a warning rather than aborting.'''
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                    % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                    % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                                        name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)
320
320
321 def _islfile(file, ctx, matcher, size):
321 def _islfile(file, ctx, matcher, size):
322 '''Return true if file should be considered a largefile, i.e.
322 '''Return true if file should be considered a largefile, i.e.
323 matcher matches it or it is larger than size.'''
323 matcher matches it or it is larger than size.'''
324 # never store special .hg* files as largefiles
324 # never store special .hg* files as largefiles
325 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
325 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
326 return False
326 return False
327 if matcher and matcher(file):
327 if matcher and matcher(file):
328 return True
328 return True
329 try:
329 try:
330 return ctx.filectx(file).size() >= size * 1024 * 1024
330 return ctx.filectx(file).size() >= size * 1024 * 1024
331 except error.LookupError:
331 except error.LookupError:
332 return False
332 return False
333
333
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store

    ``files`` is a list of largefile hashes; entries already present in
    the remote store (per ``store.exists``) are skipped.'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # Materialize a list rather than using filter(): on Python 3 filter()
    # returns an iterator, which would break the len(files) calls below.
    files = [h for h in files if not retval[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise error.Abort(_('largefile %s missing from store'
                                ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
359
359
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    revs = repo.revs('all()') if all else ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
373
373
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)

    toget = []
    for lfile in lfiles:
        try:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            continue # node must be None and standin wasn't found in wctx
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        return store.get(toget)

    return ([], [])
402
402
def downloadlfiles(ui, repo, rev=None):
    '''Download to the local cache every largefile referenced by the
    given revisions (all revisions when ``rev`` is None).

    Returns a ``(totalsuccess, totalmissing)`` pair of counts.'''
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})

    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass

    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
420
420
def updatelfiles(ui, repo, filelist=None, printmessage=None,
                 normallookup=False):
    '''Update largefiles according to standins in the working directory

    If ``printmessage`` is other than ``None``, it means "print (or
    ignore, for false) message forcibly".
    '''
    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
    # Context manager instead of manual try/finally guarantees the wlock
    # is released on every exit path with one less level of nesting.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            filelist = set(filelist)
            lfiles = [f for f in lfiles if f in filelist]

        update = {}  # lfile -> expected hash, for files needing content
        updated, removed = 0, 0
        for lfile in lfiles:
            abslfile = repo.wjoin(lfile)
            abslfileorig = scmutil.origpath(ui, repo, abslfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            absstandinorig = scmutil.origpath(ui, repo, absstandin)
            if os.path.exists(absstandin):
                if (os.path.exists(absstandinorig) and
                        os.path.exists(abslfile)):
                    # preserve the backup of the largefile alongside the
                    # standin's backup, then drop the standin backup
                    shutil.copyfile(abslfile, abslfileorig)
                    util.unlinkpath(absstandinorig)
                expecthash = lfutil.readstandin(repo, lfile)
                if expecthash != '':
                    if lfile not in repo[None]: # not switched to normal file
                        util.unlinkpath(abslfile, ignoremissing=True)
                    # use normallookup() to allocate an entry in largefiles
                    # dirstate to prevent lfilesrepo.status() from reporting
                    # missing files as removed.
                    lfdirstate.normallookup(lfile)
                    update[lfile] = expecthash
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                    repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1

        # largefile processing might be slow and be interrupted - be prepared
        lfdirstate.write()

        if lfiles:
            statuswriter(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        for lfile in lfiles:
            update1 = 0

            expecthash = update.get(lfile)
            if expecthash:
                if not lfutil.copyfromcache(repo, expecthash, lfile):
                    # failed ... but already removed and set to normallookup
                    continue
                # Synchronize largefile dirstate to the last modified
                # time of the file
                lfdirstate.normal(lfile)
                update1 = 1

            # copy the state of largefile standin from the repository's
            # dirstate to its state in the lfdirstate.
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                mode = os.stat(absstandin).st_mode
                if mode != os.stat(abslfile).st_mode:
                    os.chmod(abslfile, mode)
                    update1 = 1

            updated += update1

            lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)

        lfdirstate.write()
        if lfiles:
            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # remember where to pull from for the store lookup below
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise error.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
General Comments 0
You need to be logged in to leave comments. Login now