##// END OF EJS Templates
largefiles: declare commands using decorator
Gregory Szorc -
r21242:4c94229c default
parent child Browse files
Show More
@@ -1,576 +1,571 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 import lfutil
19 import lfutil
20 import basestore
20 import basestore
21
21
22 # -- Commands ----------------------------------------------------------
22 # -- Commands ----------------------------------------------------------
23
23
cmdtable = {}
# the @command decorator below registers into cmdtable; the extension
# loader finds the table by this well-known name.
command = cmdutil.command(cmdtable)

# let 'hg lfconvert' locate the repository from cwd like other commands
commands.inferrepo += " lfconvert"

@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'))
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    # --to-normal converts back to a plain repo; only the to-largefiles
    # direction needs the size threshold.
    tolfile = not opts['to_normal']
    if tolfile:
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Walk every changeset of the source via nodesbetween(); a
        # generator (not a list) keeps memory usage down on big repos.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        # revmap translates source nodes to their converted counterparts
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            matcher = match_.match(rsrc.root, '', list(pats)) if pats else None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # clean conversion leftovers out of the destination working dir
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    # prune now-empty parent directories, best effort
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
122
134
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    """Replay one source changeset into rdst, expanding largefile standins
    back into real file contents (the --to-normal direction)."""
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            # the standin's content is the hash that names the big file
            # in the local store
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile \'%s\' from revision %s")
                        % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # standins in the source map back to their plain filenames in dst
    dstfiles = [lfutil.splitstandin(fn) if lfutil.isstandin(fn) else fn
                for fn in files]
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
176
188
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    """Replay one source changeset into rdst, replacing big files with
    largefile standins (the to-largefiles direction).

    lfiles, normalfiles and lfiletohash accumulate state across the
    per-changeset calls made by lfconvert."""
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            # first time we see this file: classify it
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                hasher = util.sha1('')
                hasher.update(ctx[f].data())
                filehash = hasher.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != filehash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), filehash,
                        executable)
                    lfiletohash[f] = filehash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
253
265
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    """Commit dstfiles into rdst as an in-memory changeset mirroring ctx
    (same description/user/date/extra), then record the node mapping."""
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(memctx)
    rdst.setparents(newnode)
    # remember where this source changeset ended up in the destination
    revmap[ctx.node()] = rdst.changelog.tip()
260
272
def _getchangedfiles(ctx, parents):
    """Return the set of files changed in ctx relative to parents.

    For merges (no null parent) the plain ctx.files() list is augmented
    with files that differ from either parent's manifest, since a merge
    changeset does not list every affected file itself."""
    files = set(ctx.files())
    if node.nullid not in parents:
        man = ctx.manifest()
        manp1 = ctx.parents()[0].manifest()
        manp2 = ctx.parents()[1].manifest()
        # files present in a parent but gone from the merge result
        files |= (set(manp1) | set(manp2)) - set(man)
        # files whose content differs from either parent
        for f in man:
            if man[f] != manp1.get(f, None) or man[f] != manp2.get(f, None):
                files.add(f)
    return files
273
285
def _convertparents(ctx, revmap):
    """Translate ctx's parent nodes through revmap, padding with nullid
    so the result always has exactly two entries."""
    parents = [revmap[p.node()] for p in ctx.parents()]
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
282
294
def _getnormalcontext(ui, ctx, f, revmap):
    """Return a memfilectx for a normal (non-largefile) file f in ctx.

    Raises IOError when the file is absent from the manifest (removed or
    renamed).  .hgtags content is rewritten through revmap so tags keep
    pointing at the converted changesets."""
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(ui, revmap, data)
    return context.memfilectx(f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
298
310
def _converttags(ui, revmap, data):
    """Rewrite .hgtags content, mapping each tagged node through revmap.

    Malformed lines, unparseable ids and ids with no mapping are skipped
    with a warning rather than aborting the conversion."""
    newdata = []
    for line in data.splitlines():
        try:
            tagid, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                % line)
            continue
        try:
            newid = node.bin(tagid)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                % tagid)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % tagid)
            continue
    return ''.join(newdata)
322
334
def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file in ('.hgtags', '.hgignore', '.hgsigs'):
        return False
    if matcher and matcher(file):
        return True
    try:
        # size is in megabytes; compare against the first stored version
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False
335
347
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # only upload hashes the remote store does not already have
    files = [h for h in files if not retval[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for at, hash in enumerate(files):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
    ui.progress(_('uploading largefiles'), None)
361
373
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    # Pass a list (not an iterator) to the store: we know a list works.
    revs = range(len(repo)) if all else ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
377
389
378 def cachelfiles(ui, repo, node, filelist=None):
390 def cachelfiles(ui, repo, node, filelist=None):
379 '''cachelfiles ensures that all largefiles needed by the specified revision
391 '''cachelfiles ensures that all largefiles needed by the specified revision
380 are present in the repository's largefile cache.
392 are present in the repository's largefile cache.
381
393
382 returns a tuple (cached, missing). cached is the list of files downloaded
394 returns a tuple (cached, missing). cached is the list of files downloaded
383 by this operation; missing is the list of files that were needed but could
395 by this operation; missing is the list of files that were needed but could
384 not be found.'''
396 not be found.'''
385 lfiles = lfutil.listlfiles(repo, node)
397 lfiles = lfutil.listlfiles(repo, node)
386 if filelist:
398 if filelist:
387 lfiles = set(lfiles) & set(filelist)
399 lfiles = set(lfiles) & set(filelist)
388 toget = []
400 toget = []
389
401
390 for lfile in lfiles:
402 for lfile in lfiles:
391 try:
403 try:
392 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
404 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
393 except IOError, err:
405 except IOError, err:
394 if err.errno == errno.ENOENT:
406 if err.errno == errno.ENOENT:
395 continue # node must be None and standin wasn't found in wctx
407 continue # node must be None and standin wasn't found in wctx
396 raise
408 raise
397 if not lfutil.findfile(repo, expectedhash):
409 if not lfutil.findfile(repo, expectedhash):
398 toget.append((lfile, expectedhash))
410 toget.append((lfile, expectedhash))
399
411
400 if toget:
412 if toget:
401 store = basestore._openstore(repo)
413 store = basestore._openstore(repo)
402 ret = store.get(toget)
414 ret = store.get(toget)
403 return ret
415 return ret
404
416
405 return ([], [])
417 return ([], [])
406
418
def downloadlfiles(ui, repo, rev=None):
    """Cache the largefiles needed by the given revisions (all revs when
    rev is None) and report how many were fetched or missing.

    Returns (totalsuccess, totalmissing)."""
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass
    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
424
436
425 def updatelfiles(ui, repo, filelist=None, printmessage=True):
437 def updatelfiles(ui, repo, filelist=None, printmessage=True):
426 wlock = repo.wlock()
438 wlock = repo.wlock()
427 try:
439 try:
428 lfdirstate = lfutil.openlfdirstate(ui, repo)
440 lfdirstate = lfutil.openlfdirstate(ui, repo)
429 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
441 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
430
442
431 if filelist is not None:
443 if filelist is not None:
432 lfiles = [f for f in lfiles if f in filelist]
444 lfiles = [f for f in lfiles if f in filelist]
433
445
434 update = {}
446 update = {}
435 updated, removed = 0, 0
447 updated, removed = 0, 0
436 for lfile in lfiles:
448 for lfile in lfiles:
437 abslfile = repo.wjoin(lfile)
449 abslfile = repo.wjoin(lfile)
438 absstandin = repo.wjoin(lfutil.standin(lfile))
450 absstandin = repo.wjoin(lfutil.standin(lfile))
439 if os.path.exists(absstandin):
451 if os.path.exists(absstandin):
440 if (os.path.exists(absstandin + '.orig') and
452 if (os.path.exists(absstandin + '.orig') and
441 os.path.exists(abslfile)):
453 os.path.exists(abslfile)):
442 shutil.copyfile(abslfile, abslfile + '.orig')
454 shutil.copyfile(abslfile, abslfile + '.orig')
443 util.unlinkpath(absstandin + '.orig')
455 util.unlinkpath(absstandin + '.orig')
444 expecthash = lfutil.readstandin(repo, lfile)
456 expecthash = lfutil.readstandin(repo, lfile)
445 if (expecthash != '' and
457 if (expecthash != '' and
446 (not os.path.exists(abslfile) or
458 (not os.path.exists(abslfile) or
447 expecthash != lfutil.hashfile(abslfile))):
459 expecthash != lfutil.hashfile(abslfile))):
448 if lfile not in repo[None]: # not switched to normal file
460 if lfile not in repo[None]: # not switched to normal file
449 util.unlinkpath(abslfile, ignoremissing=True)
461 util.unlinkpath(abslfile, ignoremissing=True)
450 # use normallookup() to allocate entry in largefiles
462 # use normallookup() to allocate entry in largefiles
451 # dirstate, because lack of it misleads
463 # dirstate, because lack of it misleads
452 # lfilesrepo.status() into recognition that such cache
464 # lfilesrepo.status() into recognition that such cache
453 # missing files are REMOVED.
465 # missing files are REMOVED.
454 lfdirstate.normallookup(lfile)
466 lfdirstate.normallookup(lfile)
455 update[lfile] = expecthash
467 update[lfile] = expecthash
456 else:
468 else:
457 # Remove lfiles for which the standin is deleted, unless the
469 # Remove lfiles for which the standin is deleted, unless the
458 # lfile is added to the repository again. This happens when a
470 # lfile is added to the repository again. This happens when a
459 # largefile is converted back to a normal file: the standin
471 # largefile is converted back to a normal file: the standin
460 # disappears, but a new (normal) file appears as the lfile.
472 # disappears, but a new (normal) file appears as the lfile.
461 if (os.path.exists(abslfile) and
473 if (os.path.exists(abslfile) and
462 repo.dirstate.normalize(lfile) not in repo[None]):
474 repo.dirstate.normalize(lfile) not in repo[None]):
463 util.unlinkpath(abslfile)
475 util.unlinkpath(abslfile)
464 removed += 1
476 removed += 1
465
477
466 # largefile processing might be slow and be interrupted - be prepared
478 # largefile processing might be slow and be interrupted - be prepared
467 lfdirstate.write()
479 lfdirstate.write()
468
480
469 if lfiles:
481 if lfiles:
470 if printmessage:
482 if printmessage:
471 ui.status(_('getting changed largefiles\n'))
483 ui.status(_('getting changed largefiles\n'))
472 cachelfiles(ui, repo, None, lfiles)
484 cachelfiles(ui, repo, None, lfiles)
473
485
474 for lfile in lfiles:
486 for lfile in lfiles:
475 update1 = 0
487 update1 = 0
476
488
477 expecthash = update.get(lfile)
489 expecthash = update.get(lfile)
478 if expecthash:
490 if expecthash:
479 if not lfutil.copyfromcache(repo, expecthash, lfile):
491 if not lfutil.copyfromcache(repo, expecthash, lfile):
480 # failed ... but already removed and set to normallookup
492 # failed ... but already removed and set to normallookup
481 continue
493 continue
482 # Synchronize largefile dirstate to the last modified
494 # Synchronize largefile dirstate to the last modified
483 # time of the file
495 # time of the file
484 lfdirstate.normal(lfile)
496 lfdirstate.normal(lfile)
485 update1 = 1
497 update1 = 1
486
498
487 # copy the state of largefile standin from the repository's
499 # copy the state of largefile standin from the repository's
488 # dirstate to its state in the lfdirstate.
500 # dirstate to its state in the lfdirstate.
489 abslfile = repo.wjoin(lfile)
501 abslfile = repo.wjoin(lfile)
490 absstandin = repo.wjoin(lfutil.standin(lfile))
502 absstandin = repo.wjoin(lfutil.standin(lfile))
491 if os.path.exists(absstandin):
503 if os.path.exists(absstandin):
492 mode = os.stat(absstandin).st_mode
504 mode = os.stat(absstandin).st_mode
493 if mode != os.stat(abslfile).st_mode:
505 if mode != os.stat(abslfile).st_mode:
494 os.chmod(abslfile, mode)
506 os.chmod(abslfile, mode)
495 update1 = 1
507 update1 = 1
496
508
497 updated += update1
509 updated += update1
498
510
499 state = repo.dirstate[lfutil.standin(lfile)]
511 state = repo.dirstate[lfutil.standin(lfile)]
500 if state == 'n':
512 if state == 'n':
501 # When rebasing, we need to synchronize the standin and the
513 # When rebasing, we need to synchronize the standin and the
502 # largefile, because otherwise the largefile will get reverted.
514 # largefile, because otherwise the largefile will get reverted.
503 # But for commit's sake, we have to mark the file as unclean.
515 # But for commit's sake, we have to mark the file as unclean.
504 if getattr(repo, "_isrebasing", False):
516 if getattr(repo, "_isrebasing", False):
505 lfdirstate.normallookup(lfile)
517 lfdirstate.normallookup(lfile)
506 else:
518 else:
507 lfdirstate.normal(lfile)
519 lfdirstate.normal(lfile)
508 elif state == 'r':
520 elif state == 'r':
509 lfdirstate.remove(lfile)
521 lfdirstate.remove(lfile)
510 elif state == 'a':
522 elif state == 'a':
511 lfdirstate.add(lfile)
523 lfdirstate.add(lfile)
512 elif state == '?':
524 elif state == '?':
513 lfdirstate.drop(lfile)
525 lfdirstate.drop(lfile)
514
526
515 lfdirstate.write()
527 lfdirstate.write()
516 if printmessage and lfiles:
528 if printmessage and lfiles:
517 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
529 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
518 removed))
530 removed))
519 finally:
531 finally:
520 wlock.release()
532 wlock.release()
521
533
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # Record where to pull from so the store machinery knows which
    # remote to contact when fetching the missing largefiles.
    repo.lfpullsource = source

    revspec = opts.get('rev', [])
    if not revspec:
        raise util.Abort(_('no revisions specified'))

    # Fetch the largefiles referenced by each requested revision and
    # keep a running total of how many were actually brought into the
    # local cache.
    totalcached = 0
    for rev in scmutil.revrange(repo, revspec):
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        totalcached += len(cached)
    ui.status(_("%d largefiles cached\n") % totalcached)
556
# -- hg commands declarations ------------------------------------------------

# NOTE: command registration is performed by the @command decorator
# (created from cmdutil.command(cmdtable) at the top of this file), which
# adds the 'lfconvert' and 'lfpull' entries as those functions are
# defined, and the top of the file already appends " lfconvert" to
# commands.inferrepo.  The legacy hand-built cmdtable that used to live
# here rebound the module-global table (discarding the decorator-filled
# dict) and appended to commands.inferrepo a second time, so it has been
# removed in favor of the decorator-based declarations.
General Comments 0
You need to be logged in to leave comments. Login now