##// END OF EJS Templates
largefiles: define inferrepo in command decorator
Gregory Szorc -
r21785:a730b002 default
parent child Browse files
Show More
@@ -1,573 +1,572 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 import lfutil
19 import lfutil
20 import basestore
20 import basestore
21
21
22 # -- Commands ----------------------------------------------------------
22 # -- Commands ----------------------------------------------------------
23
23
24 cmdtable = {}
24 cmdtable = {}
25 command = cmdutil.command(cmdtable)
25 command = cmdutil.command(cmdtable)
26
26
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    # norepo: lfconvert runs outside any repo; inferrepo: FILE patterns
    # may still be resolved against a repo inferred from the cwd
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # NOTE: 'size' is only bound in this branch; it is only read on
        # the tolfile code path below, so this is safe.
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do
        # this is to simply walk the changelog, using
        # changelog.nodesbetween().  Take a look at
        # mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        # revmap maps source nodes to the corresponding converted nodes
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # converting normal -> largefiles
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # remove the largefiles store and any working copies of the
            # converted largefiles left behind by the conversion
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    # directory not empty (or already gone) - leave it
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            # converting largefiles -> normal
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Convert one changeset from the largefiles repo rsrc into an
    equivalent changeset in the normal repo rdst, replacing each standin
    with the real largefile content (the --to-normal direction).

    revmap maps already-converted source nodes to destination nodes and
    is updated by the commit at the end.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        # memctx callback: return the contents of file f in the new
        # changeset; raising IOError marks the file as removed.
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # translate the copy source from standin to real name
                renamed = lfutil.splitstandin(renamed[0])

            # the standin's content is the hash naming the largefile
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile \'%s\' from revision %s")
                        % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # in the destination, standins are replaced by their real file names
    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert one changeset from the normal repo rsrc into rdst,
    replacing each file classified as a largefile with a standin.

    lfiles and normalfiles are mutable caches of the classification
    decided so far (a file keeps its classification once made);
    lfiletohash caches largefile name -> content hash across revisions
    so standins are only rewritten when the content changed.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            # first time we see this file: classify it
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # write the real file so the standin has something
                    # to point at, then record the new hash
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # memctx callback: standins get "<hash>\n" as their content,
        # normal files are passed through (with .hgtags remapped)
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(),
                                      'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit dstfiles to rdst as an in-memory changeset mirroring ctx
    (same description, user, date and extras) and record the resulting
    node in revmap under ctx's source node.'''
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    committed = rdst.commitctx(memctx)
    rdst.setparents(committed)
    revmap[ctx.node()] = rdst.changelog.tip()
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files touched by ctx.  For a merge (no null
    parent) additionally include files that differ from either parent,
    since ctx.files() alone does not cover all merge effects.'''
    changed = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        # files present in a parent but absent from the merge result
        changed |= (set(mp1) | set(mp2)) - set(mc)
        # files whose content differs from at least one parent
        changed.update(f for f in mc
                       if mc[f] != mp1.get(f, None)
                       or mc[f] != mp2.get(f, None))
    return changed
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    '''Map ctx's parent nodes through revmap, padding with nullid so the
    result always has exactly two entries (memctx expects two).'''
    mapped = [revmap[p.node()] for p in ctx.parents()]
    while len(mapped) < 2:
        mapped.append(node.nullid)
    return mapped
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    '''Build a memfilectx for a normal (non-largefile) file, remapping
    changeset ids inside .hgtags through revmap.'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        # memctx protocol: IOError signals the file was removed/renamed
        raise IOError
    copysource = fctx.renamed()
    if copysource:
        copysource = copysource[0]

    filedata = fctx.data()
    if f == '.hgtags':
        # the tag file records source changeset ids; rewrite them so
        # they point at the converted revisions
        filedata = _converttags (repo.ui, revmap, filedata)
    flags = fctx.flags()
    return context.memfilectx(repo, f, filedata, 'l' in flags,
                              'x' in flags, copysource)
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    '''Rewrite .hgtags content, translating each recorded changeset id
    through revmap.

    Malformed lines and ids with no mapping are skipped with a warning
    rather than aborting the whole conversion.  Returns the rewritten
    tag data as a string.'''
    newdata = []
    for line in data.splitlines():
        try:
            # renamed from 'id' to avoid shadowing the builtin id()
            tagid, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                % line)
            continue
        try:
            newid = node.bin(tagid)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                % tagid)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % tagid)
    return ''.join(newdata)
337 def _islfile(file, ctx, matcher, size):
336 def _islfile(file, ctx, matcher, size):
338 '''Return true if file should be considered a largefile, i.e.
337 '''Return true if file should be considered a largefile, i.e.
339 matcher matches it or it is larger than size.'''
338 matcher matches it or it is larger than size.'''
340 # never store special .hg* files as largefiles
339 # never store special .hg* files as largefiles
341 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
340 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
342 return False
341 return False
343 if matcher and matcher(file):
342 if matcher and matcher(file):
344 return True
343 return True
345 try:
344 try:
346 return ctx.filectx(file).size() >= size * 1024 * 1024
345 return ctx.filectx(file).size() >= size * 1024 * 1024
347 except error.LookupError:
346 except error.LookupError:
348 return False
347 return False
349
348
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    # ask the store which hashes it already has, and upload the rest
    present = store.exists(files)
    files = [fhash for fhash in files if not present[fhash]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for at, fhash in enumerate(files):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, fhash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % fhash)
        # XXX check for errors here
        store.put(source, fhash)
    ui.progress(_('uploading largefiles'), None)
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    if not all:
        revlist = ['.']
    else:
        # Pass a list to the function rather than an iterator because we
        # know a list will work.
        revlist = range(len(repo))

    store = basestore._openstore(repo)
    return store.verify(revlist, contents=contents)
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        # restrict to the requested subset
        lfiles = set(lfiles) & set(filelist)
    # (file, expected hash) pairs that are not yet in the local store
    toget = []

    for lfile in lfiles:
        try:
            # the standin's content is the expected largefile hash
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError, err:
            if err.errno == errno.ENOENT:
                continue # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    # nothing needed downloading; nothing was missing either
    return ([], [])
def downloadlfiles(ui, repo, rev=None):
    '''Make sure every largefile referenced by the revisions in rev is
    present in the local cache, reporting totals to the user.  Returns
    a (cached, missing) count pair.'''
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        # walkchangerevs requires a prepare callback; nothing to do here
        pass
    cached = 0
    missingcount = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            cached += len(success)
            missingcount += len(missing)
    ui.status(_("%d additional largefiles cached\n") % cached)
    if missingcount > 0:
        ui.status(_("%d largefiles failed to download\n") % missingcount)
    return cached, missingcount
439 def updatelfiles(ui, repo, filelist=None, printmessage=True):
438 def updatelfiles(ui, repo, filelist=None, printmessage=True):
440 wlock = repo.wlock()
439 wlock = repo.wlock()
441 try:
440 try:
442 lfdirstate = lfutil.openlfdirstate(ui, repo)
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
443 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
442 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
444
443
445 if filelist is not None:
444 if filelist is not None:
446 lfiles = [f for f in lfiles if f in filelist]
445 lfiles = [f for f in lfiles if f in filelist]
447
446
448 update = {}
447 update = {}
449 updated, removed = 0, 0
448 updated, removed = 0, 0
450 for lfile in lfiles:
449 for lfile in lfiles:
451 abslfile = repo.wjoin(lfile)
450 abslfile = repo.wjoin(lfile)
452 absstandin = repo.wjoin(lfutil.standin(lfile))
451 absstandin = repo.wjoin(lfutil.standin(lfile))
453 if os.path.exists(absstandin):
452 if os.path.exists(absstandin):
454 if (os.path.exists(absstandin + '.orig') and
453 if (os.path.exists(absstandin + '.orig') and
455 os.path.exists(abslfile)):
454 os.path.exists(abslfile)):
456 shutil.copyfile(abslfile, abslfile + '.orig')
455 shutil.copyfile(abslfile, abslfile + '.orig')
457 util.unlinkpath(absstandin + '.orig')
456 util.unlinkpath(absstandin + '.orig')
458 expecthash = lfutil.readstandin(repo, lfile)
457 expecthash = lfutil.readstandin(repo, lfile)
459 if (expecthash != '' and
458 if (expecthash != '' and
460 (not os.path.exists(abslfile) or
459 (not os.path.exists(abslfile) or
461 expecthash != lfutil.hashfile(abslfile))):
460 expecthash != lfutil.hashfile(abslfile))):
462 if lfile not in repo[None]: # not switched to normal file
461 if lfile not in repo[None]: # not switched to normal file
463 util.unlinkpath(abslfile, ignoremissing=True)
462 util.unlinkpath(abslfile, ignoremissing=True)
464 # use normallookup() to allocate entry in largefiles
463 # use normallookup() to allocate entry in largefiles
465 # dirstate, because lack of it misleads
464 # dirstate, because lack of it misleads
466 # lfilesrepo.status() into recognition that such cache
465 # lfilesrepo.status() into recognition that such cache
467 # missing files are REMOVED.
466 # missing files are REMOVED.
468 lfdirstate.normallookup(lfile)
467 lfdirstate.normallookup(lfile)
469 update[lfile] = expecthash
468 update[lfile] = expecthash
470 else:
469 else:
471 # Remove lfiles for which the standin is deleted, unless the
470 # Remove lfiles for which the standin is deleted, unless the
472 # lfile is added to the repository again. This happens when a
471 # lfile is added to the repository again. This happens when a
473 # largefile is converted back to a normal file: the standin
472 # largefile is converted back to a normal file: the standin
474 # disappears, but a new (normal) file appears as the lfile.
473 # disappears, but a new (normal) file appears as the lfile.
475 if (os.path.exists(abslfile) and
474 if (os.path.exists(abslfile) and
476 repo.dirstate.normalize(lfile) not in repo[None]):
475 repo.dirstate.normalize(lfile) not in repo[None]):
477 util.unlinkpath(abslfile)
476 util.unlinkpath(abslfile)
478 removed += 1
477 removed += 1
479
478
480 # largefile processing might be slow and be interrupted - be prepared
479 # largefile processing might be slow and be interrupted - be prepared
481 lfdirstate.write()
480 lfdirstate.write()
482
481
483 if lfiles:
482 if lfiles:
484 if printmessage:
483 if printmessage:
485 ui.status(_('getting changed largefiles\n'))
484 ui.status(_('getting changed largefiles\n'))
486 cachelfiles(ui, repo, None, lfiles)
485 cachelfiles(ui, repo, None, lfiles)
487
486
488 for lfile in lfiles:
487 for lfile in lfiles:
489 update1 = 0
488 update1 = 0
490
489
491 expecthash = update.get(lfile)
490 expecthash = update.get(lfile)
492 if expecthash:
491 if expecthash:
493 if not lfutil.copyfromcache(repo, expecthash, lfile):
492 if not lfutil.copyfromcache(repo, expecthash, lfile):
494 # failed ... but already removed and set to normallookup
493 # failed ... but already removed and set to normallookup
495 continue
494 continue
496 # Synchronize largefile dirstate to the last modified
495 # Synchronize largefile dirstate to the last modified
497 # time of the file
496 # time of the file
498 lfdirstate.normal(lfile)
497 lfdirstate.normal(lfile)
499 update1 = 1
498 update1 = 1
500
499
501 # copy the state of largefile standin from the repository's
500 # copy the state of largefile standin from the repository's
502 # dirstate to its state in the lfdirstate.
501 # dirstate to its state in the lfdirstate.
503 abslfile = repo.wjoin(lfile)
502 abslfile = repo.wjoin(lfile)
504 absstandin = repo.wjoin(lfutil.standin(lfile))
503 absstandin = repo.wjoin(lfutil.standin(lfile))
505 if os.path.exists(absstandin):
504 if os.path.exists(absstandin):
506 mode = os.stat(absstandin).st_mode
505 mode = os.stat(absstandin).st_mode
507 if mode != os.stat(abslfile).st_mode:
506 if mode != os.stat(abslfile).st_mode:
508 os.chmod(abslfile, mode)
507 os.chmod(abslfile, mode)
509 update1 = 1
508 update1 = 1
510
509
511 updated += update1
510 updated += update1
512
511
513 state = repo.dirstate[lfutil.standin(lfile)]
512 state = repo.dirstate[lfutil.standin(lfile)]
514 if state == 'n':
513 if state == 'n':
515 # When rebasing, we need to synchronize the standin and the
514 # When rebasing, we need to synchronize the standin and the
516 # largefile, because otherwise the largefile will get reverted.
515 # largefile, because otherwise the largefile will get reverted.
517 # But for commit's sake, we have to mark the file as unclean.
516 # But for commit's sake, we have to mark the file as unclean.
518 if getattr(repo, "_isrebasing", False):
517 if getattr(repo, "_isrebasing", False):
519 lfdirstate.normallookup(lfile)
518 lfdirstate.normallookup(lfile)
520 else:
519 else:
521 lfdirstate.normal(lfile)
520 lfdirstate.normal(lfile)
522 elif state == 'r':
521 elif state == 'r':
523 lfdirstate.remove(lfile)
522 lfdirstate.remove(lfile)
524 elif state == 'a':
523 elif state == 'a':
525 lfdirstate.add(lfile)
524 lfdirstate.add(lfile)
526 elif state == '?':
525 elif state == '?':
527 lfdirstate.drop(lfile)
526 lfdirstate.drop(lfile)
528
527
529 lfdirstate.write()
528 lfdirstate.write()
530 if printmessage and lfiles:
529 if printmessage and lfiles:
531 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
530 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
532 removed))
531 removed))
533 finally:
532 finally:
534 wlock.release()
533 wlock.release()
535
534
@command('lfpull',
    [('r', 'rev', [], _('pull largefiles for these revisions'))
    ] + commands.remoteopts,
    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # Remember where to pull from so the largefiles store layer can reuse it.
    repo.lfpullsource = source

    revspec = opts.get('rev', [])
    if not revspec:
        # At least one -r/--rev is mandatory; abort with a translated message.
        raise util.Abort(_('no revisions specified'))

    # Resolve the revset expressions and fetch the largefiles referenced by
    # each resulting revision into the local cache, counting what we got.
    totalcached = 0
    for rev in scmutil.revrange(repo, revspec):
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        totalcached += len(cached)
    ui.status(_("%d largefiles cached\n") % totalcached)
General Comments 0
You need to be logged in to leave comments. Login now