##// END OF EJS Templates
largefiles: inline _updatelfile, prepare for further refactorings
Mads Kiilerich -
r20062:452f6873 stable
parent child Browse files
Show More
@@ -1,576 +1,566 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 import lfutil
19 import lfutil
20 import basestore
20 import basestore
21
21
22 # -- Commands ----------------------------------------------------------
22 # -- Commands ----------------------------------------------------------
23
23
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # minimum size (in MB) above which a file becomes a largefile
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        # revmap maps source node -> converted destination node,
        # seeded with the null revision mapping to itself
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # normal -> largefiles conversion
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # the standin directory is an artifact of the conversion;
            # remove it from the destination working directory
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            # remove the largefile copies written into the working
            # directory during conversion, pruning emptied directories
            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            # largefiles -> normal conversion
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
122
122
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Convert a single changeset from a largefiles repository (rsrc) into
    a normal repository (rdst), replacing each standin with the real
    largefile content.  Records the new node in revmap.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        # memctx callback: supply the content for file f in the new commit
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            # the standin's content is the hash that names the largefile
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

            if path is None:
                raise util.Abort(
                    _("missing largefile \'%s\' from revision %s")
                    % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # destination file list: strip the standin prefix from largefiles
    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
176
176
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert a single changeset of a normal repository (rsrc) into the
    largefiles form in rdst, classifying each changed file as a largefile
    or a normal file.

    lfiles, normalfiles and lfiletohash are caches shared across the whole
    conversion and are mutated in place; revmap maps source nodes to
    destination nodes.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        # classify f once; later revisions reuse the cached verdict
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    # symlinks can never be largefiles
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # memctx callback: supply the content for file f in the new commit
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
253
253
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit a memory changeset mirroring ctx into rdst and record the
    resulting node in revmap.'''
    memctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                            getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(memctx)
    rdst.setparents(newnode)
    revmap[ctx.node()] = rdst.changelog.tip()
260
260
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files touched by ctx; for merges, also include
    every file that differs from either parent manifest.'''
    changed = set(ctx.files())
    if node.nullid in parents:
        return changed
    mf = ctx.manifest()
    pmf1 = ctx.parents()[0].manifest()
    pmf2 = ctx.parents()[1].manifest()
    # files present in a parent but absent from this manifest
    changed |= (set(pmf1) | set(pmf2)) - set(mf)
    # files whose content differs from at least one parent
    changed.update(f for f in mf
                   if mf[f] != pmf1.get(f, None) or mf[f] != pmf2.get(f, None))
    return changed
273
273
274 # Convert src parents to dst parents
274 # Convert src parents to dst parents
275 def _convertparents(ctx, revmap):
275 def _convertparents(ctx, revmap):
276 parents = []
276 parents = []
277 for p in ctx.parents():
277 for p in ctx.parents():
278 parents.append(revmap[p.node()])
278 parents.append(revmap[p.node()])
279 while len(parents) < 2:
279 while len(parents) < 2:
280 parents.append(node.nullid)
280 parents.append(node.nullid)
281 return parents
281 return parents
282
282
# Get memfilectx for a normal file
def _getnormalcontext(ui, ctx, f, revmap):
    '''Build a memfilectx for the non-largefile f in ctx; .hgtags content
    has its changeset ids remapped through revmap.

    Raises IOError when f is not present in ctx (removed/renamed), as the
    memctx protocol requires.'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        # tag data embeds changeset ids; rewrite them for the new repo
        data = _converttags(ui, revmap, data)
    islink = 'l' in fctx.flags()
    isexec = 'x' in fctx.flags()
    return context.memfilectx(f, data, islink, isexec, renamed)
298
298
299 # Remap tag data using a revision map
299 # Remap tag data using a revision map
300 def _converttags(ui, revmap, data):
300 def _converttags(ui, revmap, data):
301 newdata = []
301 newdata = []
302 for line in data.splitlines():
302 for line in data.splitlines():
303 try:
303 try:
304 id, name = line.split(' ', 1)
304 id, name = line.split(' ', 1)
305 except ValueError:
305 except ValueError:
306 ui.warn(_('skipping incorrectly formatted tag %s\n'
306 ui.warn(_('skipping incorrectly formatted tag %s\n'
307 % line))
307 % line))
308 continue
308 continue
309 try:
309 try:
310 newid = node.bin(id)
310 newid = node.bin(id)
311 except TypeError:
311 except TypeError:
312 ui.warn(_('skipping incorrectly formatted id %s\n'
312 ui.warn(_('skipping incorrectly formatted id %s\n'
313 % id))
313 % id))
314 continue
314 continue
315 try:
315 try:
316 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
316 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
317 name))
317 name))
318 except KeyError:
318 except KeyError:
319 ui.warn(_('no mapping for id %s\n') % id)
319 ui.warn(_('no mapping for id %s\n') % id)
320 continue
320 continue
321 return ''.join(newdata)
321 return ''.join(newdata)
322
322
323 def _islfile(file, ctx, matcher, size):
323 def _islfile(file, ctx, matcher, size):
324 '''Return true if file should be considered a largefile, i.e.
324 '''Return true if file should be considered a largefile, i.e.
325 matcher matches it or it is larger than size.'''
325 matcher matches it or it is larger than size.'''
326 # never store special .hg* files as largefiles
326 # never store special .hg* files as largefiles
327 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
327 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
328 return False
328 return False
329 if matcher and matcher(file):
329 if matcher and matcher(file):
330 return True
330 return True
331 try:
331 try:
332 return ctx.filectx(file).size() >= size * 1024 * 1024
332 return ctx.filectx(file).size() >= size * 1024 * 1024
333 except error.LookupError:
333 except error.LookupError:
334 return False
334 return False
335
335
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    # ask the store which hashes it already has, and upload only the rest
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    present = store.exists(files)
    files = [h for h in files if not present[h]]
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    sent = 0
    for hash in files:
        ui.progress(_('uploading largefiles'), sent, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        sent += 1
    ui.progress(_('uploading largefiles'), None)
361
361
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    # Pass a list to the function rather than an iterator because we know a
    # list will work.
    revs = range(len(repo)) if all else ['.']
    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
377
377
def debugdirstate(ui, repo):
    '''Show basic information for the largefiles dirstate'''
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    # Print one line per tracked largefile, sorted by name: state character,
    # permission bits, size, and filename.
    for file_, ent in sorted(lfdirstate._map.iteritems()):
        # ent appears to be a dirstate tuple (state, mode, size, ...) --
        # NOTE(review): field order inferred from usage here; confirm
        # against mercurial's dirstate. Mask the mode down to permission
        # bits and clear bits blocked by the process umask.
        mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s\n" % (ent[0], mode, ent[2], file_))
384
384
def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        # restrict to the requested subset
        lfiles = set(lfiles) & set(filelist)
    # (lfile, expected hash) pairs not yet present in the local cache
    toget = []

    for lfile in lfiles:
        try:
            # the standin's content is the hash identifying the largefile
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        except IOError, err:
            if err.errno == errno.ENOENT:
                continue # node must be None and standin wasn't found in wctx
            raise
        if not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
413
413
def downloadlfiles(ui, repo, rev=None):
    '''Fetch into the local cache every largefile referenced by the given
    revisions; return a (cached, missing) pair of counts.'''
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    numcached = 0
    nummissing = 0
    # walkchangerevs on an empty list would return all revs
    if rev != []:
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            cached, missing = cachelfiles(ui, repo, ctx.node())
            numcached += len(cached)
            nummissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % numcached)
    if nummissing > 0:
        ui.status(_("%d largefiles failed to download\n") % nummissing)
    return numcached, nummissing
431
431
def updatelfiles(ui, repo, filelist=None, printmessage=True):
    '''Bring working-directory largefiles in sync with their standins,
    and copy each standin's dirstate state into the largefiles dirstate.

    When filelist is given, only those largefiles are considered.  Prints
    progress/summary messages unless printmessage is False.'''
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        if lfiles:
            if printmessage:
                ui.status(_('getting changed largefiles\n'))
            cachelfiles(ui, repo, None, lfiles)

        updated, removed = 0, 0
        for lfile in lfiles:
            # updates a single largefile and copies the state of its standin
            # from the repository's dirstate to its state in the lfdirstate.
            abslfile = repo.wjoin(lfile)
            absstandin = repo.wjoin(lfutil.standin(lfile))
            if os.path.exists(absstandin):
                # preserve the user's .orig backup alongside the largefile
                if (os.path.exists(absstandin + '.orig') and
                    os.path.exists(abslfile)):
                    shutil.copyfile(abslfile, abslfile + '.orig')
                update1 = 0
                expecthash = lfutil.readstandin(repo, lfile)
                if (expecthash != '' and
                    (not os.path.exists(abslfile) or
                     expecthash != lfutil.hashfile(abslfile))):
                    if not lfutil.copyfromcache(repo, expecthash, lfile):
                        # use normallookup() to allocate entry in largefiles
                        # dirstate, because lack of it misleads
                        # lfilesrepo.status() into recognition that such cache
                        # missing files are REMOVED.
                        if lfile not in repo[None]: # not switched to normal
                            util.unlinkpath(abslfile, ignoremissing=True)
                        lfdirstate.normallookup(lfile)
                        continue # don't try to set the mode
                    else:
                        # Synchronize largefile dirstate to the last modified
                        # time of the file
                        lfdirstate.normal(lfile)
                        update1 = 1
                # propagate the standin's permission bits to the largefile
                mode = os.stat(absstandin).st_mode
                if mode != os.stat(abslfile).st_mode:
                    os.chmod(abslfile, mode)
                    update1 = 1
                updated += update1
            else:
                # Remove lfiles for which the standin is deleted, unless the
                # lfile is added to the repository again. This happens when a
                # largefile is converted back to a normal file: the standin
                # disappears, but a new (normal) file appears as the lfile.
                if (os.path.exists(abslfile) and
                    repo.dirstate.normalize(lfile) not in repo[None]):
                    util.unlinkpath(abslfile)
                    removed += 1
            # mirror the standin's dirstate state into the lfdirstate
            state = repo.dirstate[lfutil.standin(lfile)]
            if state == 'n':
                # When rebasing, we need to synchronize the standin and the
                # largefile, because otherwise the largefile will get reverted.
                # But for commit's sake, we have to mark the file as unclean.
                if getattr(repo, "_isrebasing", False):
                    lfdirstate.normallookup(lfile)
                else:
                    lfdirstate.normal(lfile)
            elif state == 'r':
                lfdirstate.remove(lfile)
            elif state == 'a':
                lfdirstate.add(lfile)
            elif state == '?':
                lfdirstate.drop(lfile)

        lfdirstate.write()
        if printmessage and lfiles:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()
461
511
def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    # Absolute working-directory paths of the largefile and its standin.
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        # A '.orig' standin exists (presumably left by a merge/revert —
        # TODO confirm); back up the current largefile content as '.orig'
        # too before it gets overwritten below.
        if os.path.exists(absstandin + '.orig') and os.path.exists(abslfile):
            shutil.copyfile(abslfile, abslfile + '.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        # Only fetch when the standin records a hash and the working copy
        # is missing or does not match that hash.
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                # use normallookup() to allocate entry in largefiles dirstate,
                # because lack of it misleads lfilesrepo.status() into
                # recognition that such cache missing files are REMOVED.
                if lfile not in repo[None]: # not switched to normal file
                    util.unlinkpath(abslfile, ignoremissing=True)
                lfdirstate.normallookup(lfile)
                return None # don't try to set the mode
            else:
                # Synchronize largefile dirstate to the last modified time of
                # the file
                lfdirstate.normal(lfile)
                ret = 1
        # Propagate the standin's permission bits (e.g. the executable bit)
        # to the largefile; counts as a modification.
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        # Remove lfiles for which the standin is deleted, unless the
        # lfile is added to the repository again. This happens when a
        # largefile is converted back to a normal file: the standin
        # disappears, but a new (normal) file appears as the lfile.
        if (os.path.exists(abslfile) and
            repo.dirstate.normalize(lfile) not in repo[None]):
            util.unlinkpath(abslfile)
            ret = -1
    # Mirror the standin's dirstate status ('n'/'r'/'a'/'?') into the
    # largefiles dirstate.
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        # When rebasing, we need to synchronize the standin and the largefile,
        # because otherwise the largefile will get reverted. But for commit's
        # sake, we have to mark the file as unclean.
        if getattr(repo, "_isrebasing", False):
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret
521
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    # Record where largefiles should be pulled from; read by the store code.
    repo.lfpullsource = source

    # A revision set is mandatory -- there is no sensible default selection.
    revspec = opts.get('rev', [])
    if not revspec:
        raise util.Abort(_('no revisions specified'))

    # Cache the largefiles of every requested revision, tallying how many
    # were actually fetched into the local cache.
    total = 0
    for rev in scmutil.revrange(repo, revspec):
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        cached, missing = cachelfiles(ui, repo, rev)
        total += len(cached)
    ui.status(_("%d largefiles cached\n") % total)
# -- hg commands declarations ------------------------------------------------

# Command table consumed by Mercurial's extension loader: maps each command
# name to a (callback, options, synopsis) tuple.
cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    'lfpull': (lfpull,
               [('r', 'rev', [], _('pull largefiles for these revisions'))
                ] + commands.remoteopts,
               _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]')
               ),
    }

# Append 'lfconvert' to the space-separated list of commands whose repository
# is inferred rather than taken from the working directory (it operates on
# explicit SOURCE/DEST arguments) -- NOTE(review): semantics inferred from the
# attribute name; confirm against mercurial.commands.
commands.inferrepo += " lfconvert"
General Comments 0
You need to be logged in to leave comments. Login now