##// END OF EJS Templates
largefiles: remove pasted code...
Levi Bard -
r15811:b9886dde default
parent child Browse files
Show More
@@ -1,502 +1,491 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os
11 import os
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error
14 from mercurial import util, match as match_, hg, node, context, error
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16
16
17 import lfutil
17 import lfutil
18 import basestore
18 import basestore
19
19
20 # -- Commands ----------------------------------------------------------
20 # -- Commands ----------------------------------------------------------
21
21
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    # Initialize before the try block: if rdst.lock() itself raises, the
    # finally clause must not hit a NameError on dst_lock and mask the
    # original exception.
    dst_lock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # The standin directory in the working copy of the destination
            # is an artifact of the conversion; drop it.
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    # prune now-empty parent directories; OSError means a
                    # directory was not empty, which is fine
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        if dst_lock:
            dst_lock.release()
117
117
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Replay one source changeset into rdst, converting standins back to
    normal files (the --to-normal direction).'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        standin = lfutil.standin(f)
        if standin not in files:
            # plain file: delegate to the shared helper
            return _getnormalcontext(repo.ui, ctx, f, revmap)
        # if the file isn't in the manifest then it was removed
        # or renamed, raise IOError to indicate this
        try:
            fctx = ctx.filectx(standin)
        except error.LookupError:
            raise IOError()
        renamed = fctx.renamed()
        if renamed:
            renamed = lfutil.splitstandin(renamed[0])

        hash = fctx.data().strip()
        path = lfutil.findfile(rsrc, hash)
        ### TODO: What if the file is not cached?
        data = ''
        fd = None
        try:
            fd = open(path, 'rb')
            data = fd.read()
        finally:
            if fd:
                fd.close()
        return context.memfilectx(f, data, 'l' in fctx.flags(),
                                  'x' in fctx.flags(), renamed)

    # standins are recorded in the destination under their real file names
    dstfiles = [lfutil.splitstandin(f) if lfutil.isstandin(f) else f
                for f in files]
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
193
161
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Replay one source changeset into rdst, converting qualifying files
    into largefiles (standin + store entry).

    lfiles/normalfiles accumulate the classification across changesets so a
    file keeps its largefile-ness for its whole history; lfiletohash caches
    the last written content hash per largefile to avoid rewriting standins.
    '''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    # symlinks are never tracked as largefiles
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                util.makedirs(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # fd must exist before the try: if open() raises, the
                    # finally clause would otherwise hit a NameError and
                    # mask the real failure.
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
246
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit a memctx mirroring ctx into rdst and record the mapping from
    the source node to the newly committed destination node in revmap.'''
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    newnode = rdst.commitctx(mctx)
    rdst.dirstate.setparents(newnode)
    revmap[ctx.node()] = rdst.changelog.tip()
326
253
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files touched by ctx; for merges (two real
    parents) also include files that differ from either parent.'''
    changed = set(ctx.files())
    if node.nullid not in parents:
        man = ctx.manifest()
        manp1 = ctx.parents()[0].manifest()
        manp2 = ctx.parents()[1].manifest()
        # files present in a parent but dropped in the merge result
        changed |= (set(manp1) | set(manp2)) - set(man)
        for fname in man:
            if man[fname] != manp1.get(fname, None) or \
               man[fname] != manp2.get(fname, None):
                changed.add(fname)
    return changed
266
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    '''Map ctx's parent nodes through revmap, padding with nullid so the
    result always has exactly two entries (as memctx expects).'''
    parents = [revmap[p.node()] for p in ctx.parents()]
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
275
# Get memfilectx for a normal file
def _getnormalcontext(ui, ctx, f, revmap):
    '''Build a memfilectx for a non-largefile; raises IOError when the file
    is absent from the manifest (removed/renamed), remapping .hgtags data
    through revmap on the way.'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        # absent from the manifest: removed or renamed
        raise IOError()
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(ui, revmap, data)
    return context.memfilectx(f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
291
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    '''Rewrite .hgtags content so each tag line points at the converted
    node; malformed lines and nodes missing from revmap are skipped with a
    warning instead of aborting.'''
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            # i18n fix: translate the format string first, then
            # interpolate; formatting inside _() defeats catalog lookup
            ui.warn(_('skipping incorrectly formatted tag %s\n') % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n') % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)
315
def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file in ('.hgtags', '.hgignore', '.hgsigs'):
        return False
    if matcher and matcher(file):
        return True
    try:
        # size is expressed in megabytes
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False
339
328
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    # only upload hashes the store does not already have
    missing = [h for h in files if not store.exists(h)]
    for at, hash in enumerate(missing):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(missing))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
    ui.progress(_('uploading largefiles'), None)
361
350
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
377
366
def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    toget = []

    for lfile in lfutil.listlfiles(repo, node):
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        wfile = repo.wjoin(lfile)
        uptodate = (os.path.exists(wfile) and
                    expectedhash == lfutil.hashfile(wfile))
        if not uptodate and not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if not toget:
        return ([], [])
    store = basestore._openstore(repo)
    return store.get(toget)
404
393
def updatelfiles(ui, repo, filelist=None, printmessage=True):
    '''Bring working-copy largefiles in sync with their standins,
    optionally restricted to filelist, reporting counts when
    printmessage is true.'''
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for lfile in lfiles:
            # _updatelfile returns 1 (modified), -1 (removed), 0
            # (unchanged) or None (missing from cache); only count the
            # first two
            i = _updatelfile(repo, lfdirstate, lfile)
            if i:
                if i > 0:
                    updated += i
                else:
                    removed -= i
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()
436
425
def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        # preserve the user's copy alongside a merge-backup standin
        if os.path.exists(absstandin + '.orig'):
            shutil.copyfile(abslfile, abslfile + '.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        stale = (not os.path.exists(abslfile) or
                 expecthash != lfutil.hashfile(abslfile))
        if expecthash != '' and stale:
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                # use normallookup() to allocate entry in largefiles dirstate,
                # because lack of it misleads lfiles_repo.status() into
                # recognition that such cache missing files are REMOVED.
                lfdirstate.normallookup(lfile)
                return None # don't try to set the mode
            ret = 1
        # the standin's mode (e.g. executable bit) is authoritative
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        # Remove lfiles for which the standin is deleted, unless the
        # lfile is added to the repository again. This happens when a
        # largefile is converted back to a normal file: the standin
        # disappears, but a new (normal) file appears as the lfile.
        if os.path.exists(abslfile) and lfile not in repo[None]:
            os.unlink(abslfile)
            ret = -1
    # mirror the standin's dirstate status into the largefiles dirstate
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        # When rebasing, we need to synchronize the standin and the largefile,
        # because otherwise the largefile will get reverted. But for commit's
        # sake, we have to mark the file as unclean.
        if getattr(repo, "_isrebasing", False):
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret
489
478
490 # -- hg commands declarations ------------------------------------------------
479 # -- hg commands declarations ------------------------------------------------
491
480
# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
General Comments 0
You need to be logged in to leave comments. Login now