largefiles: fix uppercase in abort message
Martin Geisler
r15380:a5388868 stable
@@ -1,473 +1,473 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbewteen().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
        getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
-                            _('Renamed/copied largefile %s becomes symlink')
+                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                util.makedirs(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
        getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
        cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
        if printmessage and (removed or updated) and not printed:
            ui.status(_('getting changed largefiles\n'))
            printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
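
The one-line change above lowercases the start of an abort message, in line with Mercurial's convention that abort and status messages begin with a lowercase letter and carry no trailing period. A minimal sketch of the corrected call pattern (illustrative only; the helper below is hypothetical and not part of this changeset):

from mercurial import util
from mercurial.i18n import _

def checkrenamedlfile(f, renamedlfile):
    # hypothetical helper: abort messages start lowercase, no trailing period
    if renamedlfile:
        raise util.Abort(
            _('renamed/copied largefile %s becomes symlink') % f)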