largefiles: fix multistatement line
Matt Mackall
r15172:fb1dcd2a default
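
The change is a one-line style cleanup: in the finally block of the getfilectx() helper inside _addchangeset(), a compound statement is split so that each statement sits on its own line, as shown in the hunk below. In essence:

    # before
    finally:
        if fd: fd.close()

    # after
    finally:
        if fd:
            fd.close()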
@@ -1,487 +1,488 @@
 # Copyright 2009-2010 Gregory P. Ward
 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
 # Copyright 2010-2011 Fog Creek Software
 # Copyright 2010-2011 Unity Technologies
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 '''High-level command functions: lfadd() et. al, plus the cmdtable.'''

 import os
 import shutil

 from mercurial import util, match as match_, hg, node, context, error
 from mercurial.i18n import _

 import lfutil
 import basestore

 # -- Commands ----------------------------------------------------------

 def lfconvert(ui, src, dest, *pats, **opts):
     '''Convert a normal repository to a largefiles repository

     Convert source repository creating an identical repository, except that all
     files that match the patterns given, or are over the given size will be
     added as largefiles. The size used to determine whether or not to track a
     file as a largefile is the size of the first version of the file. After
     running this command you will need to make sure that largefiles is enabled
     anywhere you intend to push the new repository.'''

     if opts['tonormal']:
         tolfile = False
     else:
         tolfile = True
         size = opts['size']
         if not size:
             size = ui.config(lfutil.longname, 'size', default=None)
         try:
             size = int(size)
         except ValueError:
             raise util.Abort(_('largefiles.size must be integer, was %s\n')
                              % size)
         except TypeError:
             raise util.Abort(_('size must be specified'))

     try:
         rsrc = hg.repository(ui, src)
         if not rsrc.local():
             raise util.Abort(_('%s is not a local Mercurial repo') % src)
     except error.RepoError, err:
         ui.traceback()
         raise util.Abort(err.args[0])
     if os.path.exists(dest):
         if not os.path.isdir(dest):
             raise util.Abort(_('destination %s already exists') % dest)
         elif os.listdir(dest):
             raise util.Abort(_('destination %s is not empty') % dest)
     try:
         ui.status(_('initializing destination %s\n') % dest)
         rdst = hg.repository(ui, dest, create=True)
         if not rdst.local():
             raise util.Abort(_('%s is not a local Mercurial repo') % dest)
     except error.RepoError:
         ui.traceback()
         raise util.Abort(_('%s is not a repo') % dest)

     success = False
     try:
         # Lock destination to prevent modification while it is converted to.
         # Don't need to lock src because we are just reading from its history
         # which can't change.
         dst_lock = rdst.lock()

         # Get a list of all changesets in the source. The easy way to do this
         # is to simply walk the changelog, using changelog.nodesbewteen().
         # Take a look at mercurial/revlog.py:639 for more details.
         # Use a generator instead of a list to decrease memory usage
         ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
             rsrc.heads())[0])
         revmap = {node.nullid: node.nullid}
         if tolfile:
             lfiles = set()
             normalfiles = set()
             if not pats:
                 pats = ui.config(lfutil.longname, 'patterns', default=())
                 if pats:
                     pats = pats.split(' ')
             if pats:
                 matcher = match_.match(rsrc.root, '', list(pats))
             else:
                 matcher = None

             lfiletohash = {}
             for ctx in ctxs:
                 ui.progress(_('converting revisions'), ctx.rev(),
                     unit=_('revision'), total=rsrc['tip'].rev())
                 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                     lfiles, normalfiles, matcher, size, lfiletohash)
             ui.progress(_('converting revisions'), None)

             if os.path.exists(rdst.wjoin(lfutil.shortname)):
                 shutil.rmtree(rdst.wjoin(lfutil.shortname))

             for f in lfiletohash.keys():
                 if os.path.isfile(rdst.wjoin(f)):
                     os.unlink(rdst.wjoin(f))
                 try:
                     os.removedirs(os.path.dirname(rdst.wjoin(f)))
                 except OSError:
                     pass

         else:
             for ctx in ctxs:
                 ui.progress(_('converting revisions'), ctx.rev(),
                     unit=_('revision'), total=rsrc['tip'].rev())
                 _addchangeset(ui, rsrc, rdst, ctx, revmap)

         ui.progress(_('converting revisions'), None)
         success = True
     finally:
         if not success:
             # we failed, remove the new directory
             shutil.rmtree(rdst.root)
         dst_lock.release()

 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
     # Convert src parents to dst parents
     parents = []
     for p in ctx.parents():
         parents.append(revmap[p.node()])
     while len(parents) < 2:
         parents.append(node.nullid)

     # Generate list of changed files
     files = set(ctx.files())
     if node.nullid not in parents:
         mc = ctx.manifest()
         mp1 = ctx.parents()[0].manifest()
         mp2 = ctx.parents()[1].manifest()
         files |= (set(mp1) | set(mp2)) - set(mc)
         for f in mc:
             if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                 files.add(f)

     def getfilectx(repo, memctx, f):
         if lfutil.standin(f) in files:
             # if the file isn't in the manifest then it was removed
             # or renamed, raise IOError to indicate this
             try:
                 fctx = ctx.filectx(lfutil.standin(f))
             except error.LookupError:
                 raise IOError()
             renamed = fctx.renamed()
             if renamed:
                 renamed = lfutil.splitstandin(renamed[0])

             hash = fctx.data().strip()
             path = lfutil.findfile(rsrc, hash)
             ### TODO: What if the file is not cached?
             data = ''
             fd = None
             try:
                 fd = open(path, 'rb')
                 data = fd.read()
             finally:
-                if fd: fd.close()
+                if fd:
+                    fd.close()
             return context.memfilectx(f, data, 'l' in fctx.flags(),
                 'x' in fctx.flags(), renamed)
         else:
             try:
                 fctx = ctx.filectx(f)
             except error.LookupError:
                 raise IOError()
             renamed = fctx.renamed()
             if renamed:
                 renamed = renamed[0]
             data = fctx.data()
             if f == '.hgtags':
                 newdata = []
                 for line in data.splitlines():
                     id, name = line.split(' ', 1)
                     newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                         name))
                 data = ''.join(newdata)
             return context.memfilectx(f, data, 'l' in fctx.flags(),
                 'x' in fctx.flags(), renamed)

     dstfiles = []
     for file in files:
         if lfutil.isstandin(file):
             dstfiles.append(lfutil.splitstandin(file))
         else:
             dstfiles.append(file)
     # Commit
     mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
         getfilectx, ctx.user(), ctx.date(), ctx.extra())
     ret = rdst.commitctx(mctx)
     rdst.dirstate.setparents(ret)
     revmap[ctx.node()] = rdst.changelog.tip()

 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
         matcher, size, lfiletohash):
     # Convert src parents to dst parents
     parents = []
     for p in ctx.parents():
         parents.append(revmap[p.node()])
     while len(parents) < 2:
         parents.append(node.nullid)

     # Generate list of changed files
     files = set(ctx.files())
     if node.nullid not in parents:
         mc = ctx.manifest()
         mp1 = ctx.parents()[0].manifest()
         mp2 = ctx.parents()[1].manifest()
         files |= (set(mp1) | set(mp2)) - set(mc)
         for f in mc:
             if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                 files.add(f)

     dstfiles = []
     for f in files:
         if f not in lfiles and f not in normalfiles:
             islfile = _islfile(f, ctx, matcher, size)
             # If this file was renamed or copied then copy
             # the lfileness of its predecessor
             if f in ctx.manifest():
                 fctx = ctx.filectx(f)
                 renamed = fctx.renamed()
                 renamedlfile = renamed and renamed[0] in lfiles
                 islfile |= renamedlfile
                 if 'l' in fctx.flags():
                     if renamedlfile:
                         raise util.Abort(
                             _('Renamed/copied largefile %s becomes symlink')
                             % f)
                     islfile = False
             if islfile:
                 lfiles.add(f)
             else:
                 normalfiles.add(f)

         if f in lfiles:
             dstfiles.append(lfutil.standin(f))
             # lfile in manifest if it has not been removed/renamed
             if f in ctx.manifest():
                 if 'l' in ctx.filectx(f).flags():
                     if renamed and renamed[0] in lfiles:
                         raise util.Abort(_('largefile %s becomes symlink') % f)

                 # lfile was modified, update standins
                 fullpath = rdst.wjoin(f)
                 lfutil.createdir(os.path.dirname(fullpath))
                 m = util.sha1('')
                 m.update(ctx[f].data())
                 hash = m.hexdigest()
                 if f not in lfiletohash or lfiletohash[f] != hash:
                     try:
                         fd = open(fullpath, 'wb')
                         fd.write(ctx[f].data())
                     finally:
                         if fd:
                             fd.close()
                     executable = 'x' in ctx[f].flags()
                     os.chmod(fullpath, lfutil.getmode(executable))
                     lfutil.writestandin(rdst, lfutil.standin(f), hash,
                         executable)
                     lfiletohash[f] = hash
         else:
             # normal file
             dstfiles.append(f)

     def getfilectx(repo, memctx, f):
         if lfutil.isstandin(f):
             # if the file isn't in the manifest then it was removed
             # or renamed, raise IOError to indicate this
             srcfname = lfutil.splitstandin(f)
             try:
                 fctx = ctx.filectx(srcfname)
             except error.LookupError:
                 raise IOError()
             renamed = fctx.renamed()
             if renamed:
                 # standin is always a lfile because lfileness
                 # doesn't change after rename or copy
                 renamed = lfutil.standin(renamed[0])

             return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                 fctx.flags(), 'x' in fctx.flags(), renamed)
         else:
             try:
                 fctx = ctx.filectx(f)
             except error.LookupError:
                 raise IOError()
             renamed = fctx.renamed()
             if renamed:
                 renamed = renamed[0]

             data = fctx.data()
             if f == '.hgtags':
                 newdata = []
                 for line in data.splitlines():
                     id, name = line.split(' ', 1)
                     newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                         name))
                 data = ''.join(newdata)
             return context.memfilectx(f, data, 'l' in fctx.flags(),
                 'x' in fctx.flags(), renamed)

     # Commit
     mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
         getfilectx, ctx.user(), ctx.date(), ctx.extra())
     ret = rdst.commitctx(mctx)
     rdst.dirstate.setparents(ret)
     revmap[ctx.node()] = rdst.changelog.tip()

 def _islfile(file, ctx, matcher, size):
     '''
     A file is a lfile if it matches a pattern or is over
     the given size.
     '''
     # Never store hgtags or hgignore as lfiles
     if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
         return False
     if matcher and matcher(file):
         return True
     try:
         return ctx.filectx(file).size() >= size * 1024 * 1024
     except error.LookupError:
         return False

 def uploadlfiles(ui, rsrc, rdst, files):
     '''upload largefiles to the central store'''

     # Don't upload locally. All largefiles are in the system wide cache
     # so the other repo can just get them from there.
     if not files or rdst.local():
         return

     store = basestore._openstore(rsrc, rdst, put=True)

     at = 0
     files = filter(lambda h: not store.exists(h), files)
     for hash in files:
         ui.progress(_('uploading largefiles'), at, unit='largefile',
             total=len(files))
         source = lfutil.findfile(rsrc, hash)
         if not source:
             raise util.Abort(_('Missing largefile %s needs to be uploaded')
                 % hash)
         # XXX check for errors here
         store.put(source, hash)
         at += 1
     ui.progress('uploading largefiles', None)

 def verifylfiles(ui, repo, all=False, contents=False):
     '''Verify that every big file revision in the current changeset
     exists in the central store. With --contents, also verify that
     the contents of each big file revision are correct (SHA-1 hash
     matches the revision ID). With --all, check every changeset in
     this repository.'''
     if all:
         # Pass a list to the function rather than an iterator because we know a
         # list will work.
         revs = range(len(repo))
     else:
         revs = ['.']

     store = basestore._openstore(repo)
     return store.verify(revs, contents=contents)

 def cachelfiles(ui, repo, node):
     '''cachelfiles ensures that all largefiles needed by the specified revision
     are present in the repository's largefile cache.

     returns a tuple (cached, missing). cached is the list of files downloaded
     by this operation; missing is the list of files that were needed but could
     not be found.'''
     lfiles = lfutil.listlfiles(repo, node)
     toget = []

     for lfile in lfiles:
         expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
         # if it exists and its hash matches, it might have been locally
         # modified before updating and the user chose 'local'. in this case,
         # it will not be in any store, so don't look for it.
         if (not os.path.exists(repo.wjoin(lfile)) \
                 or expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and \
                 not lfutil.findfile(repo, expectedhash):
             toget.append((lfile, expectedhash))

     if toget:
         store = basestore._openstore(repo)
         ret = store.get(toget)
         return ret

     return ([], [])

 def updatelfiles(ui, repo, filelist=None, printmessage=True):
     wlock = repo.wlock()
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

         if filelist is not None:
             lfiles = [f for f in lfiles if f in filelist]

         printed = False
         if printmessage and lfiles:
             ui.status(_('getting changed largefiles\n'))
             printed = True
             cachelfiles(ui, repo, '.')

         updated, removed = 0, 0
         for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
             # increment the appropriate counter according to _updatelfile's
             # return value
             updated += i > 0 and i or 0
             removed -= i < 0 and i or 0
             if printmessage and (removed or updated) and not printed:
                 ui.status(_('getting changed largefiles\n'))
                 printed = True

         lfdirstate.write()
         if printed and printmessage:
             ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                 removed))
     finally:
         wlock.release()

 def _updatelfile(repo, lfdirstate, lfile):
     '''updates a single largefile and copies the state of its standin from
     the repository's dirstate to its state in the lfdirstate.

     returns 1 if the file was modified, -1 if the file was removed, 0 if the
     file was unchanged, and None if the needed largefile was missing from the
     cache.'''
     ret = 0
     abslfile = repo.wjoin(lfile)
     absstandin = repo.wjoin(lfutil.standin(lfile))
     if os.path.exists(absstandin):
         if os.path.exists(absstandin+'.orig'):
             shutil.copyfile(abslfile, abslfile+'.orig')
         expecthash = lfutil.readstandin(repo, lfile)
         if expecthash != '' and \
                 (not os.path.exists(abslfile) or \
                 expecthash != lfutil.hashfile(abslfile)):
             if not lfutil.copyfromcache(repo, expecthash, lfile):
                 return None # don't try to set the mode or update the dirstate
             ret = 1
         mode = os.stat(absstandin).st_mode
         if mode != os.stat(abslfile).st_mode:
             os.chmod(abslfile, mode)
             ret = 1
     else:
         if os.path.exists(abslfile):
             os.unlink(abslfile)
             ret = -1
     state = repo.dirstate[lfutil.standin(lfile)]
     if state == 'n':
         lfdirstate.normal(lfile)
     elif state == 'r':
         lfdirstate.remove(lfile)
     elif state == 'a':
         lfdirstate.add(lfile)
     elif state == '?':
         try:
             # Mercurial >= 1.9
             lfdirstate.drop(lfile)
         except AttributeError:
             # Mercurial <= 1.8
             lfdirstate.forget(lfile)
     return ret

 # -- hg commands declarations ------------------------------------------------


 cmdtable = {
     'lfconvert': (lfconvert,
         [('s', 'size', 0, 'All files over this size (in megabytes) '
             'will be considered largefiles. This can also be specified '
             'in your hgrc as [largefiles].size.'),
          ('','tonormal',False,
             'Convert from a largefiles repo to a normal repo')],
         _('hg lfconvert SOURCE DEST [FILE ...]')),
     }
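
As an aside, this is the kind of pattern an automated style check can flag before review. The sketch below is illustrative only: it is not Mercurial's contrib/check-code.py, and its single regex and compound-statement heuristic are assumptions made for this example. It reports block-introducing statements that carry further code after the colon on the same physical line:

    # Illustrative sketch only: a crude checker for compound lines such as
    # "if fd: fd.close()". Not Mercurial's contrib/check-code.py; the regex
    # below is an assumption for this example and will miss edge cases
    # (strings or lambdas containing colons, for instance).
    import re
    import sys

    # A block keyword, anything up to a colon, then real code (not just a
    # comment or end-of-line) after that colon on the same line.
    _COMPOUND = re.compile(
        r'^\s*(?:if|elif|else|for|while|try|except|finally|with|def|class)\b'
        r'[^#\n]*:\s*[^\s#]')

    def check(path):
        problems = 0
        with open(path) as fp:
            for lineno, line in enumerate(fp, 1):
                if _COMPOUND.match(line):
                    print('%s:%d: multiple statements on one line'
                          % (path, lineno))
                    problems += 1
        return problems

    if __name__ == '__main__':
        sys.exit(1 if sum(check(p) for p in sys.argv[1:]) else 0)

Run over the pre-fix version of the file above, a check along these lines should flag only the offending "if fd: fd.close()" line.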