largefiles: mark a string for translation
Matt Mackall
r15173:3d27a8ff default
@@ -1,488 +1,488 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command functions: lfadd() et al., plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''Convert a normal repository to a largefiles repository

    Convert the source repository, creating an identical repository except
    that all files that match the given patterns, or are over the given size,
    will be added as largefiles. The size used to decide whether to track a
    file as a largefile is the size of the first version of the file. After
    running this command you will need to make sure that largefiles is enabled
    anywhere you intend to push the new repository.'''
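    # Illustrative invocation (an example only, not part of the extension;
    # the repository names and the pattern below are made up):
    #   hg lfconvert --size 10 bigrepo bigrepo-lf '**.iso'
    # makes every '**.iso' match and every file whose first revision is at
    # least 10 megabytes a largefile in the converted repository, while
    #   hg lfconvert --tonormal bigrepo-lf bigrepo-plain
    # converts a largefiles repository back to a normal one.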

    if opts['tonormal']:
        tolfile = False
    else:
        tolfile = True
        size = opts['size']
        if not size:
            size = ui.config(lfutil.longname, 'size', default=None)
            try:
                size = int(size)
            except ValueError:
                raise util.Abort(_('largefiles.size must be integer, was %s\n')
                                 % size)
            except TypeError:
                raise util.Abort(_('size must be specified'))

    try:
        rsrc = hg.repository(ui, src)
        if not rsrc.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % src)
    except error.RepoError, err:
        ui.traceback()
        raise util.Abort(err.args[0])
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_('destination %s already exists') % dest)
        elif os.listdir(dest):
            raise util.Abort(_('destination %s is not empty') % dest)
    try:
        ui.status(_('initializing destination %s\n') % dest)
        rdst = hg.repository(ui, dest, create=True)
        if not rdst.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % dest)
    except error.RepoError:
        ui.traceback()
        raise util.Abort(_('%s is not a repo') % dest)

    success = False
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
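        # revmap maps each source changeset node to the node of its converted
        # counterpart in the destination; it is used below to rewrite parent
        # pointers and .hgtags entries.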
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

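            # the standin in the source repository stores only the sha1 hash
            # of the largefile's contents; look the hash up in the local
            # largefile store to recover the real data for the destination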
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # lfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # lfile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
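                # hash the largefile's contents; the standin committed to the
                # destination contains only this hash, while the real data is
                # written out to fullpath in the working directory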
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a lfile because lfileness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''
    A file is a lfile if it matches a pattern or is over
    the given size.
    '''
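    # e.g. with size=10, a file whose revision in this changeset is at least
    # 10 * 1024 * 1024 bytes is classified as a largefile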
    # Never store .hgtags, .hgignore or .hgsigs as lfiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    # Don't upload locally. All largefiles are in the system wide cache
    # so the other repo can just get them from there.
    if not files or rdst.local():
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
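    # only transfer the largefiles that the remote store does not already have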
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('Missing largefile %s needs to be uploaded')
                             % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
-    ui.progress('uploading largefiles', None)
+    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if (not os.path.exists(repo.wjoin(lfile)) \
            or expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and \
            not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
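
# Illustrative use of cachelfiles() (a sketch only; the warning message below
# is made up and not part of the extension):
#     cached, missing = cachelfiles(ui, repo, '.')
#     if missing:
#         ui.warn(_('%d largefiles could not be found\n') % len(missing))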

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
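            # (1 means the largefile was updated, -1 removed, 0 unchanged,
            # and None that it was missing from the cache)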
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if expecthash != '' and \
                (not os.path.exists(abslfile) or \
                expecthash != lfutil.hashfile(abslfile)):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        try:
            # Mercurial >= 1.9
            lfdirstate.drop(lfile)
        except AttributeError:
            # Mercurial <= 1.8
            lfdirstate.forget(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------


cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', 0, 'All files over this size (in megabytes) '
                    'will be considered largefiles. This can also be specified '
                    'in your hgrc as [largefiles].size.'),
                   ('', 'tonormal', False,
                    'Convert from a largefiles repo to a normal repo')],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
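
# Example hgrc for a repository using largefiles (illustrative only; the size
# and patterns values are placeholders):
#     [extensions]
#     largefiles =
#
#     [largefiles]
#     size = 10
#     patterns = **.iso **.zip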