##// END OF EJS Templates
largefiles: use "normallookup", if "mtime" of standin is unset...
FUJIWARA Katsunori -
r21932:21a2f31f stable
parent child Browse files
Show More
@@ -0,0 +1,53 b''
1 This file focuses mainly on updating largefiles in the working
2 directory (and ".hg/largefiles/dirstate")
3
4 $ cat >> $HGRCPATH <<EOF
5 > [ui]
6 > merge = internal:fail
7 > [extensions]
8 > largefiles =
9 > EOF
10
11 $ hg init repo
12 $ cd repo
13
14 $ echo large1 > large1
15 $ echo large2 > large2
16 $ hg add --large large1 large2
17 $ echo normal1 > normal1
18 $ hg add normal1
19 $ hg commit -m '#0'
20 $ echo 'large1 in #1' > large1
21 $ echo 'normal1 in #1' > normal1
22 $ hg commit -m '#1'
23 $ hg update -q -C 0
24 $ echo 'large2 in #2' > large2
25 $ hg commit -m '#2'
26 created new head
27
28 Test that "hg merge" updates largefiles from "other" correctly
29
30 (getting largefiles from "other" normally)
31
32 $ hg status -A large1
33 C large1
34 $ cat large1
35 large1
36 $ cat .hglf/large1
37 4669e532d5b2c093a78eca010077e708a071bb64
38 $ hg merge --config debug.dirstate.delaywrite=2
39 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 (branch merge, don't forget to commit)
41 getting changed largefiles
42 1 largefiles updated, 0 removed
43 $ hg status -A large1
44 M large1
45 $ cat large1
46 large1 in #1
47 $ cat .hglf/large1
48 58e24f733a964da346e2407a2bee99d9001184f5
49 $ hg diff -c 1 --nodates .hglf/large1 | grep '^[+-][0-9a-z]'
50 -4669e532d5b2c093a78eca010077e708a071bb64
51 +58e24f733a964da346e2407a2bee99d9001184f5
52
53 $ cd ..
@@ -1,572 +1,575 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 import lfutil
19 import lfutil
20 import basestore
20 import basestore
21
21
# -- Commands ----------------------------------------------------------

# Command registration table for this extension; populated by the
# @command decorator below.
cmdtable = {}
command = cmdutil.command(cmdtable)
26
26
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        # size threshold is only meaningful when converting *to* largefiles
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # remove the working-copy largefile store left behind by the
            # conversion commits
            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
134
134
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Recreate changeset ``ctx`` from ``rsrc`` in ``rdst``, expanding
    largefile standins back into normal file contents (the --to-normal
    direction of lfconvert). ``revmap`` maps already-converted source
    nodes to destination nodes and is updated by the commit.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            # the standin's content is the hash naming the real largefile
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile \'%s\' from revision %s")
                         % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # map standin names back to the real file names for the new commit
    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
188
188
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Recreate changeset ``ctx`` from ``rsrc`` in ``rdst``, converting
    files that match ``matcher`` or exceed ``size`` MB into largefile
    standins (the to-largefiles direction of lfconvert). ``lfiles``,
    ``normalfiles`` and ``lfiletohash`` are shared accumulators updated
    in place across changesets; ``revmap`` maps source nodes to
    destination nodes.'''
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(), 'x' in fctx.flags(),
                                      renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
266
266
def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    '''Commit ``dstfiles`` into ``rdst`` as an in-memory changeset that
    mirrors ``ctx`` (same description, user, date and extras), then
    record the source-node -> destination-node mapping in ``revmap``.'''
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()
273
273
# Generate list of changed files
def _getchangedfiles(ctx, parents):
    '''Return the set of files touched by ``ctx``. For merges (two real
    parents), also include files present in either parent but not in the
    merge result, and files whose manifest node differs from either
    parent, so the conversion commit records them explicitly.'''
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)
    return files
286
286
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    '''Map the parents of ``ctx`` through ``revmap`` and pad the result
    with nullid so exactly two parents are always returned.'''
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents
295
295
# Get memfilectx for a normal file
def _getnormalcontext(repo, ctx, f, revmap):
    '''Return a memfilectx for normal (non-largefile) file ``f`` in
    ``ctx``, remapping .hgtags node ids through ``revmap``. Raises
    IOError if the file is not in the manifest (removed/renamed).'''
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        # tag contents reference changeset ids, which change on convert
        data = _converttags (repo.ui, revmap, data)
    return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)
311
311
# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    '''Rewrite .hgtags content ``data``, replacing each source node id
    with its converted counterpart from ``revmap``. Malformed lines and
    ids with no mapping are skipped with a warning.'''
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)
335
335
def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        # size is in MB; filectx sizes are in bytes
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        # file not in this revision: not a largefile here
        return False
348
348
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    # only upload hashes the remote store does not already have
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
374
374
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every largefile revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
390
390
391 def cachelfiles(ui, repo, node, filelist=None):
391 def cachelfiles(ui, repo, node, filelist=None):
392 '''cachelfiles ensures that all largefiles needed by the specified revision
392 '''cachelfiles ensures that all largefiles needed by the specified revision
393 are present in the repository's largefile cache.
393 are present in the repository's largefile cache.
394
394
395 returns a tuple (cached, missing). cached is the list of files downloaded
395 returns a tuple (cached, missing). cached is the list of files downloaded
396 by this operation; missing is the list of files that were needed but could
396 by this operation; missing is the list of files that were needed but could
397 not be found.'''
397 not be found.'''
398 lfiles = lfutil.listlfiles(repo, node)
398 lfiles = lfutil.listlfiles(repo, node)
399 if filelist:
399 if filelist:
400 lfiles = set(lfiles) & set(filelist)
400 lfiles = set(lfiles) & set(filelist)
401 toget = []
401 toget = []
402
402
403 for lfile in lfiles:
403 for lfile in lfiles:
404 try:
404 try:
405 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
405 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
406 except IOError, err:
406 except IOError, err:
407 if err.errno == errno.ENOENT:
407 if err.errno == errno.ENOENT:
408 continue # node must be None and standin wasn't found in wctx
408 continue # node must be None and standin wasn't found in wctx
409 raise
409 raise
410 if not lfutil.findfile(repo, expectedhash):
410 if not lfutil.findfile(repo, expectedhash):
411 toget.append((lfile, expectedhash))
411 toget.append((lfile, expectedhash))
412
412
413 if toget:
413 if toget:
414 store = basestore._openstore(repo)
414 store = basestore._openstore(repo)
415 ret = store.get(toget)
415 ret = store.get(toget)
416 return ret
416 return ret
417
417
418 return ([], [])
418 return ([], [])
419
419
def downloadlfiles(ui, repo, rev=None):
    '''Cache the largefiles referenced by the revisions in ``rev`` (all
    revisions when ``rev`` is None), reporting totals to the user.
    Returns (totalsuccess, totalmissing) counts.'''
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    totalsuccess = 0
    totalmissing = 0
    if rev != []: # walkchangerevs on empty list would return all revs
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                          prepare):
            success, missing = cachelfiles(ui, repo, ctx.node())
            totalsuccess += len(success)
            totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing
437
437
438 def updatelfiles(ui, repo, filelist=None, printmessage=True):
438 def updatelfiles(ui, repo, filelist=None, printmessage=True):
439 wlock = repo.wlock()
439 wlock = repo.wlock()
440 try:
440 try:
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
442 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
442 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
443
443
444 if filelist is not None:
444 if filelist is not None:
445 lfiles = [f for f in lfiles if f in filelist]
445 lfiles = [f for f in lfiles if f in filelist]
446
446
447 update = {}
447 update = {}
448 updated, removed = 0, 0
448 updated, removed = 0, 0
449 for lfile in lfiles:
449 for lfile in lfiles:
450 abslfile = repo.wjoin(lfile)
450 abslfile = repo.wjoin(lfile)
451 absstandin = repo.wjoin(lfutil.standin(lfile))
451 absstandin = repo.wjoin(lfutil.standin(lfile))
452 if os.path.exists(absstandin):
452 if os.path.exists(absstandin):
453 if (os.path.exists(absstandin + '.orig') and
453 if (os.path.exists(absstandin + '.orig') and
454 os.path.exists(abslfile)):
454 os.path.exists(abslfile)):
455 shutil.copyfile(abslfile, abslfile + '.orig')
455 shutil.copyfile(abslfile, abslfile + '.orig')
456 util.unlinkpath(absstandin + '.orig')
456 util.unlinkpath(absstandin + '.orig')
457 expecthash = lfutil.readstandin(repo, lfile)
457 expecthash = lfutil.readstandin(repo, lfile)
458 if (expecthash != '' and
458 if (expecthash != '' and
459 (not os.path.exists(abslfile) or
459 (not os.path.exists(abslfile) or
460 expecthash != lfutil.hashfile(abslfile))):
460 expecthash != lfutil.hashfile(abslfile))):
461 if lfile not in repo[None]: # not switched to normal file
461 if lfile not in repo[None]: # not switched to normal file
462 util.unlinkpath(abslfile, ignoremissing=True)
462 util.unlinkpath(abslfile, ignoremissing=True)
463 # use normallookup() to allocate entry in largefiles
463 # use normallookup() to allocate entry in largefiles
464 # dirstate, because lack of it misleads
464 # dirstate, because lack of it misleads
465 # lfilesrepo.status() into recognition that such cache
465 # lfilesrepo.status() into recognition that such cache
466 # missing files are REMOVED.
466 # missing files are REMOVED.
467 lfdirstate.normallookup(lfile)
467 lfdirstate.normallookup(lfile)
468 update[lfile] = expecthash
468 update[lfile] = expecthash
469 else:
469 else:
470 # Remove lfiles for which the standin is deleted, unless the
470 # Remove lfiles for which the standin is deleted, unless the
471 # lfile is added to the repository again. This happens when a
471 # lfile is added to the repository again. This happens when a
472 # largefile is converted back to a normal file: the standin
472 # largefile is converted back to a normal file: the standin
473 # disappears, but a new (normal) file appears as the lfile.
473 # disappears, but a new (normal) file appears as the lfile.
474 if (os.path.exists(abslfile) and
474 if (os.path.exists(abslfile) and
475 repo.dirstate.normalize(lfile) not in repo[None]):
475 repo.dirstate.normalize(lfile) not in repo[None]):
476 util.unlinkpath(abslfile)
476 util.unlinkpath(abslfile)
477 removed += 1
477 removed += 1
478
478
479 # largefile processing might be slow and be interrupted - be prepared
479 # largefile processing might be slow and be interrupted - be prepared
480 lfdirstate.write()
480 lfdirstate.write()
481
481
482 if lfiles:
482 if lfiles:
483 if printmessage:
483 if printmessage:
484 ui.status(_('getting changed largefiles\n'))
484 ui.status(_('getting changed largefiles\n'))
485 cachelfiles(ui, repo, None, lfiles)
485 cachelfiles(ui, repo, None, lfiles)
486
486
487 for lfile in lfiles:
487 for lfile in lfiles:
488 update1 = 0
488 update1 = 0
489
489
490 expecthash = update.get(lfile)
490 expecthash = update.get(lfile)
491 if expecthash:
491 if expecthash:
492 if not lfutil.copyfromcache(repo, expecthash, lfile):
492 if not lfutil.copyfromcache(repo, expecthash, lfile):
493 # failed ... but already removed and set to normallookup
493 # failed ... but already removed and set to normallookup
494 continue
494 continue
495 # Synchronize largefile dirstate to the last modified
495 # Synchronize largefile dirstate to the last modified
496 # time of the file
496 # time of the file
497 lfdirstate.normal(lfile)
497 lfdirstate.normal(lfile)
498 update1 = 1
498 update1 = 1
499
499
500 # copy the state of largefile standin from the repository's
500 # copy the state of largefile standin from the repository's
501 # dirstate to its state in the lfdirstate.
501 # dirstate to its state in the lfdirstate.
502 abslfile = repo.wjoin(lfile)
502 abslfile = repo.wjoin(lfile)
503 absstandin = repo.wjoin(lfutil.standin(lfile))
503 absstandin = repo.wjoin(lfutil.standin(lfile))
504 if os.path.exists(absstandin):
504 if os.path.exists(absstandin):
505 mode = os.stat(absstandin).st_mode
505 mode = os.stat(absstandin).st_mode
506 if mode != os.stat(abslfile).st_mode:
506 if mode != os.stat(abslfile).st_mode:
507 os.chmod(abslfile, mode)
507 os.chmod(abslfile, mode)
508 update1 = 1
508 update1 = 1
509
509
510 updated += update1
510 updated += update1
511
511
512 state = repo.dirstate[lfutil.standin(lfile)]
512 standin = lfutil.standin(lfile)
513 if standin in repo.dirstate:
514 stat = repo.dirstate._map[standin]
515 state, mtime = stat[0], stat[3]
516 else:
517 state, mtime = '?', -1
513 if state == 'n':
518 if state == 'n':
514 # When rebasing, we need to synchronize the standin and the
519 if mtime < 0:
515 # largefile, because otherwise the largefile will get reverted.
520 # state 'n' doesn't ensure 'clean' in this case
516 # But for commit's sake, we have to mark the file as unclean.
517 if getattr(repo, "_isrebasing", False):
518 lfdirstate.normallookup(lfile)
521 lfdirstate.normallookup(lfile)
519 else:
522 else:
520 lfdirstate.normal(lfile)
523 lfdirstate.normal(lfile)
521 elif state == 'r':
524 elif state == 'r':
522 lfdirstate.remove(lfile)
525 lfdirstate.remove(lfile)
523 elif state == 'a':
526 elif state == 'a':
524 lfdirstate.add(lfile)
527 lfdirstate.add(lfile)
525 elif state == '?':
528 elif state == '?':
526 lfdirstate.drop(lfile)
529 lfdirstate.drop(lfile)
527
530
528 lfdirstate.write()
531 lfdirstate.write()
529 if printmessage and lfiles:
532 if printmessage and lfiles:
530 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
533 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
531 removed))
534 removed))
532 finally:
535 finally:
533 wlock.release()
536 wlock.release()
534
537
535 @command('lfpull',
538 @command('lfpull',
536 [('r', 'rev', [], _('pull largefiles for these revisions'))
539 [('r', 'rev', [], _('pull largefiles for these revisions'))
537 ] + commands.remoteopts,
540 ] + commands.remoteopts,
538 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
541 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
539 def lfpull(ui, repo, source="default", **opts):
542 def lfpull(ui, repo, source="default", **opts):
540 """pull largefiles for the specified revisions from the specified source
543 """pull largefiles for the specified revisions from the specified source
541
544
542 Pull largefiles that are referenced from local changesets but missing
545 Pull largefiles that are referenced from local changesets but missing
543 locally, pulling from a remote repository to the local cache.
546 locally, pulling from a remote repository to the local cache.
544
547
545 If SOURCE is omitted, the 'default' path will be used.
548 If SOURCE is omitted, the 'default' path will be used.
546 See :hg:`help urls` for more information.
549 See :hg:`help urls` for more information.
547
550
548 .. container:: verbose
551 .. container:: verbose
549
552
550 Some examples:
553 Some examples:
551
554
552 - pull largefiles for all branch heads::
555 - pull largefiles for all branch heads::
553
556
554 hg lfpull -r "head() and not closed()"
557 hg lfpull -r "head() and not closed()"
555
558
556 - pull largefiles on the default branch::
559 - pull largefiles on the default branch::
557
560
558 hg lfpull -r "branch(default)"
561 hg lfpull -r "branch(default)"
559 """
562 """
560 repo.lfpullsource = source
563 repo.lfpullsource = source
561
564
562 revs = opts.get('rev', [])
565 revs = opts.get('rev', [])
563 if not revs:
566 if not revs:
564 raise util.Abort(_('no revisions specified'))
567 raise util.Abort(_('no revisions specified'))
565 revs = scmutil.revrange(repo, revs)
568 revs = scmutil.revrange(repo, revs)
566
569
567 numcached = 0
570 numcached = 0
568 for rev in revs:
571 for rev in revs:
569 ui.note(_('pulling largefiles for revision %s\n') % rev)
572 ui.note(_('pulling largefiles for revision %s\n') % rev)
570 (cached, missing) = cachelfiles(ui, repo, rev)
573 (cached, missing) = cachelfiles(ui, repo, rev)
571 numcached += len(cached)
574 numcached += len(cached)
572 ui.status(_("%d largefiles cached\n") % numcached)
575 ui.status(_("%d largefiles cached\n") % numcached)
General Comments 0
You need to be logged in to leave comments. Login now