revert: evaluate filesets against working directory (issue4497)...
Martin von Zweigbergk
r24438:5b85a5bc default
@@ -1,1406 +1,1406 @@
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
10
11 import os
11 import os
12 import copy
12 import copy
13
13
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 archival, pathutil, revset
15 archival, pathutil, revset
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.node import hex
17 from mercurial.node import hex
18
18
19 import lfutil
19 import lfutil
20 import lfcommands
20 import lfcommands
21 import basestore
21 import basestore
22
22
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
24
24
25 def composelargefilematcher(match, manifest):
25 def composelargefilematcher(match, manifest):
26 '''create a matcher that matches only the largefiles in the original
26 '''create a matcher that matches only the largefiles in the original
27 matcher'''
27 matcher'''
28 m = copy.copy(match)
28 m = copy.copy(match)
29 lfile = lambda f: lfutil.standin(f) in manifest
29 lfile = lambda f: lfutil.standin(f) in manifest
30 m._files = filter(lfile, m._files)
30 m._files = filter(lfile, m._files)
31 m._fmap = set(m._files)
31 m._fmap = set(m._files)
32 m._always = False
32 m._always = False
33 origmatchfn = m.matchfn
33 origmatchfn = m.matchfn
34 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
34 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
35 return m
35 return m
36
36
37 def composenormalfilematcher(match, manifest, exclude=None):
37 def composenormalfilematcher(match, manifest, exclude=None):
38 excluded = set()
38 excluded = set()
39 if exclude is not None:
39 if exclude is not None:
40 excluded.update(exclude)
40 excluded.update(exclude)
41
41
42 m = copy.copy(match)
42 m = copy.copy(match)
43 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
43 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
44 manifest or f in excluded)
44 manifest or f in excluded)
45 m._files = filter(notlfile, m._files)
45 m._files = filter(notlfile, m._files)
46 m._fmap = set(m._files)
46 m._fmap = set(m._files)
47 m._always = False
47 m._always = False
48 origmatchfn = m.matchfn
48 origmatchfn = m.matchfn
49 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
49 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
50 return m
50 return m
51
51
52 def installnormalfilesmatchfn(manifest):
52 def installnormalfilesmatchfn(manifest):
53 '''installmatchfn with a matchfn that ignores all largefiles'''
53 '''installmatchfn with a matchfn that ignores all largefiles'''
54 def overridematch(ctx, pats=[], opts={}, globbed=False,
54 def overridematch(ctx, pats=[], opts={}, globbed=False,
55 default='relpath'):
55 default='relpath'):
56 match = oldmatch(ctx, pats, opts, globbed, default)
56 match = oldmatch(ctx, pats, opts, globbed, default)
57 return composenormalfilematcher(match, manifest)
57 return composenormalfilematcher(match, manifest)
58 oldmatch = installmatchfn(overridematch)
58 oldmatch = installmatchfn(overridematch)
59
59
60 def installmatchfn(f):
60 def installmatchfn(f):
61 '''monkey patch the scmutil module with a custom match function.
61 '''monkey patch the scmutil module with a custom match function.
62 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
62 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
63 oldmatch = scmutil.match
63 oldmatch = scmutil.match
64 setattr(f, 'oldmatch', oldmatch)
64 setattr(f, 'oldmatch', oldmatch)
65 scmutil.match = f
65 scmutil.match = f
66 return oldmatch
66 return oldmatch
67
67
68 def restorematchfn():
68 def restorematchfn():
69 '''restores scmutil.match to what it was before installmatchfn
69 '''restores scmutil.match to what it was before installmatchfn
70 was called. no-op if scmutil.match is its original function.
70 was called. no-op if scmutil.match is its original function.
71
71
72 Note that n calls to installmatchfn will require n calls to
72 Note that n calls to installmatchfn will require n calls to
73 restore the original matchfn.'''
73 restore the original matchfn.'''
74 scmutil.match = getattr(scmutil.match, 'oldmatch')
74 scmutil.match = getattr(scmutil.match, 'oldmatch')
75
75
76 def installmatchandpatsfn(f):
76 def installmatchandpatsfn(f):
77 oldmatchandpats = scmutil.matchandpats
77 oldmatchandpats = scmutil.matchandpats
78 setattr(f, 'oldmatchandpats', oldmatchandpats)
78 setattr(f, 'oldmatchandpats', oldmatchandpats)
79 scmutil.matchandpats = f
79 scmutil.matchandpats = f
80 return oldmatchandpats
80 return oldmatchandpats
81
81
82 def restorematchandpatsfn():
82 def restorematchandpatsfn():
83 '''restores scmutil.matchandpats to what it was before
83 '''restores scmutil.matchandpats to what it was before
84 installmatchandpatsfn was called. No-op if scmutil.matchandpats
84 installmatchandpatsfn was called. No-op if scmutil.matchandpats
85 is its original function.
85 is its original function.
86
86
87 Note that n calls to installmatchandpatsfn will require n calls
87 Note that n calls to installmatchandpatsfn will require n calls
88 to restore the original matchfn.'''
88 to restore the original matchfn.'''
89 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
89 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
90 scmutil.matchandpats)
90 scmutil.matchandpats)
91
91
92 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
92 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
93 large = opts.get('large')
93 large = opts.get('large')
94 lfsize = lfutil.getminsize(
94 lfsize = lfutil.getminsize(
95 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
95 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
96
96
97 lfmatcher = None
97 lfmatcher = None
98 if lfutil.islfilesrepo(repo):
98 if lfutil.islfilesrepo(repo):
99 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
99 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
100 if lfpats:
100 if lfpats:
101 lfmatcher = match_.match(repo.root, '', list(lfpats))
101 lfmatcher = match_.match(repo.root, '', list(lfpats))
102
102
103 lfnames = []
103 lfnames = []
104 m = copy.copy(matcher)
104 m = copy.copy(matcher)
105 m.bad = lambda x, y: None
105 m.bad = lambda x, y: None
106 wctx = repo[None]
106 wctx = repo[None]
107 for f in repo.walk(m):
107 for f in repo.walk(m):
108 exact = m.exact(f)
108 exact = m.exact(f)
109 lfile = lfutil.standin(f) in wctx
109 lfile = lfutil.standin(f) in wctx
110 nfile = f in wctx
110 nfile = f in wctx
111 exists = lfile or nfile
111 exists = lfile or nfile
112
112
113 # addremove in core gets fancy with the name, add doesn't
113 # addremove in core gets fancy with the name, add doesn't
114 if isaddremove:
114 if isaddremove:
115 name = m.uipath(f)
115 name = m.uipath(f)
116 else:
116 else:
117 name = m.rel(f)
117 name = m.rel(f)
118
118
119 # Don't warn the user when they attempt to add a normal tracked file.
119 # Don't warn the user when they attempt to add a normal tracked file.
120 # The normal add code will do that for us.
120 # The normal add code will do that for us.
121 if exact and exists:
121 if exact and exists:
122 if lfile:
122 if lfile:
123 ui.warn(_('%s already a largefile\n') % name)
123 ui.warn(_('%s already a largefile\n') % name)
124 continue
124 continue
125
125
126 if (exact or not exists) and not lfutil.isstandin(f):
126 if (exact or not exists) and not lfutil.isstandin(f):
127 # In case the file was removed previously, but not committed
127 # In case the file was removed previously, but not committed
128 # (issue3507)
128 # (issue3507)
129 if not repo.wvfs.exists(f):
129 if not repo.wvfs.exists(f):
130 continue
130 continue
131
131
132 abovemin = (lfsize and
132 abovemin = (lfsize and
133 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
133 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
134 if large or abovemin or (lfmatcher and lfmatcher(f)):
134 if large or abovemin or (lfmatcher and lfmatcher(f)):
135 lfnames.append(f)
135 lfnames.append(f)
136 if ui.verbose or not exact:
136 if ui.verbose or not exact:
137 ui.status(_('adding %s as a largefile\n') % name)
137 ui.status(_('adding %s as a largefile\n') % name)
138
138
139 bad = []
139 bad = []
140
140
141 # Need to lock, otherwise there could be a race condition between
141 # Need to lock, otherwise there could be a race condition between
142 # when standins are created and added to the repo.
142 # when standins are created and added to the repo.
143 wlock = repo.wlock()
143 wlock = repo.wlock()
144 try:
144 try:
145 if not opts.get('dry_run'):
145 if not opts.get('dry_run'):
146 standins = []
146 standins = []
147 lfdirstate = lfutil.openlfdirstate(ui, repo)
147 lfdirstate = lfutil.openlfdirstate(ui, repo)
148 for f in lfnames:
148 for f in lfnames:
149 standinname = lfutil.standin(f)
149 standinname = lfutil.standin(f)
150 lfutil.writestandin(repo, standinname, hash='',
150 lfutil.writestandin(repo, standinname, hash='',
151 executable=lfutil.getexecutable(repo.wjoin(f)))
151 executable=lfutil.getexecutable(repo.wjoin(f)))
152 standins.append(standinname)
152 standins.append(standinname)
153 if lfdirstate[f] == 'r':
153 if lfdirstate[f] == 'r':
154 lfdirstate.normallookup(f)
154 lfdirstate.normallookup(f)
155 else:
155 else:
156 lfdirstate.add(f)
156 lfdirstate.add(f)
157 lfdirstate.write()
157 lfdirstate.write()
158 bad += [lfutil.splitstandin(f)
158 bad += [lfutil.splitstandin(f)
159 for f in repo[None].add(standins)
159 for f in repo[None].add(standins)
160 if f in m.files()]
160 if f in m.files()]
161
161
162 added = [f for f in lfnames if f not in bad]
162 added = [f for f in lfnames if f not in bad]
163 finally:
163 finally:
164 wlock.release()
164 wlock.release()
165 return added, bad
165 return added, bad
166
166
167 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
167 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
168 after = opts.get('after')
168 after = opts.get('after')
169 m = composelargefilematcher(matcher, repo[None].manifest())
169 m = composelargefilematcher(matcher, repo[None].manifest())
170 try:
170 try:
171 repo.lfstatus = True
171 repo.lfstatus = True
172 s = repo.status(match=m, clean=not isaddremove)
172 s = repo.status(match=m, clean=not isaddremove)
173 finally:
173 finally:
174 repo.lfstatus = False
174 repo.lfstatus = False
175 manifest = repo[None].manifest()
175 manifest = repo[None].manifest()
176 modified, added, deleted, clean = [[f for f in list
176 modified, added, deleted, clean = [[f for f in list
177 if lfutil.standin(f) in manifest]
177 if lfutil.standin(f) in manifest]
178 for list in (s.modified, s.added,
178 for list in (s.modified, s.added,
179 s.deleted, s.clean)]
179 s.deleted, s.clean)]
180
180
181 def warn(files, msg):
181 def warn(files, msg):
182 for f in files:
182 for f in files:
183 ui.warn(msg % m.rel(f))
183 ui.warn(msg % m.rel(f))
184 return int(len(files) > 0)
184 return int(len(files) > 0)
185
185
186 result = 0
186 result = 0
187
187
188 if after:
188 if after:
189 remove = deleted
189 remove = deleted
190 result = warn(modified + added + clean,
190 result = warn(modified + added + clean,
191 _('not removing %s: file still exists\n'))
191 _('not removing %s: file still exists\n'))
192 else:
192 else:
193 remove = deleted + clean
193 remove = deleted + clean
194 result = warn(modified, _('not removing %s: file is modified (use -f'
194 result = warn(modified, _('not removing %s: file is modified (use -f'
195 ' to force removal)\n'))
195 ' to force removal)\n'))
196 result = warn(added, _('not removing %s: file has been marked for add'
196 result = warn(added, _('not removing %s: file has been marked for add'
197 ' (use forget to undo)\n')) or result
197 ' (use forget to undo)\n')) or result
198
198
199 # Need to lock because standin files are deleted then removed from the
199 # Need to lock because standin files are deleted then removed from the
200 # repository and we could race in-between.
200 # repository and we could race in-between.
201 wlock = repo.wlock()
201 wlock = repo.wlock()
202 try:
202 try:
203 lfdirstate = lfutil.openlfdirstate(ui, repo)
203 lfdirstate = lfutil.openlfdirstate(ui, repo)
204 for f in sorted(remove):
204 for f in sorted(remove):
205 if ui.verbose or not m.exact(f):
205 if ui.verbose or not m.exact(f):
206 # addremove in core gets fancy with the name, remove doesn't
206 # addremove in core gets fancy with the name, remove doesn't
207 if isaddremove:
207 if isaddremove:
208 name = m.uipath(f)
208 name = m.uipath(f)
209 else:
209 else:
210 name = m.rel(f)
210 name = m.rel(f)
211 ui.status(_('removing %s\n') % name)
211 ui.status(_('removing %s\n') % name)
212
212
213 if not opts.get('dry_run'):
213 if not opts.get('dry_run'):
214 if not after:
214 if not after:
215 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
215 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
216
216
217 if opts.get('dry_run'):
217 if opts.get('dry_run'):
218 return result
218 return result
219
219
220 remove = [lfutil.standin(f) for f in remove]
220 remove = [lfutil.standin(f) for f in remove]
221 # If this is being called by addremove, let the original addremove
221 # If this is being called by addremove, let the original addremove
222 # function handle this.
222 # function handle this.
223 if not isaddremove:
223 if not isaddremove:
224 for f in remove:
224 for f in remove:
225 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
225 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
226 repo[None].forget(remove)
226 repo[None].forget(remove)
227
227
228 for f in remove:
228 for f in remove:
229 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
229 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
230 False)
230 False)
231
231
232 lfdirstate.write()
232 lfdirstate.write()
233 finally:
233 finally:
234 wlock.release()
234 wlock.release()
235
235
236 return result
236 return result
237
237
238 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 # For overriding mercurial.hgweb.webcommands so that largefiles will
239 # appear at their right place in the manifests.
239 # appear at their right place in the manifests.
240 def decodepath(orig, path):
240 def decodepath(orig, path):
241 return lfutil.splitstandin(path) or path
241 return lfutil.splitstandin(path) or path
242
242
243 # -- Wrappers: modify existing commands --------------------------------
243 # -- Wrappers: modify existing commands --------------------------------
244
244
245 def overrideadd(orig, ui, repo, *pats, **opts):
245 def overrideadd(orig, ui, repo, *pats, **opts):
246 if opts.get('normal') and opts.get('large'):
246 if opts.get('normal') and opts.get('large'):
247 raise util.Abort(_('--normal cannot be used with --large'))
247 raise util.Abort(_('--normal cannot be used with --large'))
248 return orig(ui, repo, *pats, **opts)
248 return orig(ui, repo, *pats, **opts)
249
249
250 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
250 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
251 # The --normal flag short circuits this override
251 # The --normal flag short circuits this override
252 if opts.get('normal'):
252 if opts.get('normal'):
253 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
253 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
254
254
255 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
255 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
256 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
256 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
257 ladded)
257 ladded)
258 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
258 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
259
259
260 bad.extend(f for f in lbad)
260 bad.extend(f for f in lbad)
261 return bad
261 return bad
262
262
263 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
263 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
264 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
264 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
265 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
265 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
266 return removelargefiles(ui, repo, False, matcher, after=after,
266 return removelargefiles(ui, repo, False, matcher, after=after,
267 force=force) or result
267 force=force) or result
268
268
269 def overridestatusfn(orig, repo, rev2, **opts):
269 def overridestatusfn(orig, repo, rev2, **opts):
270 try:
270 try:
271 repo._repo.lfstatus = True
271 repo._repo.lfstatus = True
272 return orig(repo, rev2, **opts)
272 return orig(repo, rev2, **opts)
273 finally:
273 finally:
274 repo._repo.lfstatus = False
274 repo._repo.lfstatus = False
275
275
276 def overridestatus(orig, ui, repo, *pats, **opts):
276 def overridestatus(orig, ui, repo, *pats, **opts):
277 try:
277 try:
278 repo.lfstatus = True
278 repo.lfstatus = True
279 return orig(ui, repo, *pats, **opts)
279 return orig(ui, repo, *pats, **opts)
280 finally:
280 finally:
281 repo.lfstatus = False
281 repo.lfstatus = False
282
282
283 def overridedirty(orig, repo, ignoreupdate=False):
283 def overridedirty(orig, repo, ignoreupdate=False):
284 try:
284 try:
285 repo._repo.lfstatus = True
285 repo._repo.lfstatus = True
286 return orig(repo, ignoreupdate)
286 return orig(repo, ignoreupdate)
287 finally:
287 finally:
288 repo._repo.lfstatus = False
288 repo._repo.lfstatus = False
289
289
290 def overridelog(orig, ui, repo, *pats, **opts):
290 def overridelog(orig, ui, repo, *pats, **opts):
291 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
291 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
292 default='relpath'):
292 default='relpath'):
293 """Matcher that merges root directory with .hglf, suitable for log.
293 """Matcher that merges root directory with .hglf, suitable for log.
294 It is still possible to match .hglf directly.
294 It is still possible to match .hglf directly.
295 For any listed files run log on the standin too.
295 For any listed files run log on the standin too.
296 matchfn tries both the given filename and with .hglf stripped.
296 matchfn tries both the given filename and with .hglf stripped.
297 """
297 """
298 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
298 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
299 m, p = copy.copy(matchandpats)
299 m, p = copy.copy(matchandpats)
300
300
301 if m.always():
301 if m.always():
302 # We want to match everything anyway, so there's no benefit trying
302 # We want to match everything anyway, so there's no benefit trying
303 # to add standins.
303 # to add standins.
304 return matchandpats
304 return matchandpats
305
305
306 pats = set(p)
306 pats = set(p)
307
307
308 def fixpats(pat, tostandin=lfutil.standin):
308 def fixpats(pat, tostandin=lfutil.standin):
309 kindpat = match_._patsplit(pat, None)
309 kindpat = match_._patsplit(pat, None)
310
310
311 if kindpat[0] is not None:
311 if kindpat[0] is not None:
312 return kindpat[0] + ':' + tostandin(kindpat[1])
312 return kindpat[0] + ':' + tostandin(kindpat[1])
313 return tostandin(kindpat[1])
313 return tostandin(kindpat[1])
314
314
315 if m._cwd:
315 if m._cwd:
316 hglf = lfutil.shortname
316 hglf = lfutil.shortname
317 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
317 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
318
318
319 def tostandin(f):
319 def tostandin(f):
320 # The file may already be a standin, so truncate the back
320 # The file may already be a standin, so truncate the back
321 # prefix and test before mangling it. This avoids turning
321 # prefix and test before mangling it. This avoids turning
322 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
322 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
323 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
323 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
324 return f
324 return f
325
325
326 # An absolute path is from outside the repo, so truncate the
326 # An absolute path is from outside the repo, so truncate the
327 # path to the root before building the standin. Otherwise cwd
327 # path to the root before building the standin. Otherwise cwd
328 # is somewhere in the repo, relative to root, and needs to be
328 # is somewhere in the repo, relative to root, and needs to be
329 # prepended before building the standin.
329 # prepended before building the standin.
330 if os.path.isabs(m._cwd):
330 if os.path.isabs(m._cwd):
331 f = f[len(back):]
331 f = f[len(back):]
332 else:
332 else:
333 f = m._cwd + '/' + f
333 f = m._cwd + '/' + f
334 return back + lfutil.standin(f)
334 return back + lfutil.standin(f)
335
335
336 pats.update(fixpats(f, tostandin) for f in p)
336 pats.update(fixpats(f, tostandin) for f in p)
337 else:
337 else:
338 def tostandin(f):
338 def tostandin(f):
339 if lfutil.splitstandin(f):
339 if lfutil.splitstandin(f):
340 return f
340 return f
341 return lfutil.standin(f)
341 return lfutil.standin(f)
342 pats.update(fixpats(f, tostandin) for f in p)
342 pats.update(fixpats(f, tostandin) for f in p)
343
343
344 for i in range(0, len(m._files)):
344 for i in range(0, len(m._files)):
345 # Don't add '.hglf' to m.files, since that is already covered by '.'
345 # Don't add '.hglf' to m.files, since that is already covered by '.'
346 if m._files[i] == '.':
346 if m._files[i] == '.':
347 continue
347 continue
348 standin = lfutil.standin(m._files[i])
348 standin = lfutil.standin(m._files[i])
349 # If the "standin" is a directory, append instead of replace to
349 # If the "standin" is a directory, append instead of replace to
350 # support naming a directory on the command line with only
350 # support naming a directory on the command line with only
351 # largefiles. The original directory is kept to support normal
351 # largefiles. The original directory is kept to support normal
352 # files.
352 # files.
353 if standin in repo[ctx.node()]:
353 if standin in repo[ctx.node()]:
354 m._files[i] = standin
354 m._files[i] = standin
355 elif m._files[i] not in repo[ctx.node()] \
355 elif m._files[i] not in repo[ctx.node()] \
356 and repo.wvfs.isdir(standin):
356 and repo.wvfs.isdir(standin):
357 m._files.append(standin)
357 m._files.append(standin)
358
358
359 m._fmap = set(m._files)
359 m._fmap = set(m._files)
360 m._always = False
360 m._always = False
361 origmatchfn = m.matchfn
361 origmatchfn = m.matchfn
362 def lfmatchfn(f):
362 def lfmatchfn(f):
363 lf = lfutil.splitstandin(f)
363 lf = lfutil.splitstandin(f)
364 if lf is not None and origmatchfn(lf):
364 if lf is not None and origmatchfn(lf):
365 return True
365 return True
366 r = origmatchfn(f)
366 r = origmatchfn(f)
367 return r
367 return r
368 m.matchfn = lfmatchfn
368 m.matchfn = lfmatchfn
369
369
370 ui.debug('updated patterns: %s\n' % sorted(pats))
370 ui.debug('updated patterns: %s\n' % sorted(pats))
371 return m, pats
371 return m, pats
372
372
373 # For hg log --patch, the match object is used in two different senses:
373 # For hg log --patch, the match object is used in two different senses:
374 # (1) to determine what revisions should be printed out, and
374 # (1) to determine what revisions should be printed out, and
375 # (2) to determine what files to print out diffs for.
375 # (2) to determine what files to print out diffs for.
376 # The magic matchandpats override should be used for case (1) but not for
376 # The magic matchandpats override should be used for case (1) but not for
377 # case (2).
377 # case (2).
378 def overridemakelogfilematcher(repo, pats, opts):
378 def overridemakelogfilematcher(repo, pats, opts):
379 pctx = repo[None]
379 pctx = repo[None]
380 match, pats = oldmatchandpats(pctx, pats, opts)
380 match, pats = oldmatchandpats(pctx, pats, opts)
381 return lambda rev: match
381 return lambda rev: match
382
382
383 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
383 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
384 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
384 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
385 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
385 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
386
386
387 try:
387 try:
388 return orig(ui, repo, *pats, **opts)
388 return orig(ui, repo, *pats, **opts)
389 finally:
389 finally:
390 restorematchandpatsfn()
390 restorematchandpatsfn()
391 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
391 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
392
392
393 def overrideverify(orig, ui, repo, *pats, **opts):
393 def overrideverify(orig, ui, repo, *pats, **opts):
394 large = opts.pop('large', False)
394 large = opts.pop('large', False)
395 all = opts.pop('lfa', False)
395 all = opts.pop('lfa', False)
396 contents = opts.pop('lfc', False)
396 contents = opts.pop('lfc', False)
397
397
398 result = orig(ui, repo, *pats, **opts)
398 result = orig(ui, repo, *pats, **opts)
399 if large or all or contents:
399 if large or all or contents:
400 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
400 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
401 return result
401 return result
402
402
403 def overridedebugstate(orig, ui, repo, *pats, **opts):
403 def overridedebugstate(orig, ui, repo, *pats, **opts):
404 large = opts.pop('large', False)
404 large = opts.pop('large', False)
405 if large:
405 if large:
406 class fakerepo(object):
406 class fakerepo(object):
407 dirstate = lfutil.openlfdirstate(ui, repo)
407 dirstate = lfutil.openlfdirstate(ui, repo)
408 orig(ui, fakerepo, *pats, **opts)
408 orig(ui, fakerepo, *pats, **opts)
409 else:
409 else:
410 orig(ui, repo, *pats, **opts)
410 orig(ui, repo, *pats, **opts)
411
411
412 # Override needs to refresh standins so that update's normal merge
412 # Override needs to refresh standins so that update's normal merge
413 # will go through properly. Then the other update hook (overriding repo.update)
413 # will go through properly. Then the other update hook (overriding repo.update)
414 # will get the new files. Filemerge is also overridden so that the merge
414 # will get the new files. Filemerge is also overridden so that the merge
415 # will merge standins correctly.
415 # will merge standins correctly.
416 def overrideupdate(orig, ui, repo, *pats, **opts):
416 def overrideupdate(orig, ui, repo, *pats, **opts):
417 # Need to lock between the standins getting updated and their
417 # Need to lock between the standins getting updated and their
418 # largefiles getting updated
418 # largefiles getting updated
419 wlock = repo.wlock()
419 wlock = repo.wlock()
420 try:
420 try:
421 if opts['check']:
421 if opts['check']:
422 lfdirstate = lfutil.openlfdirstate(ui, repo)
422 lfdirstate = lfutil.openlfdirstate(ui, repo)
423 unsure, s = lfdirstate.status(
423 unsure, s = lfdirstate.status(
424 match_.always(repo.root, repo.getcwd()),
424 match_.always(repo.root, repo.getcwd()),
425 [], False, False, False)
425 [], False, False, False)
426
426
427 mod = len(s.modified) > 0
427 mod = len(s.modified) > 0
428 for lfile in unsure:
428 for lfile in unsure:
429 standin = lfutil.standin(lfile)
429 standin = lfutil.standin(lfile)
430 if repo['.'][standin].data().strip() != \
430 if repo['.'][standin].data().strip() != \
431 lfutil.hashfile(repo.wjoin(lfile)):
431 lfutil.hashfile(repo.wjoin(lfile)):
432 mod = True
432 mod = True
433 else:
433 else:
434 lfdirstate.normal(lfile)
434 lfdirstate.normal(lfile)
435 lfdirstate.write()
435 lfdirstate.write()
436 if mod:
436 if mod:
437 raise util.Abort(_('uncommitted changes'))
437 raise util.Abort(_('uncommitted changes'))
438 return orig(ui, repo, *pats, **opts)
438 return orig(ui, repo, *pats, **opts)
439 finally:
439 finally:
440 wlock.release()
440 wlock.release()
441
441
442 # Before starting the manifest merge, merge.updates will call
442 # Before starting the manifest merge, merge.updates will call
443 # _checkunknownfile to check if there are any files in the merged-in
443 # _checkunknownfile to check if there are any files in the merged-in
444 # changeset that collide with unknown files in the working copy.
444 # changeset that collide with unknown files in the working copy.
445 #
445 #
446 # The largefiles are seen as unknown, so this prevents us from merging
446 # The largefiles are seen as unknown, so this prevents us from merging
447 # in a file 'foo' if we already have a largefile with the same name.
447 # in a file 'foo' if we already have a largefile with the same name.
448 #
448 #
449 # The overridden function filters the unknown files by removing any
449 # The overridden function filters the unknown files by removing any
450 # largefiles. This makes the merge proceed and we can then handle this
450 # largefiles. This makes the merge proceed and we can then handle this
451 # case further in the overridden calculateupdates function below.
451 # case further in the overridden calculateupdates function below.
452 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
452 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
453 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
453 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
454 return False
454 return False
455 return origfn(repo, wctx, mctx, f, f2)
455 return origfn(repo, wctx, mctx, f, f2)
456
456
457 # The manifest merge handles conflicts on the manifest level. We want
457 # The manifest merge handles conflicts on the manifest level. We want
458 # to handle changes in largefile-ness of files at this level too.
458 # to handle changes in largefile-ness of files at this level too.
459 #
459 #
460 # The strategy is to run the original calculateupdates and then process
460 # The strategy is to run the original calculateupdates and then process
461 # the action list it outputs. There are two cases we need to deal with:
461 # the action list it outputs. There are two cases we need to deal with:
462 #
462 #
463 # 1. Normal file in p1, largefile in p2. Here the largefile is
463 # 1. Normal file in p1, largefile in p2. Here the largefile is
464 # detected via its standin file, which will enter the working copy
464 # detected via its standin file, which will enter the working copy
465 # with a "get" action. It is not "merge" since the standin is all
465 # with a "get" action. It is not "merge" since the standin is all
466 # Mercurial is concerned with at this level -- the link to the
466 # Mercurial is concerned with at this level -- the link to the
467 # existing normal file is not relevant here.
467 # existing normal file is not relevant here.
468 #
468 #
469 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
469 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
470 # since the largefile will be present in the working copy and
470 # since the largefile will be present in the working copy and
471 # different from the normal file in p2. Mercurial therefore
471 # different from the normal file in p2. Mercurial therefore
472 # triggers a merge action.
472 # triggers a merge action.
473 #
473 #
474 # In both cases, we prompt the user and emit new actions to either
474 # In both cases, we prompt the user and emit new actions to either
475 # remove the standin (if the normal file was kept) or to remove the
475 # remove the standin (if the normal file was kept) or to remove the
476 # normal file and get the standin (if the largefile was kept). The
476 # normal file and get the standin (if the largefile was kept). The
477 # default prompt answer is to use the largefile version since it was
477 # default prompt answer is to use the largefile version since it was
478 # presumably changed on purpose.
478 # presumably changed on purpose.
479 #
479 #
480 # Finally, the merge.applyupdates function will then take care of
480 # Finally, the merge.applyupdates function will then take care of
481 # writing the files into the working copy and lfcommands.updatelfiles
481 # writing the files into the working copy and lfcommands.updatelfiles
482 # will update the largefiles.
482 # will update the largefiles.
483 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
483 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
484 partial, acceptremote, followcopies):
484 partial, acceptremote, followcopies):
485 overwrite = force and not branchmerge
485 overwrite = force and not branchmerge
486 actions, diverge, renamedelete = origfn(
486 actions, diverge, renamedelete = origfn(
487 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
487 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
488 followcopies)
488 followcopies)
489
489
490 if overwrite:
490 if overwrite:
491 return actions, diverge, renamedelete
491 return actions, diverge, renamedelete
492
492
493 # Convert to dictionary with filename as key and action as value.
493 # Convert to dictionary with filename as key and action as value.
494 lfiles = set()
494 lfiles = set()
495 for f in actions:
495 for f in actions:
496 splitstandin = f and lfutil.splitstandin(f)
496 splitstandin = f and lfutil.splitstandin(f)
497 if splitstandin in p1:
497 if splitstandin in p1:
498 lfiles.add(splitstandin)
498 lfiles.add(splitstandin)
499 elif lfutil.standin(f) in p1:
499 elif lfutil.standin(f) in p1:
500 lfiles.add(f)
500 lfiles.add(f)
501
501
502 for lfile in lfiles:
502 for lfile in lfiles:
503 standin = lfutil.standin(lfile)
503 standin = lfutil.standin(lfile)
504 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
504 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
505 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
505 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
506 if sm in ('g', 'dc') and lm != 'r':
506 if sm in ('g', 'dc') and lm != 'r':
507 # Case 1: normal file in the working copy, largefile in
507 # Case 1: normal file in the working copy, largefile in
508 # the second parent
508 # the second parent
509 usermsg = _('remote turned local normal file %s into a largefile\n'
509 usermsg = _('remote turned local normal file %s into a largefile\n'
510 'use (l)argefile or keep (n)ormal file?'
510 'use (l)argefile or keep (n)ormal file?'
511 '$$ &Largefile $$ &Normal file') % lfile
511 '$$ &Largefile $$ &Normal file') % lfile
512 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
512 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
513 actions[lfile] = ('r', None, 'replaced by standin')
513 actions[lfile] = ('r', None, 'replaced by standin')
514 actions[standin] = ('g', sargs, 'replaces standin')
514 actions[standin] = ('g', sargs, 'replaces standin')
515 else: # keep local normal file
515 else: # keep local normal file
516 actions[lfile] = ('k', None, 'replaces standin')
516 actions[lfile] = ('k', None, 'replaces standin')
517 if branchmerge:
517 if branchmerge:
518 actions[standin] = ('k', None, 'replaced by non-standin')
518 actions[standin] = ('k', None, 'replaced by non-standin')
519 else:
519 else:
520 actions[standin] = ('r', None, 'replaced by non-standin')
520 actions[standin] = ('r', None, 'replaced by non-standin')
521 elif lm in ('g', 'dc') and sm != 'r':
521 elif lm in ('g', 'dc') and sm != 'r':
522 # Case 2: largefile in the working copy, normal file in
522 # Case 2: largefile in the working copy, normal file in
523 # the second parent
523 # the second parent
524 usermsg = _('remote turned local largefile %s into a normal file\n'
524 usermsg = _('remote turned local largefile %s into a normal file\n'
525 'keep (l)argefile or use (n)ormal file?'
525 'keep (l)argefile or use (n)ormal file?'
526 '$$ &Largefile $$ &Normal file') % lfile
526 '$$ &Largefile $$ &Normal file') % lfile
527 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
527 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
528 if branchmerge:
528 if branchmerge:
529 # largefile can be restored from standin safely
529 # largefile can be restored from standin safely
530 actions[lfile] = ('k', None, 'replaced by standin')
530 actions[lfile] = ('k', None, 'replaced by standin')
531 actions[standin] = ('k', None, 'replaces standin')
531 actions[standin] = ('k', None, 'replaces standin')
532 else:
532 else:
533 # "lfile" should be marked as "removed" without
533 # "lfile" should be marked as "removed" without
534 # removal of itself
534 # removal of itself
535 actions[lfile] = ('lfmr', None,
535 actions[lfile] = ('lfmr', None,
536 'forget non-standin largefile')
536 'forget non-standin largefile')
537
537
538 # linear-merge should treat this largefile as 're-added'
538 # linear-merge should treat this largefile as 're-added'
539 actions[standin] = ('a', None, 'keep standin')
539 actions[standin] = ('a', None, 'keep standin')
540 else: # pick remote normal file
540 else: # pick remote normal file
541 actions[lfile] = ('g', largs, 'replaces standin')
541 actions[lfile] = ('g', largs, 'replaces standin')
542 actions[standin] = ('r', None, 'replaced by non-standin')
542 actions[standin] = ('r', None, 'replaced by non-standin')
543
543
544 return actions, diverge, renamedelete
544 return actions, diverge, renamedelete
545
545
546 def mergerecordupdates(orig, repo, actions, branchmerge):
546 def mergerecordupdates(orig, repo, actions, branchmerge):
547 if 'lfmr' in actions:
547 if 'lfmr' in actions:
548 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
548 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
549 for lfile, args, msg in actions['lfmr']:
549 for lfile, args, msg in actions['lfmr']:
550 # this should be executed before 'orig', to execute 'remove'
550 # this should be executed before 'orig', to execute 'remove'
551 # before all other actions
551 # before all other actions
552 repo.dirstate.remove(lfile)
552 repo.dirstate.remove(lfile)
553 # make sure lfile doesn't get synclfdirstate'd as normal
553 # make sure lfile doesn't get synclfdirstate'd as normal
554 lfdirstate.add(lfile)
554 lfdirstate.add(lfile)
555 lfdirstate.write()
555 lfdirstate.write()
556
556
557 return orig(repo, actions, branchmerge)
557 return orig(repo, actions, branchmerge)
558
558
559
559
560 # Override filemerge to prompt the user about how they wish to merge
560 # Override filemerge to prompt the user about how they wish to merge
561 # largefiles. This will handle identical edits without prompting the user.
561 # largefiles. This will handle identical edits without prompting the user.
562 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
562 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
563 if not lfutil.isstandin(orig):
563 if not lfutil.isstandin(orig):
564 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
564 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
565
565
566 ahash = fca.data().strip().lower()
566 ahash = fca.data().strip().lower()
567 dhash = fcd.data().strip().lower()
567 dhash = fcd.data().strip().lower()
568 ohash = fco.data().strip().lower()
568 ohash = fco.data().strip().lower()
569 if (ohash != ahash and
569 if (ohash != ahash and
570 ohash != dhash and
570 ohash != dhash and
571 (dhash == ahash or
571 (dhash == ahash or
572 repo.ui.promptchoice(
572 repo.ui.promptchoice(
573 _('largefile %s has a merge conflict\nancestor was %s\n'
573 _('largefile %s has a merge conflict\nancestor was %s\n'
574 'keep (l)ocal %s or\ntake (o)ther %s?'
574 'keep (l)ocal %s or\ntake (o)ther %s?'
575 '$$ &Local $$ &Other') %
575 '$$ &Local $$ &Other') %
576 (lfutil.splitstandin(orig), ahash, dhash, ohash),
576 (lfutil.splitstandin(orig), ahash, dhash, ohash),
577 0) == 1)):
577 0) == 1)):
578 repo.wwrite(fcd.path(), fco.data(), fco.flags())
578 repo.wwrite(fcd.path(), fco.data(), fco.flags())
579 return 0
579 return 0
580
580
581 def copiespathcopies(orig, ctx1, ctx2):
581 def copiespathcopies(orig, ctx1, ctx2):
582 copies = orig(ctx1, ctx2)
582 copies = orig(ctx1, ctx2)
583 updated = {}
583 updated = {}
584
584
585 for k, v in copies.iteritems():
585 for k, v in copies.iteritems():
586 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
586 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
587
587
588 return updated
588 return updated
589
589
590 # Copy first changes the matchers to match standins instead of
590 # Copy first changes the matchers to match standins instead of
591 # largefiles. Then it overrides util.copyfile in that function it
591 # largefiles. Then it overrides util.copyfile in that function it
592 # checks if the destination largefile already exists. It also keeps a
592 # checks if the destination largefile already exists. It also keeps a
593 # list of copied files so that the largefiles can be copied and the
593 # list of copied files so that the largefiles can be copied and the
594 # dirstate updated.
594 # dirstate updated.
595 def overridecopy(orig, ui, repo, pats, opts, rename=False):
595 def overridecopy(orig, ui, repo, pats, opts, rename=False):
596 # doesn't remove largefile on rename
596 # doesn't remove largefile on rename
597 if len(pats) < 2:
597 if len(pats) < 2:
598 # this isn't legal, let the original function deal with it
598 # this isn't legal, let the original function deal with it
599 return orig(ui, repo, pats, opts, rename)
599 return orig(ui, repo, pats, opts, rename)
600
600
601 # This could copy both lfiles and normal files in one command,
601 # This could copy both lfiles and normal files in one command,
602 # but we don't want to do that. First replace their matcher to
602 # but we don't want to do that. First replace their matcher to
603 # only match normal files and run it, then replace it to just
603 # only match normal files and run it, then replace it to just
604 # match largefiles and run it again.
604 # match largefiles and run it again.
605 nonormalfiles = False
605 nonormalfiles = False
606 nolfiles = False
606 nolfiles = False
607 installnormalfilesmatchfn(repo[None].manifest())
607 installnormalfilesmatchfn(repo[None].manifest())
608 try:
608 try:
609 try:
609 try:
610 result = orig(ui, repo, pats, opts, rename)
610 result = orig(ui, repo, pats, opts, rename)
611 except util.Abort, e:
611 except util.Abort, e:
612 if str(e) != _('no files to copy'):
612 if str(e) != _('no files to copy'):
613 raise e
613 raise e
614 else:
614 else:
615 nonormalfiles = True
615 nonormalfiles = True
616 result = 0
616 result = 0
617 finally:
617 finally:
618 restorematchfn()
618 restorematchfn()
619
619
620 # The first rename can cause our current working directory to be removed.
620 # The first rename can cause our current working directory to be removed.
621 # In that case there is nothing left to copy/rename so just quit.
621 # In that case there is nothing left to copy/rename so just quit.
622 try:
622 try:
623 repo.getcwd()
623 repo.getcwd()
624 except OSError:
624 except OSError:
625 return result
625 return result
626
626
627 def makestandin(relpath):
627 def makestandin(relpath):
628 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
628 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
629 return os.path.join(repo.wjoin(lfutil.standin(path)))
629 return os.path.join(repo.wjoin(lfutil.standin(path)))
630
630
631 fullpats = scmutil.expandpats(pats)
631 fullpats = scmutil.expandpats(pats)
632 dest = fullpats[-1]
632 dest = fullpats[-1]
633
633
634 if os.path.isdir(dest):
634 if os.path.isdir(dest):
635 if not os.path.isdir(makestandin(dest)):
635 if not os.path.isdir(makestandin(dest)):
636 os.makedirs(makestandin(dest))
636 os.makedirs(makestandin(dest))
637
637
638 try:
638 try:
639 try:
639 try:
640 # When we call orig below it creates the standins but we don't add
640 # When we call orig below it creates the standins but we don't add
641 # them to the dir state until later so lock during that time.
641 # them to the dir state until later so lock during that time.
642 wlock = repo.wlock()
642 wlock = repo.wlock()
643
643
644 manifest = repo[None].manifest()
644 manifest = repo[None].manifest()
645 def overridematch(ctx, pats=[], opts={}, globbed=False,
645 def overridematch(ctx, pats=[], opts={}, globbed=False,
646 default='relpath'):
646 default='relpath'):
647 newpats = []
647 newpats = []
648 # The patterns were previously mangled to add the standin
648 # The patterns were previously mangled to add the standin
649 # directory; we need to remove that now
649 # directory; we need to remove that now
650 for pat in pats:
650 for pat in pats:
651 if match_.patkind(pat) is None and lfutil.shortname in pat:
651 if match_.patkind(pat) is None and lfutil.shortname in pat:
652 newpats.append(pat.replace(lfutil.shortname, ''))
652 newpats.append(pat.replace(lfutil.shortname, ''))
653 else:
653 else:
654 newpats.append(pat)
654 newpats.append(pat)
655 match = oldmatch(ctx, newpats, opts, globbed, default)
655 match = oldmatch(ctx, newpats, opts, globbed, default)
656 m = copy.copy(match)
656 m = copy.copy(match)
657 lfile = lambda f: lfutil.standin(f) in manifest
657 lfile = lambda f: lfutil.standin(f) in manifest
658 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
658 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
659 m._fmap = set(m._files)
659 m._fmap = set(m._files)
660 origmatchfn = m.matchfn
660 origmatchfn = m.matchfn
661 m.matchfn = lambda f: (lfutil.isstandin(f) and
661 m.matchfn = lambda f: (lfutil.isstandin(f) and
662 (f in manifest) and
662 (f in manifest) and
663 origmatchfn(lfutil.splitstandin(f)) or
663 origmatchfn(lfutil.splitstandin(f)) or
664 None)
664 None)
665 return m
665 return m
666 oldmatch = installmatchfn(overridematch)
666 oldmatch = installmatchfn(overridematch)
667 listpats = []
667 listpats = []
668 for pat in pats:
668 for pat in pats:
669 if match_.patkind(pat) is not None:
669 if match_.patkind(pat) is not None:
670 listpats.append(pat)
670 listpats.append(pat)
671 else:
671 else:
672 listpats.append(makestandin(pat))
672 listpats.append(makestandin(pat))
673
673
674 try:
674 try:
675 origcopyfile = util.copyfile
675 origcopyfile = util.copyfile
676 copiedfiles = []
676 copiedfiles = []
677 def overridecopyfile(src, dest):
677 def overridecopyfile(src, dest):
678 if (lfutil.shortname in src and
678 if (lfutil.shortname in src and
679 dest.startswith(repo.wjoin(lfutil.shortname))):
679 dest.startswith(repo.wjoin(lfutil.shortname))):
680 destlfile = dest.replace(lfutil.shortname, '')
680 destlfile = dest.replace(lfutil.shortname, '')
681 if not opts['force'] and os.path.exists(destlfile):
681 if not opts['force'] and os.path.exists(destlfile):
682 raise IOError('',
682 raise IOError('',
683 _('destination largefile already exists'))
683 _('destination largefile already exists'))
684 copiedfiles.append((src, dest))
684 copiedfiles.append((src, dest))
685 origcopyfile(src, dest)
685 origcopyfile(src, dest)
686
686
687 util.copyfile = overridecopyfile
687 util.copyfile = overridecopyfile
688 result += orig(ui, repo, listpats, opts, rename)
688 result += orig(ui, repo, listpats, opts, rename)
689 finally:
689 finally:
690 util.copyfile = origcopyfile
690 util.copyfile = origcopyfile
691
691
692 lfdirstate = lfutil.openlfdirstate(ui, repo)
692 lfdirstate = lfutil.openlfdirstate(ui, repo)
693 for (src, dest) in copiedfiles:
693 for (src, dest) in copiedfiles:
694 if (lfutil.shortname in src and
694 if (lfutil.shortname in src and
695 dest.startswith(repo.wjoin(lfutil.shortname))):
695 dest.startswith(repo.wjoin(lfutil.shortname))):
696 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
696 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
697 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
697 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
698 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
698 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
699 if not os.path.isdir(destlfiledir):
699 if not os.path.isdir(destlfiledir):
700 os.makedirs(destlfiledir)
700 os.makedirs(destlfiledir)
701 if rename:
701 if rename:
702 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
702 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
703
703
704 # The file is gone, but this deletes any empty parent
704 # The file is gone, but this deletes any empty parent
705 # directories as a side-effect.
705 # directories as a side-effect.
706 util.unlinkpath(repo.wjoin(srclfile), True)
706 util.unlinkpath(repo.wjoin(srclfile), True)
707 lfdirstate.remove(srclfile)
707 lfdirstate.remove(srclfile)
708 else:
708 else:
709 util.copyfile(repo.wjoin(srclfile),
709 util.copyfile(repo.wjoin(srclfile),
710 repo.wjoin(destlfile))
710 repo.wjoin(destlfile))
711
711
712 lfdirstate.add(destlfile)
712 lfdirstate.add(destlfile)
713 lfdirstate.write()
713 lfdirstate.write()
714 except util.Abort, e:
714 except util.Abort, e:
715 if str(e) != _('no files to copy'):
715 if str(e) != _('no files to copy'):
716 raise e
716 raise e
717 else:
717 else:
718 nolfiles = True
718 nolfiles = True
719 finally:
719 finally:
720 restorematchfn()
720 restorematchfn()
721 wlock.release()
721 wlock.release()
722
722
723 if nolfiles and nonormalfiles:
723 if nolfiles and nonormalfiles:
724 raise util.Abort(_('no files to copy'))
724 raise util.Abort(_('no files to copy'))
725
725
726 return result
726 return result
727
727
728 # When the user calls revert, we have to be careful to not revert any
728 # When the user calls revert, we have to be careful to not revert any
729 # changes to other largefiles accidentally. This means we have to keep
729 # changes to other largefiles accidentally. This means we have to keep
730 # track of the largefiles that are being reverted so we only pull down
730 # track of the largefiles that are being reverted so we only pull down
731 # the necessary largefiles.
731 # the necessary largefiles.
732 #
732 #
733 # Standins are only updated (to match the hash of largefiles) before
733 # Standins are only updated (to match the hash of largefiles) before
734 # commits. Update the standins then run the original revert, changing
734 # commits. Update the standins then run the original revert, changing
735 # the matcher to hit standins instead of largefiles. Based on the
735 # the matcher to hit standins instead of largefiles. Based on the
736 # resulting standins update the largefiles.
736 # resulting standins update the largefiles.
737 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
737 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
738 # Because we put the standins in a bad state (by updating them)
738 # Because we put the standins in a bad state (by updating them)
739 # and then return them to a correct state we need to lock to
739 # and then return them to a correct state we need to lock to
740 # prevent others from changing them in their incorrect state.
740 # prevent others from changing them in their incorrect state.
741 wlock = repo.wlock()
741 wlock = repo.wlock()
742 try:
742 try:
743 lfdirstate = lfutil.openlfdirstate(ui, repo)
743 lfdirstate = lfutil.openlfdirstate(ui, repo)
744 s = lfutil.lfdirstatestatus(lfdirstate, repo)
744 s = lfutil.lfdirstatestatus(lfdirstate, repo)
745 lfdirstate.write()
745 lfdirstate.write()
746 for lfile in s.modified:
746 for lfile in s.modified:
747 lfutil.updatestandin(repo, lfutil.standin(lfile))
747 lfutil.updatestandin(repo, lfutil.standin(lfile))
748 for lfile in s.deleted:
748 for lfile in s.deleted:
749 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
749 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
750 os.unlink(repo.wjoin(lfutil.standin(lfile)))
750 os.unlink(repo.wjoin(lfutil.standin(lfile)))
751
751
752 oldstandins = lfutil.getstandinsstate(repo)
752 oldstandins = lfutil.getstandinsstate(repo)
753
753
754 def overridematch(mctx, pats=[], opts={}, globbed=False,
754 def overridematch(mctx, pats=[], opts={}, globbed=False,
755 default='relpath'):
755 default='relpath'):
756 match = oldmatch(mctx, pats, opts, globbed, default)
756 match = oldmatch(mctx, pats, opts, globbed, default)
757 m = copy.copy(match)
757 m = copy.copy(match)
758
758
759 # revert supports recursing into subrepos, and though largefiles
759 # revert supports recursing into subrepos, and though largefiles
760 # currently doesn't work correctly in that case, this match is
760 # currently doesn't work correctly in that case, this match is
761 # called, so the lfdirstate above may not be the correct one for
761 # called, so the lfdirstate above may not be the correct one for
762 # this invocation of match.
762 # this invocation of match.
763 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
763 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
764 False)
764 False)
765
765
766 def tostandin(f):
766 def tostandin(f):
767 standin = lfutil.standin(f)
767 standin = lfutil.standin(f)
- 768 if standin in mctx:
+ 768 if standin in ctx or standin in mctx:
769 return standin
769 return standin
770 elif standin in repo[None] or lfdirstate[f] == 'r':
770 elif standin in repo[None] or lfdirstate[f] == 'r':
771 return None
771 return None
772 return f
772 return f
773 m._files = [tostandin(f) for f in m._files]
773 m._files = [tostandin(f) for f in m._files]
774 m._files = [f for f in m._files if f is not None]
774 m._files = [f for f in m._files if f is not None]
775 m._fmap = set(m._files)
775 m._fmap = set(m._files)
776 origmatchfn = m.matchfn
776 origmatchfn = m.matchfn
777 def matchfn(f):
777 def matchfn(f):
778 if lfutil.isstandin(f):
778 if lfutil.isstandin(f):
779 return (origmatchfn(lfutil.splitstandin(f)) and
779 return (origmatchfn(lfutil.splitstandin(f)) and
- 780 (f in repo[None] or f in mctx))
+ 780 (f in ctx or f in mctx))
781 return origmatchfn(f)
781 return origmatchfn(f)
782 m.matchfn = matchfn
782 m.matchfn = matchfn
783 return m
783 return m
784 oldmatch = installmatchfn(overridematch)
784 oldmatch = installmatchfn(overridematch)
785 try:
785 try:
786 orig(ui, repo, ctx, parents, *pats, **opts)
786 orig(ui, repo, ctx, parents, *pats, **opts)
787 finally:
787 finally:
788 restorematchfn()
788 restorematchfn()
789
789
790 newstandins = lfutil.getstandinsstate(repo)
790 newstandins = lfutil.getstandinsstate(repo)
791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
792 # lfdirstate should be 'normallookup'-ed for updated files,
792 # lfdirstate should be 'normallookup'-ed for updated files,
793 # because reverting doesn't touch dirstate for 'normal' files
793 # because reverting doesn't touch dirstate for 'normal' files
794 # when target revision is explicitly specified: in such case,
794 # when target revision is explicitly specified: in such case,
795 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
795 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
796 # of target (standin) file.
796 # of target (standin) file.
797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
798 normallookup=True)
798 normallookup=True)
799
799
800 finally:
800 finally:
801 wlock.release()
801 wlock.release()
802
802
803 # after pulling changesets, we need to take some extra care to get
803 # after pulling changesets, we need to take some extra care to get
804 # largefiles updated remotely
804 # largefiles updated remotely
805 def overridepull(orig, ui, repo, source=None, **opts):
805 def overridepull(orig, ui, repo, source=None, **opts):
806 revsprepull = len(repo)
806 revsprepull = len(repo)
807 if not source:
807 if not source:
808 source = 'default'
808 source = 'default'
809 repo.lfpullsource = source
809 repo.lfpullsource = source
810 result = orig(ui, repo, source, **opts)
810 result = orig(ui, repo, source, **opts)
811 revspostpull = len(repo)
811 revspostpull = len(repo)
812 lfrevs = opts.get('lfrev', [])
812 lfrevs = opts.get('lfrev', [])
813 if opts.get('all_largefiles'):
813 if opts.get('all_largefiles'):
814 lfrevs.append('pulled()')
814 lfrevs.append('pulled()')
815 if lfrevs and revspostpull > revsprepull:
815 if lfrevs and revspostpull > revsprepull:
816 numcached = 0
816 numcached = 0
817 repo.firstpulled = revsprepull # for pulled() revset expression
817 repo.firstpulled = revsprepull # for pulled() revset expression
818 try:
818 try:
819 for rev in scmutil.revrange(repo, lfrevs):
819 for rev in scmutil.revrange(repo, lfrevs):
820 ui.note(_('pulling largefiles for revision %s\n') % rev)
820 ui.note(_('pulling largefiles for revision %s\n') % rev)
821 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
821 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
822 numcached += len(cached)
822 numcached += len(cached)
823 finally:
823 finally:
824 del repo.firstpulled
824 del repo.firstpulled
825 ui.status(_("%d largefiles cached\n") % numcached)
825 ui.status(_("%d largefiles cached\n") % numcached)
826 return result
826 return result
827
827
828 def pulledrevsetsymbol(repo, subset, x):
828 def pulledrevsetsymbol(repo, subset, x):
829 """``pulled()``
829 """``pulled()``
830 Changesets that have just been pulled.
830 Changesets that have just been pulled.
831
831
832 Only available with largefiles from pull --lfrev expressions.
832 Only available with largefiles from pull --lfrev expressions.
833
833
834 .. container:: verbose
834 .. container:: verbose
835
835
836 Some examples:
836 Some examples:
837
837
838 - pull largefiles for all new changesets::
838 - pull largefiles for all new changesets::
839
839
840 hg pull --lfrev "pulled()"
840 hg pull --lfrev "pulled()"
841
841
842 - pull largefiles for all new branch heads::
842 - pull largefiles for all new branch heads::
843
843
844 hg pull --lfrev "head(pulled()) and not closed()"
844 hg pull --lfrev "head(pulled()) and not closed()"
845
845
846 """
846 """
847
847
848 try:
848 try:
849 firstpulled = repo.firstpulled
849 firstpulled = repo.firstpulled
850 except AttributeError:
850 except AttributeError:
851 raise util.Abort(_("pulled() only available in --lfrev"))
851 raise util.Abort(_("pulled() only available in --lfrev"))
852 return revset.baseset([r for r in subset if r >= firstpulled])
852 return revset.baseset([r for r in subset if r >= firstpulled])
853
853
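The pulled() predicate above only filters the candidate revisions against the repository length recorded before the pull. A toy sketch of that idea with plain integers (the revision counts are made-up values):

def pulledrevs(subset, firstpulled):
    # revisions at or above the pre-pull repo length were "just pulled"
    return [r for r in subset if r >= firstpulled]

revsprepull = 5                        # len(repo) before the pull (assumed)
revspostpull = 9                       # len(repo) after the pull (assumed)
print(pulledrevs(range(revspostpull), revsprepull))   # -> [5, 6, 7, 8]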
854 def overrideclone(orig, ui, source, dest=None, **opts):
854 def overrideclone(orig, ui, source, dest=None, **opts):
855 d = dest
855 d = dest
856 if d is None:
856 if d is None:
857 d = hg.defaultdest(source)
857 d = hg.defaultdest(source)
858 if opts.get('all_largefiles') and not hg.islocal(d):
858 if opts.get('all_largefiles') and not hg.islocal(d):
859 raise util.Abort(_(
859 raise util.Abort(_(
860 '--all-largefiles is incompatible with non-local destination %s') %
860 '--all-largefiles is incompatible with non-local destination %s') %
861 d)
861 d)
862
862
863 return orig(ui, source, dest, **opts)
863 return orig(ui, source, dest, **opts)
864
864
865 def hgclone(orig, ui, opts, *args, **kwargs):
865 def hgclone(orig, ui, opts, *args, **kwargs):
866 result = orig(ui, opts, *args, **kwargs)
866 result = orig(ui, opts, *args, **kwargs)
867
867
868 if result is not None:
868 if result is not None:
869 sourcerepo, destrepo = result
869 sourcerepo, destrepo = result
870 repo = destrepo.local()
870 repo = destrepo.local()
871
871
872 # If largefiles is required for this repo, permanently enable it locally
872 # If largefiles is required for this repo, permanently enable it locally
873 if 'largefiles' in repo.requirements:
873 if 'largefiles' in repo.requirements:
874 fp = repo.vfs('hgrc', 'a', text=True)
874 fp = repo.vfs('hgrc', 'a', text=True)
875 try:
875 try:
876 fp.write('\n[extensions]\nlargefiles=\n')
876 fp.write('\n[extensions]\nlargefiles=\n')
877 finally:
877 finally:
878 fp.close()
878 fp.close()
879
879
880 # Caching is implicitly limited to 'rev' option, since the dest repo was
880 # Caching is implicitly limited to 'rev' option, since the dest repo was
881 # truncated at that point. The user may expect a download count with
881 # truncated at that point. The user may expect a download count with
882 # this option, so attempt it whether or not this is a largefile repo.
882 # this option, so attempt it whether or not this is a largefile repo.
883 if opts.get('all_largefiles'):
883 if opts.get('all_largefiles'):
884 success, missing = lfcommands.downloadlfiles(ui, repo, None)
884 success, missing = lfcommands.downloadlfiles(ui, repo, None)
885
885
886 if missing != 0:
886 if missing != 0:
887 return None
887 return None
888
888
889 return result
889 return result
890
890
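When the clone carries the 'largefiles' requirement, hgclone above appends an [extensions] stanza to the new repository's hgrc so the extension stays enabled locally. Roughly the same effect with plain file I/O (the path handling below is an illustrative assumption, not the extension's vfs API):

import os

def enablelargefiles(repopath):
    # append, never truncate: the user's existing hgrc settings are preserved
    hgrc = os.path.join(repopath, '.hg', 'hgrc')
    with open(hgrc, 'a') as fp:
        fp.write('\n[extensions]\nlargefiles=\n')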
891 def overriderebase(orig, ui, repo, **opts):
891 def overriderebase(orig, ui, repo, **opts):
892 if not util.safehasattr(repo, '_largefilesenabled'):
892 if not util.safehasattr(repo, '_largefilesenabled'):
893 return orig(ui, repo, **opts)
893 return orig(ui, repo, **opts)
894
894
895 resuming = opts.get('continue')
895 resuming = opts.get('continue')
896 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
896 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
897 repo._lfstatuswriters.append(lambda *msg, **opts: None)
897 repo._lfstatuswriters.append(lambda *msg, **opts: None)
898 try:
898 try:
899 return orig(ui, repo, **opts)
899 return orig(ui, repo, **opts)
900 finally:
900 finally:
901 repo._lfstatuswriters.pop()
901 repo._lfstatuswriters.pop()
902 repo._lfcommithooks.pop()
902 repo._lfcommithooks.pop()
903
903
904 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
904 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
905 prefix='', mtime=None, subrepos=None):
905 prefix='', mtime=None, subrepos=None):
906 # No need to lock because we are only reading history and
906 # No need to lock because we are only reading history and
907 # largefile caches, neither of which are modified.
907 # largefile caches, neither of which are modified.
908 lfcommands.cachelfiles(repo.ui, repo, node)
908 lfcommands.cachelfiles(repo.ui, repo, node)
909
909
910 if kind not in archival.archivers:
910 if kind not in archival.archivers:
911 raise util.Abort(_("unknown archive type '%s'") % kind)
911 raise util.Abort(_("unknown archive type '%s'") % kind)
912
912
913 ctx = repo[node]
913 ctx = repo[node]
914
914
915 if kind == 'files':
915 if kind == 'files':
916 if prefix:
916 if prefix:
917 raise util.Abort(
917 raise util.Abort(
918 _('cannot give prefix when archiving to files'))
918 _('cannot give prefix when archiving to files'))
919 else:
919 else:
920 prefix = archival.tidyprefix(dest, kind, prefix)
920 prefix = archival.tidyprefix(dest, kind, prefix)
921
921
922 def write(name, mode, islink, getdata):
922 def write(name, mode, islink, getdata):
923 if matchfn and not matchfn(name):
923 if matchfn and not matchfn(name):
924 return
924 return
925 data = getdata()
925 data = getdata()
926 if decode:
926 if decode:
927 data = repo.wwritedata(name, data)
927 data = repo.wwritedata(name, data)
928 archiver.addfile(prefix + name, mode, islink, data)
928 archiver.addfile(prefix + name, mode, islink, data)
929
929
930 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
930 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
931
931
932 if repo.ui.configbool("ui", "archivemeta", True):
932 if repo.ui.configbool("ui", "archivemeta", True):
933 def metadata():
933 def metadata():
934 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
934 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
935 hex(repo.changelog.node(0)), hex(node), ctx.branch())
935 hex(repo.changelog.node(0)), hex(node), ctx.branch())
936
936
937 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
937 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
938 if repo.tagtype(t) == 'global')
938 if repo.tagtype(t) == 'global')
939 if not tags:
939 if not tags:
940 repo.ui.pushbuffer()
940 repo.ui.pushbuffer()
941 opts = {'template': '{latesttag}\n{latesttagdistance}',
941 opts = {'template': '{latesttag}\n{latesttagdistance}',
942 'style': '', 'patch': None, 'git': None}
942 'style': '', 'patch': None, 'git': None}
943 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
943 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
944 ltags, dist = repo.ui.popbuffer().split('\n')
944 ltags, dist = repo.ui.popbuffer().split('\n')
945 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
945 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
946 tags += 'latesttagdistance: %s\n' % dist
946 tags += 'latesttagdistance: %s\n' % dist
947
947
948 return base + tags
948 return base + tags
949
949
950 write('.hg_archival.txt', 0644, False, metadata)
950 write('.hg_archival.txt', 0644, False, metadata)
951
951
952 for f in ctx:
952 for f in ctx:
953 ff = ctx.flags(f)
953 ff = ctx.flags(f)
954 getdata = ctx[f].data
954 getdata = ctx[f].data
955 if lfutil.isstandin(f):
955 if lfutil.isstandin(f):
956 path = lfutil.findfile(repo, getdata().strip())
956 path = lfutil.findfile(repo, getdata().strip())
957 if path is None:
957 if path is None:
958 raise util.Abort(
958 raise util.Abort(
959 _('largefile %s not found in repo store or system cache')
959 _('largefile %s not found in repo store or system cache')
960 % lfutil.splitstandin(f))
960 % lfutil.splitstandin(f))
961 f = lfutil.splitstandin(f)
961 f = lfutil.splitstandin(f)
962
962
963 def getdatafn():
963 def getdatafn():
964 fd = None
964 fd = None
965 try:
965 try:
966 fd = open(path, 'rb')
966 fd = open(path, 'rb')
967 return fd.read()
967 return fd.read()
968 finally:
968 finally:
969 if fd:
969 if fd:
970 fd.close()
970 fd.close()
971
971
972 getdata = getdatafn
972 getdata = getdatafn
973 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
973 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
974
974
975 if subrepos:
975 if subrepos:
976 for subpath in sorted(ctx.substate):
976 for subpath in sorted(ctx.substate):
977 sub = ctx.sub(subpath)
977 sub = ctx.sub(subpath)
978 submatch = match_.narrowmatcher(subpath, matchfn)
978 submatch = match_.narrowmatcher(subpath, matchfn)
979 sub.archive(archiver, prefix, submatch)
979 sub.archive(archiver, prefix, submatch)
980
980
981 archiver.done()
981 archiver.done()
982
982
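overridearchive writes a small '.hg_archival.txt' metadata file before adding the (possibly largefile-backed) contents. A simplified sketch of building that payload; the field set below omits the latesttag fallback and uses made-up values:

def archivalmetadata(repoid, node, branch, tags):
    """Build a .hg_archival.txt-style payload (simplified)."""
    lines = ['repo: %s' % repoid, 'node: %s' % node, 'branch: %s' % branch]
    lines.extend('tag: %s' % t for t in tags)
    return '\n'.join(lines) + '\n'

print(archivalmetadata('0a1b2c3d', 'deadbeefcafe', 'default', ['1.0']))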
983 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
983 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
984 repo._get(repo._state + ('hg',))
984 repo._get(repo._state + ('hg',))
985 rev = repo._state[1]
985 rev = repo._state[1]
986 ctx = repo._repo[rev]
986 ctx = repo._repo[rev]
987
987
988 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
988 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
989
989
990 def write(name, mode, islink, getdata):
990 def write(name, mode, islink, getdata):
991 # At this point, the standin has been replaced with the largefile name,
991 # At this point, the standin has been replaced with the largefile name,
992 # so the normal matcher works here without the lfutil variants.
992 # so the normal matcher works here without the lfutil variants.
993 if match and not match(f):
993 if match and not match(f):
994 return
994 return
995 data = getdata()
995 data = getdata()
996
996
997 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
997 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
998
998
999 for f in ctx:
999 for f in ctx:
1000 ff = ctx.flags(f)
1000 ff = ctx.flags(f)
1001 getdata = ctx[f].data
1001 getdata = ctx[f].data
1002 if lfutil.isstandin(f):
1002 if lfutil.isstandin(f):
1003 path = lfutil.findfile(repo._repo, getdata().strip())
1003 path = lfutil.findfile(repo._repo, getdata().strip())
1004 if path is None:
1004 if path is None:
1005 raise util.Abort(
1005 raise util.Abort(
1006 _('largefile %s not found in repo store or system cache')
1006 _('largefile %s not found in repo store or system cache')
1007 % lfutil.splitstandin(f))
1007 % lfutil.splitstandin(f))
1008 f = lfutil.splitstandin(f)
1008 f = lfutil.splitstandin(f)
1009
1009
1010 def getdatafn():
1010 def getdatafn():
1011 fd = None
1011 fd = None
1012 try:
1012 try:
1013 fd = open(os.path.join(prefix, path), 'rb')
1013 fd = open(os.path.join(prefix, path), 'rb')
1014 return fd.read()
1014 return fd.read()
1015 finally:
1015 finally:
1016 if fd:
1016 if fd:
1017 fd.close()
1017 fd.close()
1018
1018
1019 getdata = getdatafn
1019 getdata = getdatafn
1020
1020
1021 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
1021 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
1022
1022
1023 for subpath in sorted(ctx.substate):
1023 for subpath in sorted(ctx.substate):
1024 sub = ctx.sub(subpath)
1024 sub = ctx.sub(subpath)
1025 submatch = match_.narrowmatcher(subpath, match)
1025 submatch = match_.narrowmatcher(subpath, match)
1026 sub.archive(archiver, os.path.join(prefix, repo._path) + '/', submatch)
1026 sub.archive(archiver, os.path.join(prefix, repo._path) + '/', submatch)
1027
1027
1028 # If a largefile is modified, the change is not reflected in its
1028 # If a largefile is modified, the change is not reflected in its
1029 # standin until a commit. cmdutil.bailifchanged() raises an exception
1029 # standin until a commit. cmdutil.bailifchanged() raises an exception
1030 # if the repo has uncommitted changes. Wrap it to also check if
1030 # if the repo has uncommitted changes. Wrap it to also check if
1031 # largefiles were changed. This is used by bisect, backout and fetch.
1031 # largefiles were changed. This is used by bisect, backout and fetch.
1032 def overridebailifchanged(orig, repo):
1032 def overridebailifchanged(orig, repo):
1033 orig(repo)
1033 orig(repo)
1034 repo.lfstatus = True
1034 repo.lfstatus = True
1035 s = repo.status()
1035 s = repo.status()
1036 repo.lfstatus = False
1036 repo.lfstatus = False
1037 if s.modified or s.added or s.removed or s.deleted:
1037 if s.modified or s.added or s.removed or s.deleted:
1038 raise util.Abort(_('uncommitted changes'))
1038 raise util.Abort(_('uncommitted changes'))
1039
1039
1040 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1040 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1041 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1041 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1042 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1042 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1043 m = composelargefilematcher(match, repo[None].manifest())
1043 m = composelargefilematcher(match, repo[None].manifest())
1044
1044
1045 try:
1045 try:
1046 repo.lfstatus = True
1046 repo.lfstatus = True
1047 s = repo.status(match=m, clean=True)
1047 s = repo.status(match=m, clean=True)
1048 finally:
1048 finally:
1049 repo.lfstatus = False
1049 repo.lfstatus = False
1050 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1050 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1051 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1051 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1052
1052
1053 for f in forget:
1053 for f in forget:
1054 if lfutil.standin(f) not in repo.dirstate and not \
1054 if lfutil.standin(f) not in repo.dirstate and not \
1055 repo.wvfs.isdir(lfutil.standin(f)):
1055 repo.wvfs.isdir(lfutil.standin(f)):
1056 ui.warn(_('not removing %s: file is already untracked\n')
1056 ui.warn(_('not removing %s: file is already untracked\n')
1057 % m.rel(f))
1057 % m.rel(f))
1058 bad.append(f)
1058 bad.append(f)
1059
1059
1060 for f in forget:
1060 for f in forget:
1061 if ui.verbose or not m.exact(f):
1061 if ui.verbose or not m.exact(f):
1062 ui.status(_('removing %s\n') % m.rel(f))
1062 ui.status(_('removing %s\n') % m.rel(f))
1063
1063
1064 # Need to lock because standin files are deleted then removed from the
1064 # Need to lock because standin files are deleted then removed from the
1065 # repository and we could race in-between.
1065 # repository and we could race in-between.
1066 wlock = repo.wlock()
1066 wlock = repo.wlock()
1067 try:
1067 try:
1068 lfdirstate = lfutil.openlfdirstate(ui, repo)
1068 lfdirstate = lfutil.openlfdirstate(ui, repo)
1069 for f in forget:
1069 for f in forget:
1070 if lfdirstate[f] == 'a':
1070 if lfdirstate[f] == 'a':
1071 lfdirstate.drop(f)
1071 lfdirstate.drop(f)
1072 else:
1072 else:
1073 lfdirstate.remove(f)
1073 lfdirstate.remove(f)
1074 lfdirstate.write()
1074 lfdirstate.write()
1075 standins = [lfutil.standin(f) for f in forget]
1075 standins = [lfutil.standin(f) for f in forget]
1076 for f in standins:
1076 for f in standins:
1077 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1077 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1078 rejected = repo[None].forget(standins)
1078 rejected = repo[None].forget(standins)
1079 finally:
1079 finally:
1080 wlock.release()
1080 wlock.release()
1081
1081
1082 bad.extend(f for f in rejected if f in m.files())
1082 bad.extend(f for f in rejected if f in m.files())
1083 forgot.extend(f for f in forget if f not in rejected)
1083 forgot.extend(f for f in forget if f not in rejected)
1084 return bad, forgot
1084 return bad, forgot
1085
1085
1086 def _getoutgoings(repo, other, missing, addfunc):
1086 def _getoutgoings(repo, other, missing, addfunc):
1087 """get pairs of filename and largefile hash in outgoing revisions
1087 """get pairs of filename and largefile hash in outgoing revisions
1088 in 'missing'.
1088 in 'missing'.
1089
1089
1090 largefiles already existing on 'other' repository are ignored.
1090 largefiles already existing on 'other' repository are ignored.
1091
1091
1092 'addfunc' is invoked with each unique pair of filename and
1092 'addfunc' is invoked with each unique pair of filename and
1093 largefile hash value.
1093 largefile hash value.
1094 """
1094 """
1095 knowns = set()
1095 knowns = set()
1096 lfhashes = set()
1096 lfhashes = set()
1097 def dedup(fn, lfhash):
1097 def dedup(fn, lfhash):
1098 k = (fn, lfhash)
1098 k = (fn, lfhash)
1099 if k not in knowns:
1099 if k not in knowns:
1100 knowns.add(k)
1100 knowns.add(k)
1101 lfhashes.add(lfhash)
1101 lfhashes.add(lfhash)
1102 lfutil.getlfilestoupload(repo, missing, dedup)
1102 lfutil.getlfilestoupload(repo, missing, dedup)
1103 if lfhashes:
1103 if lfhashes:
1104 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1104 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1105 for fn, lfhash in knowns:
1105 for fn, lfhash in knowns:
1106 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1106 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1107 addfunc(fn, lfhash)
1107 addfunc(fn, lfhash)
1108
1108
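_getoutgoings deduplicates (filename, hash) pairs, asks the remote store once which hashes already exist, and only then calls the caller's addfunc for the missing ones. A self-contained sketch of that two-phase pattern, where a plain set stands in for the remote store query:

def getoutgoings(pairs, remotehashes, addfunc):
    """pairs: iterable of (filename, hash); addfunc: called per missing pair."""
    knowns = set()
    lfhashes = set()
    for fn, lfhash in pairs:
        if (fn, lfhash) not in knowns:
            knowns.add((fn, lfhash))
            lfhashes.add(lfhash)
    # one batched existence check instead of one round trip per file
    lfexists = dict((h, h in remotehashes) for h in lfhashes)
    for fn, lfhash in knowns:
        if not lfexists[lfhash]:
            addfunc(fn, lfhash)

missing = []
getoutgoings([('a.bin', 'h1'), ('a.bin', 'h1'), ('b.bin', 'h2')], {'h2'},
             lambda fn, h: missing.append((fn, h)))
print(missing)   # -> [('a.bin', 'h1')]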
1109 def outgoinghook(ui, repo, other, opts, missing):
1109 def outgoinghook(ui, repo, other, opts, missing):
1110 if opts.pop('large', None):
1110 if opts.pop('large', None):
1111 lfhashes = set()
1111 lfhashes = set()
1112 if ui.debugflag:
1112 if ui.debugflag:
1113 toupload = {}
1113 toupload = {}
1114 def addfunc(fn, lfhash):
1114 def addfunc(fn, lfhash):
1115 if fn not in toupload:
1115 if fn not in toupload:
1116 toupload[fn] = []
1116 toupload[fn] = []
1117 toupload[fn].append(lfhash)
1117 toupload[fn].append(lfhash)
1118 lfhashes.add(lfhash)
1118 lfhashes.add(lfhash)
1119 def showhashes(fn):
1119 def showhashes(fn):
1120 for lfhash in sorted(toupload[fn]):
1120 for lfhash in sorted(toupload[fn]):
1121 ui.debug(' %s\n' % (lfhash))
1121 ui.debug(' %s\n' % (lfhash))
1122 else:
1122 else:
1123 toupload = set()
1123 toupload = set()
1124 def addfunc(fn, lfhash):
1124 def addfunc(fn, lfhash):
1125 toupload.add(fn)
1125 toupload.add(fn)
1126 lfhashes.add(lfhash)
1126 lfhashes.add(lfhash)
1127 def showhashes(fn):
1127 def showhashes(fn):
1128 pass
1128 pass
1129 _getoutgoings(repo, other, missing, addfunc)
1129 _getoutgoings(repo, other, missing, addfunc)
1130
1130
1131 if not toupload:
1131 if not toupload:
1132 ui.status(_('largefiles: no files to upload\n'))
1132 ui.status(_('largefiles: no files to upload\n'))
1133 else:
1133 else:
1134 ui.status(_('largefiles to upload (%d entities):\n')
1134 ui.status(_('largefiles to upload (%d entities):\n')
1135 % (len(lfhashes)))
1135 % (len(lfhashes)))
1136 for file in sorted(toupload):
1136 for file in sorted(toupload):
1137 ui.status(lfutil.splitstandin(file) + '\n')
1137 ui.status(lfutil.splitstandin(file) + '\n')
1138 showhashes(file)
1138 showhashes(file)
1139 ui.status('\n')
1139 ui.status('\n')
1140
1140
1141 def summaryremotehook(ui, repo, opts, changes):
1141 def summaryremotehook(ui, repo, opts, changes):
1142 largeopt = opts.get('large', False)
1142 largeopt = opts.get('large', False)
1143 if changes is None:
1143 if changes is None:
1144 if largeopt:
1144 if largeopt:
1145 return (False, True) # only outgoing check is needed
1145 return (False, True) # only outgoing check is needed
1146 else:
1146 else:
1147 return (False, False)
1147 return (False, False)
1148 elif largeopt:
1148 elif largeopt:
1149 url, branch, peer, outgoing = changes[1]
1149 url, branch, peer, outgoing = changes[1]
1150 if peer is None:
1150 if peer is None:
1151 # i18n: column positioning for "hg summary"
1151 # i18n: column positioning for "hg summary"
1152 ui.status(_('largefiles: (no remote repo)\n'))
1152 ui.status(_('largefiles: (no remote repo)\n'))
1153 return
1153 return
1154
1154
1155 toupload = set()
1155 toupload = set()
1156 lfhashes = set()
1156 lfhashes = set()
1157 def addfunc(fn, lfhash):
1157 def addfunc(fn, lfhash):
1158 toupload.add(fn)
1158 toupload.add(fn)
1159 lfhashes.add(lfhash)
1159 lfhashes.add(lfhash)
1160 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1160 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1161
1161
1162 if not toupload:
1162 if not toupload:
1163 # i18n: column positioning for "hg summary"
1163 # i18n: column positioning for "hg summary"
1164 ui.status(_('largefiles: (no files to upload)\n'))
1164 ui.status(_('largefiles: (no files to upload)\n'))
1165 else:
1165 else:
1166 # i18n: column positioning for "hg summary"
1166 # i18n: column positioning for "hg summary"
1167 ui.status(_('largefiles: %d entities for %d files to upload\n')
1167 ui.status(_('largefiles: %d entities for %d files to upload\n')
1168 % (len(lfhashes), len(toupload)))
1168 % (len(lfhashes), len(toupload)))
1169
1169
1170 def overridesummary(orig, ui, repo, *pats, **opts):
1170 def overridesummary(orig, ui, repo, *pats, **opts):
1171 try:
1171 try:
1172 repo.lfstatus = True
1172 repo.lfstatus = True
1173 orig(ui, repo, *pats, **opts)
1173 orig(ui, repo, *pats, **opts)
1174 finally:
1174 finally:
1175 repo.lfstatus = False
1175 repo.lfstatus = False
1176
1176
1177 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1177 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1178 similarity=None):
1178 similarity=None):
1179 if not lfutil.islfilesrepo(repo):
1179 if not lfutil.islfilesrepo(repo):
1180 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1180 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1181 # Get the list of missing largefiles so we can remove them
1181 # Get the list of missing largefiles so we can remove them
1182 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1182 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1183 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1183 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1184 False, False, False)
1184 False, False, False)
1185
1185
1186 # Call into the normal remove code, but we want the removal of the standin
1186 # Call into the normal remove code, but we want the removal of the standin
1187 # to be handled by the original addremove. Monkey patching here makes sure
1187 # to be handled by the original addremove. Monkey patching here makes sure
1188 # we don't remove the standin in the largefiles code, preventing a very
1188 # we don't remove the standin in the largefiles code, preventing a very
1189 # confused state later.
1189 # confused state later.
1190 if s.deleted:
1190 if s.deleted:
1191 m = copy.copy(matcher)
1191 m = copy.copy(matcher)
1192
1192
1193 # The m._files and m._map attributes are not changed to the deleted list
1193 # The m._files and m._map attributes are not changed to the deleted list
1194 # because that affects the m.exact() test, which in turn governs whether
1194 # because that affects the m.exact() test, which in turn governs whether
1195 # or not the file name is printed, and how. Simply limit the original
1195 # or not the file name is printed, and how. Simply limit the original
1196 # matches to those in the deleted status list.
1196 # matches to those in the deleted status list.
1197 matchfn = m.matchfn
1197 matchfn = m.matchfn
1198 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1198 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1199
1199
1200 removelargefiles(repo.ui, repo, True, m, **opts)
1200 removelargefiles(repo.ui, repo, True, m, **opts)
1201 # Call into the normal add code, and any files that *should* be added as
1201 # Call into the normal add code, and any files that *should* be added as
1202 # largefiles will be
1202 # largefiles will be
1203 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1203 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1204 # Now that we've handled largefiles, hand off to the original addremove
1204 # Now that we've handled largefiles, hand off to the original addremove
1205 # function to take care of the rest. Make sure it doesn't do anything with
1205 # function to take care of the rest. Make sure it doesn't do anything with
1206 # largefiles by passing a matcher that will ignore them.
1206 # largefiles by passing a matcher that will ignore them.
1207 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1207 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1208 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1208 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1209
1209
1210 # Calling purge with --all will cause the largefiles to be deleted.
1210 # Calling purge with --all will cause the largefiles to be deleted.
1211 # Override repo.status to prevent this from happening.
1211 # Override repo.status to prevent this from happening.
1212 def overridepurge(orig, ui, repo, *dirs, **opts):
1212 def overridepurge(orig, ui, repo, *dirs, **opts):
1213 # XXX Monkey patching a repoview will not work. The assigned attribute will
1213 # XXX Monkey patching a repoview will not work. The assigned attribute will
1214 # be set on the unfiltered repo, but we will only lookup attributes in the
1214 # be set on the unfiltered repo, but we will only lookup attributes in the
1215 # unfiltered repo if the lookup in the repoview object itself fails. As the
1215 # unfiltered repo if the lookup in the repoview object itself fails. As the
1216 # monkey patched method exists on the repoview class the lookup will not
1216 # monkey patched method exists on the repoview class the lookup will not
1217 # fail. As a result, the original version will shadow the monkey patched
1217 # fail. As a result, the original version will shadow the monkey patched
1218 # one, defeating the monkey patch.
1218 # one, defeating the monkey patch.
1219 #
1219 #
1220 # As a workaround we use an unfiltered repo here. We should do something
1220 # As a workaround we use an unfiltered repo here. We should do something
1221 # cleaner instead.
1221 # cleaner instead.
1222 repo = repo.unfiltered()
1222 repo = repo.unfiltered()
1223 oldstatus = repo.status
1223 oldstatus = repo.status
1224 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1224 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1225 clean=False, unknown=False, listsubrepos=False):
1225 clean=False, unknown=False, listsubrepos=False):
1226 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1226 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1227 listsubrepos)
1227 listsubrepos)
1228 lfdirstate = lfutil.openlfdirstate(ui, repo)
1228 lfdirstate = lfutil.openlfdirstate(ui, repo)
1229 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1229 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1230 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1230 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1231 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1231 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1232 unknown, ignored, r.clean)
1232 unknown, ignored, r.clean)
1233 repo.status = overridestatus
1233 repo.status = overridestatus
1234 orig(ui, repo, *dirs, **opts)
1234 orig(ui, repo, *dirs, **opts)
1235 repo.status = oldstatus
1235 repo.status = oldstatus
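The comment at the top of overridepurge describes why monkey patching a filtered repoview fails: attribute assignment lands on the unfiltered repo, while attribute lookup finds the method on the view's class first and never falls through to the patch. The toy classes below illustrate only that lookup/assignment asymmetry; they are not Mercurial's repoview implementation.

class Repo(object):
    def status(self):
        return 'unfiltered status'

class RepoView(object):
    """Proxy: attribute sets go to the wrapped repo; gets fall back to it
    only when the name is not found on the view itself."""
    def __init__(self, repo):
        object.__setattr__(self, '_repo', repo)
    def status(self):                     # defined on the view class
        return 'filtered ' + type(self._repo).status(self._repo)
    def __getattr__(self, name):          # reached only on lookup failure
        return getattr(self._repo, name)
    def __setattr__(self, name, value):   # sets land on the unfiltered repo
        setattr(self._repo, name, value)

view = RepoView(Repo())
view.status = lambda: 'patched status'    # assignment goes to view._repo ...
print(view.status())        # -> 'filtered unfiltered status': the class
                            #    method still shadows the patch, as warned
print(view._repo.status())  # -> 'patched status'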
1236 def overriderollback(orig, ui, repo, **opts):
1236 def overriderollback(orig, ui, repo, **opts):
1237 wlock = repo.wlock()
1237 wlock = repo.wlock()
1238 try:
1238 try:
1239 before = repo.dirstate.parents()
1239 before = repo.dirstate.parents()
1240 orphans = set(f for f in repo.dirstate
1240 orphans = set(f for f in repo.dirstate
1241 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1241 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1242 result = orig(ui, repo, **opts)
1242 result = orig(ui, repo, **opts)
1243 after = repo.dirstate.parents()
1243 after = repo.dirstate.parents()
1244 if before == after:
1244 if before == after:
1245 return result # no need to restore standins
1245 return result # no need to restore standins
1246
1246
1247 pctx = repo['.']
1247 pctx = repo['.']
1248 for f in repo.dirstate:
1248 for f in repo.dirstate:
1249 if lfutil.isstandin(f):
1249 if lfutil.isstandin(f):
1250 orphans.discard(f)
1250 orphans.discard(f)
1251 if repo.dirstate[f] == 'r':
1251 if repo.dirstate[f] == 'r':
1252 repo.wvfs.unlinkpath(f, ignoremissing=True)
1252 repo.wvfs.unlinkpath(f, ignoremissing=True)
1253 elif f in pctx:
1253 elif f in pctx:
1254 fctx = pctx[f]
1254 fctx = pctx[f]
1255 repo.wwrite(f, fctx.data(), fctx.flags())
1255 repo.wwrite(f, fctx.data(), fctx.flags())
1256 else:
1256 else:
1257 # content of standin is not so important in 'a',
1257 # content of standin is not so important in 'a',
1258 # 'm' or 'n' (coming from the 2nd parent) cases
1258 # 'm' or 'n' (coming from the 2nd parent) cases
1259 lfutil.writestandin(repo, f, '', False)
1259 lfutil.writestandin(repo, f, '', False)
1260 for standin in orphans:
1260 for standin in orphans:
1261 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1261 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1262
1262
1263 lfdirstate = lfutil.openlfdirstate(ui, repo)
1263 lfdirstate = lfutil.openlfdirstate(ui, repo)
1264 orphans = set(lfdirstate)
1264 orphans = set(lfdirstate)
1265 lfiles = lfutil.listlfiles(repo)
1265 lfiles = lfutil.listlfiles(repo)
1266 for file in lfiles:
1266 for file in lfiles:
1267 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1267 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1268 orphans.discard(file)
1268 orphans.discard(file)
1269 for lfile in orphans:
1269 for lfile in orphans:
1270 lfdirstate.drop(lfile)
1270 lfdirstate.drop(lfile)
1271 lfdirstate.write()
1271 lfdirstate.write()
1272 finally:
1272 finally:
1273 wlock.release()
1273 wlock.release()
1274 return result
1274 return result
1275
1275
1276 def overridetransplant(orig, ui, repo, *revs, **opts):
1276 def overridetransplant(orig, ui, repo, *revs, **opts):
1277 resuming = opts.get('continue')
1277 resuming = opts.get('continue')
1278 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1278 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1279 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1279 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1280 try:
1280 try:
1281 result = orig(ui, repo, *revs, **opts)
1281 result = orig(ui, repo, *revs, **opts)
1282 finally:
1282 finally:
1283 repo._lfstatuswriters.pop()
1283 repo._lfstatuswriters.pop()
1284 repo._lfcommithooks.pop()
1284 repo._lfcommithooks.pop()
1285 return result
1285 return result
1286
1286
1287 def overridecat(orig, ui, repo, file1, *pats, **opts):
1287 def overridecat(orig, ui, repo, file1, *pats, **opts):
1288 ctx = scmutil.revsingle(repo, opts.get('rev'))
1288 ctx = scmutil.revsingle(repo, opts.get('rev'))
1289 err = 1
1289 err = 1
1290 notbad = set()
1290 notbad = set()
1291 m = scmutil.match(ctx, (file1,) + pats, opts)
1291 m = scmutil.match(ctx, (file1,) + pats, opts)
1292 origmatchfn = m.matchfn
1292 origmatchfn = m.matchfn
1293 def lfmatchfn(f):
1293 def lfmatchfn(f):
1294 if origmatchfn(f):
1294 if origmatchfn(f):
1295 return True
1295 return True
1296 lf = lfutil.splitstandin(f)
1296 lf = lfutil.splitstandin(f)
1297 if lf is None:
1297 if lf is None:
1298 return False
1298 return False
1299 notbad.add(lf)
1299 notbad.add(lf)
1300 return origmatchfn(lf)
1300 return origmatchfn(lf)
1301 m.matchfn = lfmatchfn
1301 m.matchfn = lfmatchfn
1302 origbadfn = m.bad
1302 origbadfn = m.bad
1303 def lfbadfn(f, msg):
1303 def lfbadfn(f, msg):
1304 if not f in notbad:
1304 if not f in notbad:
1305 origbadfn(f, msg)
1305 origbadfn(f, msg)
1306 m.bad = lfbadfn
1306 m.bad = lfbadfn
1307 for f in ctx.walk(m):
1307 for f in ctx.walk(m):
1308 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1308 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1309 pathname=f)
1309 pathname=f)
1310 lf = lfutil.splitstandin(f)
1310 lf = lfutil.splitstandin(f)
1311 if lf is None or origmatchfn(f):
1311 if lf is None or origmatchfn(f):
1312 # duplicating unreachable code from commands.cat
1312 # duplicating unreachable code from commands.cat
1313 data = ctx[f].data()
1313 data = ctx[f].data()
1314 if opts.get('decode'):
1314 if opts.get('decode'):
1315 data = repo.wwritedata(f, data)
1315 data = repo.wwritedata(f, data)
1316 fp.write(data)
1316 fp.write(data)
1317 else:
1317 else:
1318 hash = lfutil.readstandin(repo, lf, ctx.rev())
1318 hash = lfutil.readstandin(repo, lf, ctx.rev())
1319 if not lfutil.inusercache(repo.ui, hash):
1319 if not lfutil.inusercache(repo.ui, hash):
1320 store = basestore._openstore(repo)
1320 store = basestore._openstore(repo)
1321 success, missing = store.get([(lf, hash)])
1321 success, missing = store.get([(lf, hash)])
1322 if len(success) != 1:
1322 if len(success) != 1:
1323 raise util.Abort(
1323 raise util.Abort(
1324 _('largefile %s is not in cache and could not be '
1324 _('largefile %s is not in cache and could not be '
1325 'downloaded') % lf)
1325 'downloaded') % lf)
1326 path = lfutil.usercachepath(repo.ui, hash)
1326 path = lfutil.usercachepath(repo.ui, hash)
1327 fpin = open(path, "rb")
1327 fpin = open(path, "rb")
1328 for chunk in util.filechunkiter(fpin, 128 * 1024):
1328 for chunk in util.filechunkiter(fpin, 128 * 1024):
1329 fp.write(chunk)
1329 fp.write(chunk)
1330 fpin.close()
1330 fpin.close()
1331 fp.close()
1331 fp.close()
1332 err = 0
1332 err = 0
1333 return err
1333 return err
1334
1334
1335 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1335 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1336 *args, **kwargs):
1336 *args, **kwargs):
1337 wlock = repo.wlock()
1337 wlock = repo.wlock()
1338 try:
1338 try:
1339 # branch | | |
1339 # branch | | |
1340 # merge | force | partial | action
1340 # merge | force | partial | action
1341 # -------+-------+---------+--------------
1341 # -------+-------+---------+--------------
1342 # x | x | x | linear-merge
1342 # x | x | x | linear-merge
1343 # o | x | x | branch-merge
1343 # o | x | x | branch-merge
1344 # x | o | x | overwrite (as clean update)
1344 # x | o | x | overwrite (as clean update)
1345 # o | o | x | force-branch-merge (*1)
1345 # o | o | x | force-branch-merge (*1)
1346 # x | x | o | (*)
1346 # x | x | o | (*)
1347 # o | x | o | (*)
1347 # o | x | o | (*)
1348 # x | o | o | overwrite (as revert)
1348 # x | o | o | overwrite (as revert)
1349 # o | o | o | (*)
1349 # o | o | o | (*)
1350 #
1350 #
1351 # (*) don't care
1351 # (*) don't care
1352 # (*1) deprecated, but used internally (e.g.: "rebase --collapse")
1352 # (*1) deprecated, but used internally (e.g.: "rebase --collapse")
1353
1353
1354 linearmerge = not branchmerge and not force and not partial
1354 linearmerge = not branchmerge and not force and not partial
1355
1355
1356 if linearmerge or (branchmerge and force and not partial):
1356 if linearmerge or (branchmerge and force and not partial):
1357 # update standins for linear-merge or force-branch-merge,
1357 # update standins for linear-merge or force-branch-merge,
1358 # because largefiles in the working directory may be modified
1358 # because largefiles in the working directory may be modified
1359 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1359 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1360 unsure, s = lfdirstate.status(match_.always(repo.root,
1360 unsure, s = lfdirstate.status(match_.always(repo.root,
1361 repo.getcwd()),
1361 repo.getcwd()),
1362 [], False, False, False)
1362 [], False, False, False)
1363 pctx = repo['.']
1363 pctx = repo['.']
1364 for lfile in unsure + s.modified:
1364 for lfile in unsure + s.modified:
1365 lfileabs = repo.wvfs.join(lfile)
1365 lfileabs = repo.wvfs.join(lfile)
1366 if not os.path.exists(lfileabs):
1366 if not os.path.exists(lfileabs):
1367 continue
1367 continue
1368 lfhash = lfutil.hashrepofile(repo, lfile)
1368 lfhash = lfutil.hashrepofile(repo, lfile)
1369 standin = lfutil.standin(lfile)
1369 standin = lfutil.standin(lfile)
1370 lfutil.writestandin(repo, standin, lfhash,
1370 lfutil.writestandin(repo, standin, lfhash,
1371 lfutil.getexecutable(lfileabs))
1371 lfutil.getexecutable(lfileabs))
1372 if (standin in pctx and
1372 if (standin in pctx and
1373 lfhash == lfutil.readstandin(repo, lfile, '.')):
1373 lfhash == lfutil.readstandin(repo, lfile, '.')):
1374 lfdirstate.normal(lfile)
1374 lfdirstate.normal(lfile)
1375 for lfile in s.added:
1375 for lfile in s.added:
1376 lfutil.updatestandin(repo, lfutil.standin(lfile))
1376 lfutil.updatestandin(repo, lfutil.standin(lfile))
1377 lfdirstate.write()
1377 lfdirstate.write()
1378
1378
1379 if linearmerge:
1379 if linearmerge:
1380 # Only call updatelfiles on the standins that have changed
1380 # Only call updatelfiles on the standins that have changed
1381 # to save time
1381 # to save time
1382 oldstandins = lfutil.getstandinsstate(repo)
1382 oldstandins = lfutil.getstandinsstate(repo)
1383
1383
1384 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1384 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1385
1385
1386 filelist = None
1386 filelist = None
1387 if linearmerge:
1387 if linearmerge:
1388 newstandins = lfutil.getstandinsstate(repo)
1388 newstandins = lfutil.getstandinsstate(repo)
1389 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1389 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1390
1390
1391 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1391 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1392 normallookup=partial, checked=linearmerge)
1392 normallookup=partial, checked=linearmerge)
1393
1393
1394 return result
1394 return result
1395 finally:
1395 finally:
1396 wlock.release()
1396 wlock.release()
1397
1397
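The action table at the top of mergeupdate boils down to a small decision on (branchmerge, force, partial): standins are refreshed for a linear merge or a forced branch merge, and only the linear case narrows the later updatelfiles call to the changed standins. A sketch of that decision as a plain function (the name and return shape are illustrative):

def lfupdateplan(branchmerge, force, partial):
    """Return (refresh_standins, narrow_to_changed_standins)."""
    linearmerge = not branchmerge and not force and not partial
    refresh = linearmerge or (branchmerge and force and not partial)
    return refresh, linearmerge

print(lfupdateplan(False, False, False))  # linear update       -> (True, True)
print(lfupdateplan(True, True, False))    # forced branch merge -> (True, False)
print(lfupdateplan(True, False, False))   # plain branch merge  -> (False, False)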
1398 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1398 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1399 result = orig(repo, files, *args, **kwargs)
1399 result = orig(repo, files, *args, **kwargs)
1400
1400
1401 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1401 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1402 if filelist:
1402 if filelist:
1403 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1403 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1404 printmessage=False, normallookup=True)
1404 printmessage=False, normallookup=True)
1405
1405
1406 return result
1406 return result
@@ -1,3256 +1,3255 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile, cStringIO, shutil
10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 import changelog
14 import changelog
15 import bookmarks
15 import bookmarks
16 import encoding
16 import encoding
17 import crecord as crecordmod
17 import crecord as crecordmod
18 import lock as lockmod
18 import lock as lockmod
19
19
20 def parsealiases(cmd):
20 def parsealiases(cmd):
21 return cmd.lstrip("^").split("|")
21 return cmd.lstrip("^").split("|")
22
22
23 def setupwrapcolorwrite(ui):
23 def setupwrapcolorwrite(ui):
24 # wrap ui.write so diff output can be labeled/colorized
24 # wrap ui.write so diff output can be labeled/colorized
25 def wrapwrite(orig, *args, **kw):
25 def wrapwrite(orig, *args, **kw):
26 label = kw.pop('label', '')
26 label = kw.pop('label', '')
27 for chunk, l in patch.difflabel(lambda: args):
27 for chunk, l in patch.difflabel(lambda: args):
28 orig(chunk, label=label + l)
28 orig(chunk, label=label + l)
29
29
30 oldwrite = ui.write
30 oldwrite = ui.write
31 def wrap(*args, **kwargs):
31 def wrap(*args, **kwargs):
32 return wrapwrite(oldwrite, *args, **kwargs)
32 return wrapwrite(oldwrite, *args, **kwargs)
33 setattr(ui, 'write', wrap)
33 setattr(ui, 'write', wrap)
34 return oldwrite
34 return oldwrite
35
35
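setupwrapcolorwrite swaps ui.write for a wrapper that pushes output through patch.difflabel so diff chunks can be labeled for colorization, and hands back the original write so recordfilter can restore it in a finally block. A toy version of that swap-and-restore pattern (the UI class and the label guess are illustrative, not Mercurial's ui):

class UI(object):
    def write(self, text, label=''):
        prefix = '[%s] ' % label if label else ''
        print(prefix + text)

def wrapwrite(ui):
    oldwrite = ui.write
    def wrapped(text, label=''):
        # crude stand-in for patch.difflabel: tag added lines
        guessed = label or ('diff.inserted' if text.startswith('+') else '')
        oldwrite(text, label=guessed)
    ui.write = wrapped
    return oldwrite

ui = UI()
oldwrite = wrapwrite(ui)
try:
    ui.write('+new line')        # -> [diff.inserted] +new line
finally:
    ui.write = oldwrite          # restore, mirroring recordfilter's finally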
36 def filterchunks(ui, originalhunks, usecurses, testfile):
36 def filterchunks(ui, originalhunks, usecurses, testfile):
37 if usecurses:
37 if usecurses:
38 if testfile:
38 if testfile:
39 recordfn = crecordmod.testdecorator(testfile,
39 recordfn = crecordmod.testdecorator(testfile,
40 crecordmod.testchunkselector)
40 crecordmod.testchunkselector)
41 else:
41 else:
42 recordfn = crecordmod.chunkselector
42 recordfn = crecordmod.chunkselector
43
43
44 return crecordmod.filterpatch(ui, originalhunks, recordfn)
44 return crecordmod.filterpatch(ui, originalhunks, recordfn)
45
45
46 else:
46 else:
47 return patch.filterpatch(ui, originalhunks)
47 return patch.filterpatch(ui, originalhunks)
48
48
49 def recordfilter(ui, originalhunks):
49 def recordfilter(ui, originalhunks):
50 usecurses = ui.configbool('experimental', 'crecord', False)
50 usecurses = ui.configbool('experimental', 'crecord', False)
51 testfile = ui.config('experimental', 'crecordtest', None)
51 testfile = ui.config('experimental', 'crecordtest', None)
52 oldwrite = setupwrapcolorwrite(ui)
52 oldwrite = setupwrapcolorwrite(ui)
53 try:
53 try:
54 newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
54 newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
55 finally:
55 finally:
56 ui.write = oldwrite
56 ui.write = oldwrite
57 return newchunks
57 return newchunks
58
58
59 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
59 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
60 filterfn, *pats, **opts):
60 filterfn, *pats, **opts):
61 import merge as mergemod
61 import merge as mergemod
62 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
62 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
63 ishunk = lambda x: isinstance(x, hunkclasses)
63 ishunk = lambda x: isinstance(x, hunkclasses)
64
64
65 if not ui.interactive():
65 if not ui.interactive():
66 raise util.Abort(_('running non-interactively, use %s instead') %
66 raise util.Abort(_('running non-interactively, use %s instead') %
67 cmdsuggest)
67 cmdsuggest)
68
68
69 # make sure username is set before going interactive
69 # make sure username is set before going interactive
70 if not opts.get('user'):
70 if not opts.get('user'):
71 ui.username() # raise exception, username not provided
71 ui.username() # raise exception, username not provided
72
72
73 def recordfunc(ui, repo, message, match, opts):
73 def recordfunc(ui, repo, message, match, opts):
74 """This is the generic record driver.
74 """This is the generic record driver.
75
75
76 Its job is to interactively filter local changes, and
76 Its job is to interactively filter local changes, and
77 accordingly prepare the working directory into a state in which the
77 accordingly prepare the working directory into a state in which the
78 job can be delegated to a non-interactive commit command such as
78 job can be delegated to a non-interactive commit command such as
79 'commit' or 'qrefresh'.
79 'commit' or 'qrefresh'.
80
80
81 After the actual job is done by non-interactive command, the
81 After the actual job is done by non-interactive command, the
82 working directory is restored to its original state.
82 working directory is restored to its original state.
83
83
84 In the end we'll record interesting changes, and everything else
84 In the end we'll record interesting changes, and everything else
85 will be left in place, so the user can continue working.
85 will be left in place, so the user can continue working.
86 """
86 """
87
87
88 checkunfinished(repo, commit=True)
88 checkunfinished(repo, commit=True)
89 merge = len(repo[None].parents()) > 1
89 merge = len(repo[None].parents()) > 1
90 if merge:
90 if merge:
91 raise util.Abort(_('cannot partially commit a merge '
91 raise util.Abort(_('cannot partially commit a merge '
92 '(use "hg commit" instead)'))
92 '(use "hg commit" instead)'))
93
93
94 status = repo.status(match=match)
94 status = repo.status(match=match)
95 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
95 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
96 diffopts.nodates = True
96 diffopts.nodates = True
97 diffopts.git = True
97 diffopts.git = True
98 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
98 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
99 originalchunks = patch.parsepatch(originaldiff)
99 originalchunks = patch.parsepatch(originaldiff)
100
100
101 # 1. filter the patch, so we have the intending-to-apply subset of it
101 # 1. filter the patch, so we have the intending-to-apply subset of it
102 try:
102 try:
103 chunks = filterfn(ui, originalchunks)
103 chunks = filterfn(ui, originalchunks)
104 except patch.PatchError, err:
104 except patch.PatchError, err:
105 raise util.Abort(_('error parsing patch: %s') % err)
105 raise util.Abort(_('error parsing patch: %s') % err)
106
106
107 contenders = set()
107 contenders = set()
108 for h in chunks:
108 for h in chunks:
109 try:
109 try:
110 contenders.update(set(h.files()))
110 contenders.update(set(h.files()))
111 except AttributeError:
111 except AttributeError:
112 pass
112 pass
113
113
114 changed = status.modified + status.added + status.removed
114 changed = status.modified + status.added + status.removed
115 newfiles = [f for f in changed if f in contenders]
115 newfiles = [f for f in changed if f in contenders]
116 if not newfiles:
116 if not newfiles:
117 ui.status(_('no changes to record\n'))
117 ui.status(_('no changes to record\n'))
118 return 0
118 return 0
119
119
120 newandmodifiedfiles = set()
120 newandmodifiedfiles = set()
121 for h in chunks:
121 for h in chunks:
122 isnew = h.filename() in status.added
122 isnew = h.filename() in status.added
123 if ishunk(h) and isnew and not h in originalchunks:
123 if ishunk(h) and isnew and not h in originalchunks:
124 newandmodifiedfiles.add(h.filename())
124 newandmodifiedfiles.add(h.filename())
125
125
126 modified = set(status.modified)
126 modified = set(status.modified)
127
127
128 # 2. backup changed files, so we can restore them in the end
128 # 2. backup changed files, so we can restore them in the end
129
129
130 if backupall:
130 if backupall:
131 tobackup = changed
131 tobackup = changed
132 else:
132 else:
133 tobackup = [f for f in newfiles
133 tobackup = [f for f in newfiles
134 if f in modified or f in newandmodifiedfiles]
134 if f in modified or f in newandmodifiedfiles]
135
135
136 backups = {}
136 backups = {}
137 if tobackup:
137 if tobackup:
138 backupdir = repo.join('record-backups')
138 backupdir = repo.join('record-backups')
139 try:
139 try:
140 os.mkdir(backupdir)
140 os.mkdir(backupdir)
141 except OSError, err:
141 except OSError, err:
142 if err.errno != errno.EEXIST:
142 if err.errno != errno.EEXIST:
143 raise
143 raise
144 try:
144 try:
145 # backup continues
145 # backup continues
146 for f in tobackup:
146 for f in tobackup:
147 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
147 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
148 dir=backupdir)
148 dir=backupdir)
149 os.close(fd)
149 os.close(fd)
150 ui.debug('backup %r as %r\n' % (f, tmpname))
150 ui.debug('backup %r as %r\n' % (f, tmpname))
151 util.copyfile(repo.wjoin(f), tmpname)
151 util.copyfile(repo.wjoin(f), tmpname)
152 shutil.copystat(repo.wjoin(f), tmpname)
152 shutil.copystat(repo.wjoin(f), tmpname)
153 backups[f] = tmpname
153 backups[f] = tmpname
154
154
155 fp = cStringIO.StringIO()
155 fp = cStringIO.StringIO()
156 for c in chunks:
156 for c in chunks:
157 fname = c.filename()
157 fname = c.filename()
158 if fname in backups or fname in newandmodifiedfiles:
158 if fname in backups or fname in newandmodifiedfiles:
159 c.write(fp)
159 c.write(fp)
160 dopatch = fp.tell()
160 dopatch = fp.tell()
161 fp.seek(0)
161 fp.seek(0)
162
162
163 [os.unlink(c) for c in newandmodifiedfiles]
163 [os.unlink(c) for c in newandmodifiedfiles]
164
164
165 # 3a. apply filtered patch to clean repo (clean)
165 # 3a. apply filtered patch to clean repo (clean)
166 if backups:
166 if backups:
167 # Equivalent to hg.revert
167 # Equivalent to hg.revert
168 choices = lambda key: key in backups
168 choices = lambda key: key in backups
169 mergemod.update(repo, repo.dirstate.p1(),
169 mergemod.update(repo, repo.dirstate.p1(),
170 False, True, choices)
170 False, True, choices)
171
171
172
172
173 # 3b. (apply)
173 # 3b. (apply)
174 if dopatch:
174 if dopatch:
175 try:
175 try:
176 ui.debug('applying patch\n')
176 ui.debug('applying patch\n')
177 ui.debug(fp.getvalue())
177 ui.debug(fp.getvalue())
178 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
178 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
179 except patch.PatchError, err:
179 except patch.PatchError, err:
180 raise util.Abort(str(err))
180 raise util.Abort(str(err))
181 del fp
181 del fp
182
182
183 # 4. We prepared working directory according to filtered
183 # 4. We prepared working directory according to filtered
184 # patch. Now is the time to delegate the job to
184 # patch. Now is the time to delegate the job to
185 # commit/qrefresh or the like!
185 # commit/qrefresh or the like!
186
186
187 # Make all of the pathnames absolute.
187 # Make all of the pathnames absolute.
188 newfiles = [repo.wjoin(nf) for nf in newfiles]
188 newfiles = [repo.wjoin(nf) for nf in newfiles]
189 commitfunc(ui, repo, *newfiles, **opts)
189 commitfunc(ui, repo, *newfiles, **opts)
190
190
191 return 0
191 return 0
192 finally:
192 finally:
193 # 5. finally restore backed-up files
193 # 5. finally restore backed-up files
194 try:
194 try:
195 for realname, tmpname in backups.iteritems():
195 for realname, tmpname in backups.iteritems():
196 ui.debug('restoring %r to %r\n' % (tmpname, realname))
196 ui.debug('restoring %r to %r\n' % (tmpname, realname))
197 util.copyfile(tmpname, repo.wjoin(realname))
197 util.copyfile(tmpname, repo.wjoin(realname))
198 # Our calls to copystat() here and above are a
198 # Our calls to copystat() here and above are a
199 # hack to trick any editors that have f open into
199 # hack to trick any editors that have f open into
200 # thinking that we haven't modified it.
200 # thinking that we haven't modified it.
201 #
201 #
202 # Also note that this is racy, as an editor could
202 # Also note that this is racy, as an editor could
203 # notice the file's mtime before we've finished
203 # notice the file's mtime before we've finished
204 # writing it.
204 # writing it.
205 shutil.copystat(tmpname, repo.wjoin(realname))
205 shutil.copystat(tmpname, repo.wjoin(realname))
206 os.unlink(tmpname)
206 os.unlink(tmpname)
207 if tobackup:
207 if tobackup:
208 os.rmdir(backupdir)
208 os.rmdir(backupdir)
209 except OSError:
209 except OSError:
210 pass
210 pass
211
211
212 return commit(ui, repo, recordfunc, pats, opts)
212 return commit(ui, repo, recordfunc, pats, opts)
213
213
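recordfunc follows a back-up / revert / apply-filtered-patch / commit / restore cycle so that only the selected hunks reach the commit while the working copy keeps everything else. A compact, self-contained sketch of just the backup-and-restore half (the helper name and temp-file layout are illustrative):

import os, shutil, tempfile

def withbackups(paths, action):
    """Back up 'paths', run action(), then restore the original contents."""
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backups = {}
    try:
        for p in paths:
            fd, tmp = tempfile.mkstemp(prefix=os.path.basename(p) + '.',
                                       dir=backupdir)
            os.close(fd)
            shutil.copyfile(p, tmp)
            backups[p] = tmp
        return action()
    finally:
        for realname, tmp in backups.items():
            shutil.copyfile(tmp, realname)   # put the original content back
            os.unlink(tmp)
        os.rmdir(backupdir)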
214 def findpossible(cmd, table, strict=False):
214 def findpossible(cmd, table, strict=False):
215 """
215 """
216 Return cmd -> (aliases, command table entry)
216 Return cmd -> (aliases, command table entry)
217 for each matching command.
217 for each matching command.
218 Return debug commands (or their aliases) only if no normal command matches.
218 Return debug commands (or their aliases) only if no normal command matches.
219 """
219 """
220 choice = {}
220 choice = {}
221 debugchoice = {}
221 debugchoice = {}
222
222
223 if cmd in table:
223 if cmd in table:
224 # short-circuit exact matches, "log" alias beats "^log|history"
224 # short-circuit exact matches, "log" alias beats "^log|history"
225 keys = [cmd]
225 keys = [cmd]
226 else:
226 else:
227 keys = table.keys()
227 keys = table.keys()
228
228
229 allcmds = []
229 allcmds = []
230 for e in keys:
230 for e in keys:
231 aliases = parsealiases(e)
231 aliases = parsealiases(e)
232 allcmds.extend(aliases)
232 allcmds.extend(aliases)
233 found = None
233 found = None
234 if cmd in aliases:
234 if cmd in aliases:
235 found = cmd
235 found = cmd
236 elif not strict:
236 elif not strict:
237 for a in aliases:
237 for a in aliases:
238 if a.startswith(cmd):
238 if a.startswith(cmd):
239 found = a
239 found = a
240 break
240 break
241 if found is not None:
241 if found is not None:
242 if aliases[0].startswith("debug") or found.startswith("debug"):
242 if aliases[0].startswith("debug") or found.startswith("debug"):
243 debugchoice[found] = (aliases, table[e])
243 debugchoice[found] = (aliases, table[e])
244 else:
244 else:
245 choice[found] = (aliases, table[e])
245 choice[found] = (aliases, table[e])
246
246
247 if not choice and debugchoice:
247 if not choice and debugchoice:
248 choice = debugchoice
248 choice = debugchoice
249
249
250 return choice, allcmds
250 return choice, allcmds
251
251
252 def findcmd(cmd, table, strict=True):
252 def findcmd(cmd, table, strict=True):
253 """Return (aliases, command table entry) for command string."""
253 """Return (aliases, command table entry) for command string."""
254 choice, allcmds = findpossible(cmd, table, strict)
254 choice, allcmds = findpossible(cmd, table, strict)
255
255
256 if cmd in choice:
256 if cmd in choice:
257 return choice[cmd]
257 return choice[cmd]
258
258
259 if len(choice) > 1:
259 if len(choice) > 1:
260 clist = choice.keys()
260 clist = choice.keys()
261 clist.sort()
261 clist.sort()
262 raise error.AmbiguousCommand(cmd, clist)
262 raise error.AmbiguousCommand(cmd, clist)
263
263
264 if choice:
264 if choice:
265 return choice.values()[0]
265 return choice.values()[0]
266
266
267 raise error.UnknownCommand(cmd, allcmds)
267 raise error.UnknownCommand(cmd, allcmds)
268
268
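findpossible and findcmd implement prefix matching over the '|'-separated command aliases, preferring exact matches and refusing ambiguous prefixes. A self-contained sketch of the same lookup rule, with ValueError/KeyError standing in for AmbiguousCommand/UnknownCommand:

def findcommand(cmd, table):
    """table maps 'name|alias1|...' -> entry; cmd may be a unique prefix."""
    exact = [key for key in table if cmd in key.lstrip('^').split('|')]
    if exact:
        return table[exact[0]]
    matches = [key for key in table
               if any(a.startswith(cmd) for a in key.lstrip('^').split('|'))]
    if len(matches) > 1:
        raise ValueError('ambiguous command: %s' % cmd)   # AmbiguousCommand
    if not matches:
        raise KeyError('unknown command: %s' % cmd)       # UnknownCommand
    return table[matches[0]]

table = {'^log|history': 'log entry', 'locate': 'locate entry'}
print(findcommand('history', table))   # exact alias   -> 'log entry'
print(findcommand('loc', table))       # unique prefix -> 'locate entry'
# findcommand('lo', table) would raise: both 'log' and 'locate' match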
def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo):
    if repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for s in sorted(ctx.substate):
        if ctx.sub(s).dirty():
            raise util.Abort(_("uncommitted changes in subrepo %s") % s)

def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
323
323
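# Editor's note: illustrative sketch, not part of the original module.
# mergeeditform() just picks the committemplate sub-name, e.g.:
#
#   mergeeditform(True, 'import')        # -> 'import.merge'
#   mergeeditform(False, 'import')       # -> 'import.normal'
#   mergeeditform(repo[None], 'commit')  # -> 'commit.merge' while a merge is
#                                        #    in progress, else 'commit.normal'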
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows the description to be changed
    before storing.

    'extramsg' is an extra message to be shown in the editor instead of
    the 'Leave message empty to abort commit' line. The 'HG: ' prefix and
    EOL are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
354
354
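# Editor's note: illustrative sketch, not part of the original module.
# Whatever getcommiteditor() returns is callable as editor(repo, ctx, subs):
#
#   editor = getcommiteditor(edit=True, editform='commit.normal')
#   # -> a lambda wrapping commitforceeditor (always spawns the editor)
#   editor = getcommiteditor(editform='commit.normal')
#   # -> a lambda wrapping commiteditor (editor only when the message is empty)
#   editor = getcommiteditor()
#   # -> commiteditor itself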
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise util.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
414
414
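# Editor's note: illustrative sketch, not part of the original module.
# makefilename() expands the %-escapes registered above; with node set,
# total=10 and seqno=3 a pattern behaves roughly like this (the hash and the
# repository name 'myrepo' are made up for the example):
#
#   makefilename(repo, 'hg-%h.patch', node)       # -> 'hg-a1b2c3d4e5f6.patch'
#   makefilename(repo, '%b-%R-%n.patch', node,
#                total=10, seqno=3)                # -> 'myrepo-42-03.patch'
#   makefilename(repo, 'hg-%x.patch', node)        # -> util.Abort: invalid
#                                                  #    format spec 'x'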
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    if attr == 'close':
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)

def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise util.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
487
487
def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
716
716
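# Editor's note: illustrative sketch, not part of the original module.
# The pattern/destination rules enforced above mean, for example:
#
#   hg copy a.txt b.txt          # ok: single source, new file name
#   hg copy a.txt b.txt dir/     # ok only if dir/ already exists
#   hg copy 'glob:*.txt' b.txt   # aborts: with multiple sources the
#                                #   destination must be an existing directory
#   hg rename --after old new    # records a move already made on disk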
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    def writepid(pid):
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
796
796
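# Editor's note: illustrative sketch, not part of the original module.
# With --daemon, service() effectively runs twice: the parent re-executes the
# current command with --daemon-pipefds=<lockfile> and waits (via
# util.rundetached) for that lock file to disappear; the detached child takes
# the second branch above, calls os.setsid(), removes the lock file to signal
# that it has started, and redirects stdio to /dev/null or the log file.
#
#   hg serve -d --pid-file=hg.pid --errorlog=serve.log   # typical caller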
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be the parents of the created commit
    :opts: the full dict of options passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    tmpname, message, user, date, branch, nodeid, p1, p2 = \
        patch.extract(ui, hunk)

    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        return (None, None, False)
    msg = _('applied to working directory')

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise util.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            repo.dirstate.beginparentchange()
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError, e:
                if not partial:
                    raise util.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                n = repo.commit(message, opts.get('user') or user,
                                opts.get('date') or date, match=m,
                                editor=editor, force=partial)
            repo.dirstate.endparentchange()
        else:
            if opts.get('exact') or opts.get('import_branch'):
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError, e:
                    raise util.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            opts.get('user') or user,
                                            opts.get('date') or date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and opts.get('no_commit'):
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise util.Abort(_('patch is damaged or loses information'))
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
946
946
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)


        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1008
1008
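# Editor's note: illustrative sketch, not part of the original module.
# export() writes one patch per revision; the default template is expanded by
# makefilename(), so e.g. (the repo name 'myrepo' is made up):
#
#   export(repo, [10, 11])                          # -> hg-<shorthash>.patch
#   export(repo, [10, 11], template='%b-%n.patch')  # -> myrepo-1.patch,
#                                                   #    myrepo-2.patch
#   export(repo, [10, 11], template='')             # -> written to the ui
#                                                   #    via repo.ui.write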
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   relative='', listsubrepos=False):
    '''show diff or diffstat.'''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if relative:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), relative)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1066
1066
1067 class changeset_printer(object):
1067 class changeset_printer(object):
1068 '''show changeset information when templating not requested.'''
1068 '''show changeset information when templating not requested.'''
1069
1069
1070 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1070 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1071 self.ui = ui
1071 self.ui = ui
1072 self.repo = repo
1072 self.repo = repo
1073 self.buffered = buffered
1073 self.buffered = buffered
1074 self.matchfn = matchfn
1074 self.matchfn = matchfn
1075 self.diffopts = diffopts
1075 self.diffopts = diffopts
1076 self.header = {}
1076 self.header = {}
1077 self.hunk = {}
1077 self.hunk = {}
1078 self.lastheader = None
1078 self.lastheader = None
1079 self.footer = None
1079 self.footer = None
1080
1080
1081 def flush(self, rev):
1081 def flush(self, rev):
1082 if rev in self.header:
1082 if rev in self.header:
1083 h = self.header[rev]
1083 h = self.header[rev]
1084 if h != self.lastheader:
1084 if h != self.lastheader:
1085 self.lastheader = h
1085 self.lastheader = h
1086 self.ui.write(h)
1086 self.ui.write(h)
1087 del self.header[rev]
1087 del self.header[rev]
1088 if rev in self.hunk:
1088 if rev in self.hunk:
1089 self.ui.write(self.hunk[rev])
1089 self.ui.write(self.hunk[rev])
1090 del self.hunk[rev]
1090 del self.hunk[rev]
1091 return 1
1091 return 1
1092 return 0
1092 return 0
1093
1093
1094 def close(self):
1094 def close(self):
1095 if self.footer:
1095 if self.footer:
1096 self.ui.write(self.footer)
1096 self.ui.write(self.footer)
1097
1097
1098 def show(self, ctx, copies=None, matchfn=None, **props):
1098 def show(self, ctx, copies=None, matchfn=None, **props):
1099 if self.buffered:
1099 if self.buffered:
1100 self.ui.pushbuffer()
1100 self.ui.pushbuffer()
1101 self._show(ctx, copies, matchfn, props)
1101 self._show(ctx, copies, matchfn, props)
1102 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1102 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1103 else:
1103 else:
1104 self._show(ctx, copies, matchfn, props)
1104 self._show(ctx, copies, matchfn, props)
1105
1105
1106 def _show(self, ctx, copies, matchfn, props):
1106 def _show(self, ctx, copies, matchfn, props):
1107 '''show a single changeset or file revision'''
1107 '''show a single changeset or file revision'''
1108 changenode = ctx.node()
1108 changenode = ctx.node()
1109 rev = ctx.rev()
1109 rev = ctx.rev()
1110
1110
1111 if self.ui.quiet:
1111 if self.ui.quiet:
1112 self.ui.write("%d:%s\n" % (rev, short(changenode)),
1112 self.ui.write("%d:%s\n" % (rev, short(changenode)),
1113 label='log.node')
1113 label='log.node')
1114 return
1114 return
1115
1115
1116 log = self.repo.changelog
1116 log = self.repo.changelog
1117 date = util.datestr(ctx.date())
1117 date = util.datestr(ctx.date())
1118
1118
1119 if self.ui.debugflag:
1119 if self.ui.debugflag:
1120 hexfunc = hex
1120 hexfunc = hex
1121 else:
1121 else:
1122 hexfunc = short
1122 hexfunc = short
1123
1123
1124 parents = [(p, hexfunc(log.node(p)))
1124 parents = [(p, hexfunc(log.node(p)))
1125 for p in self._meaningful_parentrevs(log, rev)]
1125 for p in self._meaningful_parentrevs(log, rev)]
1126
1126
1127 # i18n: column positioning for "hg log"
1127 # i18n: column positioning for "hg log"
1128 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
1128 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
1129 label='log.changeset changeset.%s' % ctx.phasestr())
1129 label='log.changeset changeset.%s' % ctx.phasestr())
1130
1130
1131 # branches are shown first before any other names due to backwards
1131 # branches are shown first before any other names due to backwards
1132 # compatibility
1132 # compatibility
1133 branch = ctx.branch()
1133 branch = ctx.branch()
1134 # don't show the default branch name
1134 # don't show the default branch name
1135 if branch != 'default':
1135 if branch != 'default':
1136 # i18n: column positioning for "hg log"
1136 # i18n: column positioning for "hg log"
1137 self.ui.write(_("branch: %s\n") % branch,
1137 self.ui.write(_("branch: %s\n") % branch,
1138 label='log.branch')
1138 label='log.branch')
1139
1139
1140 for name, ns in self.repo.names.iteritems():
1140 for name, ns in self.repo.names.iteritems():
1141 # branches has special logic already handled above, so here we just
1141 # branches has special logic already handled above, so here we just
1142 # skip it
1142 # skip it
1143 if name == 'branches':
1143 if name == 'branches':
1144 continue
1144 continue
1145 # we will use the templatename as the color name since those two
1145 # we will use the templatename as the color name since those two
1146 # should be the same
1146 # should be the same
1147 for name in ns.names(self.repo, changenode):
1147 for name in ns.names(self.repo, changenode):
1148 self.ui.write(ns.logfmt % name,
1148 self.ui.write(ns.logfmt % name,
1149 label='log.%s' % ns.colorname)
1149 label='log.%s' % ns.colorname)
1150 if self.ui.debugflag:
1150 if self.ui.debugflag:
1151 # i18n: column positioning for "hg log"
1151 # i18n: column positioning for "hg log"
1152 self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
1152 self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
1153 label='log.phase')
1153 label='log.phase')
1154 for parent in parents:
1154 for parent in parents:
1155 label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
1155 label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
1156 # i18n: column positioning for "hg log"
1156 # i18n: column positioning for "hg log"
1157 self.ui.write(_("parent: %d:%s\n") % parent,
1157 self.ui.write(_("parent: %d:%s\n") % parent,
1158 label=label)
1158 label=label)
1159
1159
1160 if self.ui.debugflag:
1160 if self.ui.debugflag:
1161 mnode = ctx.manifestnode()
1161 mnode = ctx.manifestnode()
1162 # i18n: column positioning for "hg log"
1162 # i18n: column positioning for "hg log"
1163 self.ui.write(_("manifest: %d:%s\n") %
1163 self.ui.write(_("manifest: %d:%s\n") %
1164 (self.repo.manifest.rev(mnode), hex(mnode)),
1164 (self.repo.manifest.rev(mnode), hex(mnode)),
1165 label='ui.debug log.manifest')
1165 label='ui.debug log.manifest')
1166 # i18n: column positioning for "hg log"
1166 # i18n: column positioning for "hg log"
1167 self.ui.write(_("user: %s\n") % ctx.user(),
1167 self.ui.write(_("user: %s\n") % ctx.user(),
1168 label='log.user')
1168 label='log.user')
1169 # i18n: column positioning for "hg log"
1169 # i18n: column positioning for "hg log"
1170 self.ui.write(_("date: %s\n") % date,
1170 self.ui.write(_("date: %s\n") % date,
1171 label='log.date')
1171 label='log.date')
1172
1172
1173 if self.ui.debugflag:
1173 if self.ui.debugflag:
1174 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
1174 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
1175 for key, value in zip([# i18n: column positioning for "hg log"
1175 for key, value in zip([# i18n: column positioning for "hg log"
1176 _("files:"),
1176 _("files:"),
1177 # i18n: column positioning for "hg log"
1177 # i18n: column positioning for "hg log"
1178 _("files+:"),
1178 _("files+:"),
1179 # i18n: column positioning for "hg log"
1179 # i18n: column positioning for "hg log"
1180 _("files-:")], files):
1180 _("files-:")], files):
1181 if value:
1181 if value:
1182 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1182 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1183 label='ui.debug log.files')
1183 label='ui.debug log.files')
1184 elif ctx.files() and self.ui.verbose:
1184 elif ctx.files() and self.ui.verbose:
1185 # i18n: column positioning for "hg log"
1185 # i18n: column positioning for "hg log"
1186 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1186 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1187 label='ui.note log.files')
1187 label='ui.note log.files')
1188 if copies and self.ui.verbose:
1188 if copies and self.ui.verbose:
1189 copies = ['%s (%s)' % c for c in copies]
1189 copies = ['%s (%s)' % c for c in copies]
1190 # i18n: column positioning for "hg log"
1190 # i18n: column positioning for "hg log"
1191 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1191 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1192 label='ui.note log.copies')
1192 label='ui.note log.copies')
1193
1193
1194 extra = ctx.extra()
1194 extra = ctx.extra()
1195 if extra and self.ui.debugflag:
1195 if extra and self.ui.debugflag:
1196 for key, value in sorted(extra.items()):
1196 for key, value in sorted(extra.items()):
1197 # i18n: column positioning for "hg log"
1197 # i18n: column positioning for "hg log"
1198 self.ui.write(_("extra: %s=%s\n")
1198 self.ui.write(_("extra: %s=%s\n")
1199 % (key, value.encode('string_escape')),
1199 % (key, value.encode('string_escape')),
1200 label='ui.debug log.extra')
1200 label='ui.debug log.extra')
1201
1201
1202 description = ctx.description().strip()
1202 description = ctx.description().strip()
1203 if description:
1203 if description:
1204 if self.ui.verbose:
1204 if self.ui.verbose:
1205 self.ui.write(_("description:\n"),
1205 self.ui.write(_("description:\n"),
1206 label='ui.note log.description')
1206 label='ui.note log.description')
1207 self.ui.write(description,
1207 self.ui.write(description,
1208 label='ui.note log.description')
1208 label='ui.note log.description')
1209 self.ui.write("\n\n")
1209 self.ui.write("\n\n")
1210 else:
1210 else:
1211 # i18n: column positioning for "hg log"
1211 # i18n: column positioning for "hg log"
1212 self.ui.write(_("summary: %s\n") %
1212 self.ui.write(_("summary: %s\n") %
1213 description.splitlines()[0],
1213 description.splitlines()[0],
1214 label='log.summary')
1214 label='log.summary')
1215 self.ui.write("\n")
1215 self.ui.write("\n")
1216
1216
1217 self.showpatch(changenode, matchfn)
1217 self.showpatch(changenode, matchfn)
1218
1218
1219 def showpatch(self, node, matchfn):
1219 def showpatch(self, node, matchfn):
1220 if not matchfn:
1220 if not matchfn:
1221 matchfn = self.matchfn
1221 matchfn = self.matchfn
1222 if matchfn:
1222 if matchfn:
1223 stat = self.diffopts.get('stat')
1223 stat = self.diffopts.get('stat')
1224 diff = self.diffopts.get('patch')
1224 diff = self.diffopts.get('patch')
1225 diffopts = patch.diffallopts(self.ui, self.diffopts)
1225 diffopts = patch.diffallopts(self.ui, self.diffopts)
1226 prev = self.repo.changelog.parents(node)[0]
1226 prev = self.repo.changelog.parents(node)[0]
1227 if stat:
1227 if stat:
1228 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1228 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1229 match=matchfn, stat=True)
1229 match=matchfn, stat=True)
1230 if diff:
1230 if diff:
1231 if stat:
1231 if stat:
1232 self.ui.write("\n")
1232 self.ui.write("\n")
1233 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1233 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1234 match=matchfn, stat=False)
1234 match=matchfn, stat=False)
1235 self.ui.write("\n")
1235 self.ui.write("\n")
1236
1236
1237 def _meaningful_parentrevs(self, log, rev):
1237 def _meaningful_parentrevs(self, log, rev):
1238 """Return list of meaningful (or all if debug) parentrevs for rev.
1238 """Return list of meaningful (or all if debug) parentrevs for rev.
1239
1239
1240 For merges (two non-nullrev revisions) both parents are meaningful.
1240 For merges (two non-nullrev revisions) both parents are meaningful.
1241 Otherwise the first parent revision is considered meaningful if it
1241 Otherwise the first parent revision is considered meaningful if it
1242 is not the preceding revision.
1242 is not the preceding revision.
1243 """
1243 """
1244 parents = log.parentrevs(rev)
1244 parents = log.parentrevs(rev)
1245 if not self.ui.debugflag and parents[1] == nullrev:
1245 if not self.ui.debugflag and parents[1] == nullrev:
1246 if parents[0] >= rev - 1:
1246 if parents[0] >= rev - 1:
1247 parents = []
1247 parents = []
1248 else:
1248 else:
1249 parents = [parents[0]]
1249 parents = [parents[0]]
1250 return parents
1250 return parents
1251
1251
1252 class jsonchangeset(changeset_printer):
1252 class jsonchangeset(changeset_printer):
1253 '''format changeset information.'''
1253 '''format changeset information.'''
1254
1254
1255 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1255 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1256 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1256 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1257 self.cache = {}
1257 self.cache = {}
1258 self._first = True
1258 self._first = True
1259
1259
1260 def close(self):
1260 def close(self):
1261 if not self._first:
1261 if not self._first:
1262 self.ui.write("\n]\n")
1262 self.ui.write("\n]\n")
1263 else:
1263 else:
1264 self.ui.write("[]\n")
1264 self.ui.write("[]\n")
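# Shape of the output this bookkeeping produces (hash values elided): the
# first _show() call opens the array with "[\n {", every later call starts
# with ",\n {", and close() appends the final "]", so two quiet-mode
# revisions render roughly as:
#
#   [
#    {
#     "rev": 0,
#     "node": "..."
#    },
#    {
#     "rev": 1,
#     "node": "..."
#    }
#   ]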
1265
1265
1266 def _show(self, ctx, copies, matchfn, props):
1266 def _show(self, ctx, copies, matchfn, props):
1267 '''show a single changeset or file revision'''
1267 '''show a single changeset or file revision'''
1268 hexnode = hex(ctx.node())
1268 hexnode = hex(ctx.node())
1269 rev = ctx.rev()
1269 rev = ctx.rev()
1270 j = encoding.jsonescape
1270 j = encoding.jsonescape
1271
1271
1272 if self._first:
1272 if self._first:
1273 self.ui.write("[\n {")
1273 self.ui.write("[\n {")
1274 self._first = False
1274 self._first = False
1275 else:
1275 else:
1276 self.ui.write(",\n {")
1276 self.ui.write(",\n {")
1277
1277
1278 if self.ui.quiet:
1278 if self.ui.quiet:
1279 self.ui.write('\n "rev": %d' % rev)
1279 self.ui.write('\n "rev": %d' % rev)
1280 self.ui.write(',\n "node": "%s"' % hexnode)
1280 self.ui.write(',\n "node": "%s"' % hexnode)
1281 self.ui.write('\n }')
1281 self.ui.write('\n }')
1282 return
1282 return
1283
1283
1284 self.ui.write('\n "rev": %d' % rev)
1284 self.ui.write('\n "rev": %d' % rev)
1285 self.ui.write(',\n "node": "%s"' % hexnode)
1285 self.ui.write(',\n "node": "%s"' % hexnode)
1286 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1286 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1287 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1287 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1288 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1288 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1289 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1289 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1290 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1290 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1291
1291
1292 self.ui.write(',\n "bookmarks": [%s]' %
1292 self.ui.write(',\n "bookmarks": [%s]' %
1293 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1293 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1294 self.ui.write(',\n "tags": [%s]' %
1294 self.ui.write(',\n "tags": [%s]' %
1295 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1295 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1296 self.ui.write(',\n "parents": [%s]' %
1296 self.ui.write(',\n "parents": [%s]' %
1297 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1297 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1298
1298
1299 if self.ui.debugflag:
1299 if self.ui.debugflag:
1300 self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))
1300 self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))
1301
1301
1302 self.ui.write(',\n "extra": {%s}' %
1302 self.ui.write(',\n "extra": {%s}' %
1303 ", ".join('"%s": "%s"' % (j(k), j(v))
1303 ", ".join('"%s": "%s"' % (j(k), j(v))
1304 for k, v in ctx.extra().items()))
1304 for k, v in ctx.extra().items()))
1305
1305
1306 files = ctx.p1().status(ctx)
1306 files = ctx.p1().status(ctx)
1307 self.ui.write(',\n "modified": [%s]' %
1307 self.ui.write(',\n "modified": [%s]' %
1308 ", ".join('"%s"' % j(f) for f in files[0]))
1308 ", ".join('"%s"' % j(f) for f in files[0]))
1309 self.ui.write(',\n "added": [%s]' %
1309 self.ui.write(',\n "added": [%s]' %
1310 ", ".join('"%s"' % j(f) for f in files[1]))
1310 ", ".join('"%s"' % j(f) for f in files[1]))
1311 self.ui.write(',\n "removed": [%s]' %
1311 self.ui.write(',\n "removed": [%s]' %
1312 ", ".join('"%s"' % j(f) for f in files[2]))
1312 ", ".join('"%s"' % j(f) for f in files[2]))
1313
1313
1314 elif self.ui.verbose:
1314 elif self.ui.verbose:
1315 self.ui.write(',\n "files": [%s]' %
1315 self.ui.write(',\n "files": [%s]' %
1316 ", ".join('"%s"' % j(f) for f in ctx.files()))
1316 ", ".join('"%s"' % j(f) for f in ctx.files()))
1317
1317
1318 if copies:
1318 if copies:
1319 self.ui.write(',\n "copies": {%s}' %
1319 self.ui.write(',\n "copies": {%s}' %
1320 ", ".join('"%s": "%s"' % (j(k), j(v))
1320 ", ".join('"%s": "%s"' % (j(k), j(v))
1321 for k, v in copies))
1321 for k, v in copies))
1322
1322
1323 matchfn = self.matchfn
1323 matchfn = self.matchfn
1324 if matchfn:
1324 if matchfn:
1325 stat = self.diffopts.get('stat')
1325 stat = self.diffopts.get('stat')
1326 diff = self.diffopts.get('patch')
1326 diff = self.diffopts.get('patch')
1327 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1327 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1328 node, prev = ctx.node(), ctx.p1().node()
1328 node, prev = ctx.node(), ctx.p1().node()
1329 if stat:
1329 if stat:
1330 self.ui.pushbuffer()
1330 self.ui.pushbuffer()
1331 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1331 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1332 match=matchfn, stat=True)
1332 match=matchfn, stat=True)
1333 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1333 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1334 if diff:
1334 if diff:
1335 self.ui.pushbuffer()
1335 self.ui.pushbuffer()
1336 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1336 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1337 match=matchfn, stat=False)
1337 match=matchfn, stat=False)
1338 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1338 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1339
1339
1340 self.ui.write("\n }")
1340 self.ui.write("\n }")
1341
1341
1342 class changeset_templater(changeset_printer):
1342 class changeset_templater(changeset_printer):
1343 '''format changeset information.'''
1343 '''format changeset information.'''
1344
1344
1345 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1345 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1346 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1346 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1347 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1347 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1348 defaulttempl = {
1348 defaulttempl = {
1349 'parent': '{rev}:{node|formatnode} ',
1349 'parent': '{rev}:{node|formatnode} ',
1350 'manifest': '{rev}:{node|formatnode}',
1350 'manifest': '{rev}:{node|formatnode}',
1351 'file_copy': '{name} ({source})',
1351 'file_copy': '{name} ({source})',
1352 'extra': '{key}={value|stringescape}'
1352 'extra': '{key}={value|stringescape}'
1353 }
1353 }
1354 # filecopy is preserved for compatibility reasons
1354 # filecopy is preserved for compatibility reasons
1355 defaulttempl['filecopy'] = defaulttempl['file_copy']
1355 defaulttempl['filecopy'] = defaulttempl['file_copy']
1356 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1356 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1357 cache=defaulttempl)
1357 cache=defaulttempl)
1358 if tmpl:
1358 if tmpl:
1359 self.t.cache['changeset'] = tmpl
1359 self.t.cache['changeset'] = tmpl
1360
1360
1361 self.cache = {}
1361 self.cache = {}
1362
1362
1363 def _meaningful_parentrevs(self, ctx):
1363 def _meaningful_parentrevs(self, ctx):
1364 """Return list of meaningful (or all if debug) parentrevs for rev.
1364 """Return list of meaningful (or all if debug) parentrevs for rev.
1365 """
1365 """
1366 parents = ctx.parents()
1366 parents = ctx.parents()
1367 if len(parents) > 1:
1367 if len(parents) > 1:
1368 return parents
1368 return parents
1369 if self.ui.debugflag:
1369 if self.ui.debugflag:
1370 return [parents[0], self.repo['null']]
1370 return [parents[0], self.repo['null']]
1371 if parents[0].rev() >= ctx.rev() - 1:
1371 if parents[0].rev() >= ctx.rev() - 1:
1372 return []
1372 return []
1373 return parents
1373 return parents
1374
1374
1375 def _show(self, ctx, copies, matchfn, props):
1375 def _show(self, ctx, copies, matchfn, props):
1376 '''show a single changeset or file revision'''
1376 '''show a single changeset or file revision'''
1377
1377
1378 showlist = templatekw.showlist
1378 showlist = templatekw.showlist
1379
1379
1380 # showparents() behaviour depends on the ui trace level, which
1380 # showparents() behaviour depends on the ui trace level, which
1381 # causes unexpected behaviour at the templating level and makes
1381 # causes unexpected behaviour at the templating level and makes
1382 # it hard to extract into a standalone function. Its
1382 # it hard to extract into a standalone function. Its
1383 # behaviour cannot be changed, so leave it here for now.
1383 # behaviour cannot be changed, so leave it here for now.
1384 def showparents(**args):
1384 def showparents(**args):
1385 ctx = args['ctx']
1385 ctx = args['ctx']
1386 parents = [[('rev', p.rev()),
1386 parents = [[('rev', p.rev()),
1387 ('node', p.hex()),
1387 ('node', p.hex()),
1388 ('phase', p.phasestr())]
1388 ('phase', p.phasestr())]
1389 for p in self._meaningful_parentrevs(ctx)]
1389 for p in self._meaningful_parentrevs(ctx)]
1390 return showlist('parent', parents, **args)
1390 return showlist('parent', parents, **args)
1391
1391
1392 props = props.copy()
1392 props = props.copy()
1393 props.update(templatekw.keywords)
1393 props.update(templatekw.keywords)
1394 props['parents'] = showparents
1394 props['parents'] = showparents
1395 props['templ'] = self.t
1395 props['templ'] = self.t
1396 props['ctx'] = ctx
1396 props['ctx'] = ctx
1397 props['repo'] = self.repo
1397 props['repo'] = self.repo
1398 props['revcache'] = {'copies': copies}
1398 props['revcache'] = {'copies': copies}
1399 props['cache'] = self.cache
1399 props['cache'] = self.cache
1400
1400
1401 # find correct templates for current mode
1401 # find correct templates for current mode
1402
1402
1403 tmplmodes = [
1403 tmplmodes = [
1404 (True, None),
1404 (True, None),
1405 (self.ui.verbose, 'verbose'),
1405 (self.ui.verbose, 'verbose'),
1406 (self.ui.quiet, 'quiet'),
1406 (self.ui.quiet, 'quiet'),
1407 (self.ui.debugflag, 'debug'),
1407 (self.ui.debugflag, 'debug'),
1408 ]
1408 ]
1409
1409
1410 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1410 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1411 for mode, postfix in tmplmodes:
1411 for mode, postfix in tmplmodes:
1412 for type in types:
1412 for type in types:
1413 cur = postfix and ('%s_%s' % (type, postfix)) or type
1413 cur = postfix and ('%s_%s' % (type, postfix)) or type
1414 if mode and cur in self.t:
1414 if mode and cur in self.t:
1415 types[type] = cur
1415 types[type] = cur
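# Hypothetical illustration: with --verbose in effect and a map file that
# defines both 'changeset' and 'changeset_verbose', the loop above leaves
# types['changeset'] == 'changeset_verbose'; entries the map file does not
# define keep their initial values ('' for header and footer, 'changeset'
# for the body).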
1416
1416
1417 try:
1417 try:
1418
1418
1419 # write header
1419 # write header
1420 if types['header']:
1420 if types['header']:
1421 h = templater.stringify(self.t(types['header'], **props))
1421 h = templater.stringify(self.t(types['header'], **props))
1422 if self.buffered:
1422 if self.buffered:
1423 self.header[ctx.rev()] = h
1423 self.header[ctx.rev()] = h
1424 else:
1424 else:
1425 if self.lastheader != h:
1425 if self.lastheader != h:
1426 self.lastheader = h
1426 self.lastheader = h
1427 self.ui.write(h)
1427 self.ui.write(h)
1428
1428
1429 # write changeset metadata, then patch if requested
1429 # write changeset metadata, then patch if requested
1430 key = types['changeset']
1430 key = types['changeset']
1431 self.ui.write(templater.stringify(self.t(key, **props)))
1431 self.ui.write(templater.stringify(self.t(key, **props)))
1432 self.showpatch(ctx.node(), matchfn)
1432 self.showpatch(ctx.node(), matchfn)
1433
1433
1434 if types['footer']:
1434 if types['footer']:
1435 if not self.footer:
1435 if not self.footer:
1436 self.footer = templater.stringify(self.t(types['footer'],
1436 self.footer = templater.stringify(self.t(types['footer'],
1437 **props))
1437 **props))
1438
1438
1439 except KeyError, inst:
1439 except KeyError, inst:
1440 msg = _("%s: no key named '%s'")
1440 msg = _("%s: no key named '%s'")
1441 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1441 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1442 except SyntaxError, inst:
1442 except SyntaxError, inst:
1443 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1443 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1444
1444
1445 def gettemplate(ui, tmpl, style):
1445 def gettemplate(ui, tmpl, style):
1446 """
1446 """
1447 Find the template matching the given template spec or style.
1447 Find the template matching the given template spec or style.
1448 """
1448 """
1449
1449
1450 # ui settings
1450 # ui settings
1451 if not tmpl and not style: # template are stronger than style
1451 if not tmpl and not style: # template are stronger than style
1452 tmpl = ui.config('ui', 'logtemplate')
1452 tmpl = ui.config('ui', 'logtemplate')
1453 if tmpl:
1453 if tmpl:
1454 try:
1454 try:
1455 tmpl = templater.parsestring(tmpl)
1455 tmpl = templater.parsestring(tmpl)
1456 except SyntaxError:
1456 except SyntaxError:
1457 tmpl = templater.parsestring(tmpl, quoted=False)
1457 tmpl = templater.parsestring(tmpl, quoted=False)
1458 return tmpl, None
1458 return tmpl, None
1459 else:
1459 else:
1460 style = util.expandpath(ui.config('ui', 'style', ''))
1460 style = util.expandpath(ui.config('ui', 'style', ''))
1461
1461
1462 if not tmpl and style:
1462 if not tmpl and style:
1463 mapfile = style
1463 mapfile = style
1464 if not os.path.split(mapfile)[0]:
1464 if not os.path.split(mapfile)[0]:
1465 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1465 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1466 or templater.templatepath(mapfile))
1466 or templater.templatepath(mapfile))
1467 if mapname:
1467 if mapname:
1468 mapfile = mapname
1468 mapfile = mapname
1469 return None, mapfile
1469 return None, mapfile
1470
1470
1471 if not tmpl:
1471 if not tmpl:
1472 return None, None
1472 return None, None
1473
1473
1474 # looks like a literal template?
1474 # looks like a literal template?
1475 if '{' in tmpl:
1475 if '{' in tmpl:
1476 return tmpl, None
1476 return tmpl, None
1477
1477
1478 # perhaps a stock style?
1478 # perhaps a stock style?
1479 if not os.path.split(tmpl)[0]:
1479 if not os.path.split(tmpl)[0]:
1480 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1480 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1481 or templater.templatepath(tmpl))
1481 or templater.templatepath(tmpl))
1482 if mapname and os.path.isfile(mapname):
1482 if mapname and os.path.isfile(mapname):
1483 return None, mapname
1483 return None, mapname
1484
1484
1485 # perhaps it's a reference to [templates]
1485 # perhaps it's a reference to [templates]
1486 t = ui.config('templates', tmpl)
1486 t = ui.config('templates', tmpl)
1487 if t:
1487 if t:
1488 try:
1488 try:
1489 tmpl = templater.parsestring(t)
1489 tmpl = templater.parsestring(t)
1490 except SyntaxError:
1490 except SyntaxError:
1491 tmpl = templater.parsestring(t, quoted=False)
1491 tmpl = templater.parsestring(t, quoted=False)
1492 return tmpl, None
1492 return tmpl, None
1493
1493
1494 if tmpl == 'list':
1494 if tmpl == 'list':
1495 ui.write(_("available styles: %s\n") % templater.stylelist())
1495 ui.write(_("available styles: %s\n") % templater.stylelist())
1496 raise util.Abort(_("specify a template"))
1496 raise util.Abort(_("specify a template"))
1497
1497
1498 # perhaps it's a path to a map or a template
1498 # perhaps it's a path to a map or a template
1499 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1499 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1500 # is it a mapfile for a style?
1500 # is it a mapfile for a style?
1501 if os.path.basename(tmpl).startswith("map-"):
1501 if os.path.basename(tmpl).startswith("map-"):
1502 return None, os.path.realpath(tmpl)
1502 return None, os.path.realpath(tmpl)
1503 tmpl = open(tmpl).read()
1503 tmpl = open(tmpl).read()
1504 return tmpl, None
1504 return tmpl, None
1505
1505
1506 # constant string?
1506 # constant string?
1507 return tmpl, None
1507 return tmpl, None
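# Hedged examples of the lookup order above (the paths shown are whatever
# templater.templatepath() resolves on a given install):
#
#   hg log -T '{rev}:{node|short}\n'   # contains '{' -> returned as a literal tmpl
#   hg log -T changelog                # stock style -> (None, path to map-cmdline.changelog)
#   hg log -T list                     # prints the available styles, then aborts
#
# When neither --template nor --style is given, [ui] logtemplate is tried
# first and [ui] style is only consulted as a fallback, per the first branch.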
1508
1508
1509 def show_changeset(ui, repo, opts, buffered=False):
1509 def show_changeset(ui, repo, opts, buffered=False):
1510 """show one changeset using template or regular display.
1510 """show one changeset using template or regular display.
1511
1511
1512 Display format will be the first non-empty hit of:
1512 Display format will be the first non-empty hit of:
1513 1. option 'template'
1513 1. option 'template'
1514 2. option 'style'
1514 2. option 'style'
1515 3. [ui] setting 'logtemplate'
1515 3. [ui] setting 'logtemplate'
1516 4. [ui] setting 'style'
1516 4. [ui] setting 'style'
1517 If all of these values are either unset or the empty string,
1517 If all of these values are either unset or the empty string,
1518 regular display via changeset_printer() is done.
1518 regular display via changeset_printer() is done.
1519 """
1519 """
1520 # options
1520 # options
1521 matchfn = None
1521 matchfn = None
1522 if opts.get('patch') or opts.get('stat'):
1522 if opts.get('patch') or opts.get('stat'):
1523 matchfn = scmutil.matchall(repo)
1523 matchfn = scmutil.matchall(repo)
1524
1524
1525 if opts.get('template') == 'json':
1525 if opts.get('template') == 'json':
1526 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1526 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1527
1527
1528 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1528 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1529
1529
1530 if not tmpl and not mapfile:
1530 if not tmpl and not mapfile:
1531 return changeset_printer(ui, repo, matchfn, opts, buffered)
1531 return changeset_printer(ui, repo, matchfn, opts, buffered)
1532
1532
1533 try:
1533 try:
1534 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1534 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1535 buffered)
1535 buffered)
1536 except SyntaxError, inst:
1536 except SyntaxError, inst:
1537 raise util.Abort(inst.args[0])
1537 raise util.Abort(inst.args[0])
1538 return t
1538 return t
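def _showchangesetexample(ui, repo):
    # Hypothetical usage sketch: how a caller might drive the displayer
    # returned above. The revset and template are arbitrary examples;
    # show() and close() are the standard displayer entry points.
    opts = {'template': '{rev} {desc|firstline}\n'}
    displayer = show_changeset(ui, repo, opts)
    for ctx in repo.set('limit(reverse(all()), 3)'):
        displayer.show(ctx)
    displayer.close()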
1539
1539
1540 def showmarker(ui, marker):
1540 def showmarker(ui, marker):
1541 """utility function to display obsolescence marker in a readable way
1541 """utility function to display obsolescence marker in a readable way
1542
1542
1543 To be used by debug function."""
1543 To be used by debug function."""
1544 ui.write(hex(marker.precnode()))
1544 ui.write(hex(marker.precnode()))
1545 for repl in marker.succnodes():
1545 for repl in marker.succnodes():
1546 ui.write(' ')
1546 ui.write(' ')
1547 ui.write(hex(repl))
1547 ui.write(hex(repl))
1548 ui.write(' %X ' % marker.flags())
1548 ui.write(' %X ' % marker.flags())
1549 parents = marker.parentnodes()
1549 parents = marker.parentnodes()
1550 if parents is not None:
1550 if parents is not None:
1551 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1551 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1552 ui.write('(%s) ' % util.datestr(marker.date()))
1552 ui.write('(%s) ' % util.datestr(marker.date()))
1553 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1553 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1554 sorted(marker.metadata().items())
1554 sorted(marker.metadata().items())
1555 if t[0] != 'date')))
1555 if t[0] != 'date')))
1556 ui.write('\n')
1556 ui.write('\n')
1557
1557
1558 def finddate(ui, repo, date):
1558 def finddate(ui, repo, date):
1559 """Find the tipmost changeset that matches the given date spec"""
1559 """Find the tipmost changeset that matches the given date spec"""
1560
1560
1561 df = util.matchdate(date)
1561 df = util.matchdate(date)
1562 m = scmutil.matchall(repo)
1562 m = scmutil.matchall(repo)
1563 results = {}
1563 results = {}
1564
1564
1565 def prep(ctx, fns):
1565 def prep(ctx, fns):
1566 d = ctx.date()
1566 d = ctx.date()
1567 if df(d[0]):
1567 if df(d[0]):
1568 results[ctx.rev()] = d
1568 results[ctx.rev()] = d
1569
1569
1570 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1570 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1571 rev = ctx.rev()
1571 rev = ctx.rev()
1572 if rev in results:
1572 if rev in results:
1573 ui.status(_("found revision %s from %s\n") %
1573 ui.status(_("found revision %s from %s\n") %
1574 (rev, util.datestr(results[rev])))
1574 (rev, util.datestr(results[rev])))
1575 return str(rev)
1575 return str(rev)
1576
1576
1577 raise util.Abort(_("revision matching date not found"))
1577 raise util.Abort(_("revision matching date not found"))
1578
1578
1579 def increasingwindows(windowsize=8, sizelimit=512):
1579 def increasingwindows(windowsize=8, sizelimit=512):
1580 while True:
1580 while True:
1581 yield windowsize
1581 yield windowsize
1582 if windowsize < sizelimit:
1582 if windowsize < sizelimit:
1583 windowsize *= 2
1583 windowsize *= 2
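# With the defaults this yields 8, 16, 32, 64, 128, 256, 512, 512, 512, ...
# i.e. the window doubles until it hits sizelimit and then stays there, so
# early "hg log" output appears quickly while large walks are still batched.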
1584
1584
1585 class FileWalkError(Exception):
1585 class FileWalkError(Exception):
1586 pass
1586 pass
1587
1587
1588 def walkfilerevs(repo, match, follow, revs, fncache):
1588 def walkfilerevs(repo, match, follow, revs, fncache):
1589 '''Walks the file history for the matched files.
1589 '''Walks the file history for the matched files.
1590
1590
1591 Returns the changeset revs that are involved in the file history.
1591 Returns the changeset revs that are involved in the file history.
1592
1592
1593 Throws FileWalkError if the file history can't be walked using
1593 Throws FileWalkError if the file history can't be walked using
1594 filelogs alone.
1594 filelogs alone.
1595 '''
1595 '''
1596 wanted = set()
1596 wanted = set()
1597 copies = []
1597 copies = []
1598 minrev, maxrev = min(revs), max(revs)
1598 minrev, maxrev = min(revs), max(revs)
1599 def filerevgen(filelog, last):
1599 def filerevgen(filelog, last):
1600 """
1600 """
1601 Only files, no patterns. Check the history of each file.
1601 Only files, no patterns. Check the history of each file.
1602
1602
1603 Examines filelog entries within the minrev..maxrev linkrev range.
1603 Examines filelog entries within the minrev..maxrev linkrev range.
1604 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1604 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1605 tuples in backwards order.
1605 tuples in backwards order.
1606 """
1606 """
1607 cl_count = len(repo)
1607 cl_count = len(repo)
1608 revs = []
1608 revs = []
1609 for j in xrange(0, last + 1):
1609 for j in xrange(0, last + 1):
1610 linkrev = filelog.linkrev(j)
1610 linkrev = filelog.linkrev(j)
1611 if linkrev < minrev:
1611 if linkrev < minrev:
1612 continue
1612 continue
1613 # only yield revs for which we have the changelog; this can
1613 # only yield revs for which we have the changelog; this can
1614 # happen while doing "hg log" during a pull or commit
1614 # happen while doing "hg log" during a pull or commit
1615 if linkrev >= cl_count:
1615 if linkrev >= cl_count:
1616 break
1616 break
1617
1617
1618 parentlinkrevs = []
1618 parentlinkrevs = []
1619 for p in filelog.parentrevs(j):
1619 for p in filelog.parentrevs(j):
1620 if p != nullrev:
1620 if p != nullrev:
1621 parentlinkrevs.append(filelog.linkrev(p))
1621 parentlinkrevs.append(filelog.linkrev(p))
1622 n = filelog.node(j)
1622 n = filelog.node(j)
1623 revs.append((linkrev, parentlinkrevs,
1623 revs.append((linkrev, parentlinkrevs,
1624 follow and filelog.renamed(n)))
1624 follow and filelog.renamed(n)))
1625
1625
1626 return reversed(revs)
1626 return reversed(revs)
1627 def iterfiles():
1627 def iterfiles():
1628 pctx = repo['.']
1628 pctx = repo['.']
1629 for filename in match.files():
1629 for filename in match.files():
1630 if follow:
1630 if follow:
1631 if filename not in pctx:
1631 if filename not in pctx:
1632 raise util.Abort(_('cannot follow file not in parent '
1632 raise util.Abort(_('cannot follow file not in parent '
1633 'revision: "%s"') % filename)
1633 'revision: "%s"') % filename)
1634 yield filename, pctx[filename].filenode()
1634 yield filename, pctx[filename].filenode()
1635 else:
1635 else:
1636 yield filename, None
1636 yield filename, None
1637 for filename_node in copies:
1637 for filename_node in copies:
1638 yield filename_node
1638 yield filename_node
1639
1639
1640 for file_, node in iterfiles():
1640 for file_, node in iterfiles():
1641 filelog = repo.file(file_)
1641 filelog = repo.file(file_)
1642 if not len(filelog):
1642 if not len(filelog):
1643 if node is None:
1643 if node is None:
1644 # A zero count may be a directory or deleted file, so
1644 # A zero count may be a directory or deleted file, so
1645 # try to find matching entries on the slow path.
1645 # try to find matching entries on the slow path.
1646 if follow:
1646 if follow:
1647 raise util.Abort(
1647 raise util.Abort(
1648 _('cannot follow nonexistent file: "%s"') % file_)
1648 _('cannot follow nonexistent file: "%s"') % file_)
1649 raise FileWalkError("Cannot walk via filelog")
1649 raise FileWalkError("Cannot walk via filelog")
1650 else:
1650 else:
1651 continue
1651 continue
1652
1652
1653 if node is None:
1653 if node is None:
1654 last = len(filelog) - 1
1654 last = len(filelog) - 1
1655 else:
1655 else:
1656 last = filelog.rev(node)
1656 last = filelog.rev(node)
1657
1657
1658
1658
1659 # keep track of all ancestors of the file
1659 # keep track of all ancestors of the file
1660 ancestors = set([filelog.linkrev(last)])
1660 ancestors = set([filelog.linkrev(last)])
1661
1661
1662 # iterate from latest to oldest revision
1662 # iterate from latest to oldest revision
1663 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1663 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1664 if not follow:
1664 if not follow:
1665 if rev > maxrev:
1665 if rev > maxrev:
1666 continue
1666 continue
1667 else:
1667 else:
1668 # Note that last might not be the first interesting
1668 # Note that last might not be the first interesting
1669 # rev to us:
1669 # rev to us:
1670 # if the file has been changed after maxrev, we'll
1670 # if the file has been changed after maxrev, we'll
1671 # have linkrev(last) > maxrev, and we still need
1671 # have linkrev(last) > maxrev, and we still need
1672 # to explore the file graph
1672 # to explore the file graph
1673 if rev not in ancestors:
1673 if rev not in ancestors:
1674 continue
1674 continue
1675 # XXX insert 1327 fix here
1675 # XXX insert 1327 fix here
1676 if flparentlinkrevs:
1676 if flparentlinkrevs:
1677 ancestors.update(flparentlinkrevs)
1677 ancestors.update(flparentlinkrevs)
1678
1678
1679 fncache.setdefault(rev, []).append(file_)
1679 fncache.setdefault(rev, []).append(file_)
1680 wanted.add(rev)
1680 wanted.add(rev)
1681 if copied:
1681 if copied:
1682 copies.append(copied)
1682 copies.append(copied)
1683
1683
1684 return wanted
1684 return wanted
1685
1685
1686 class _followfilter(object):
1686 class _followfilter(object):
1687 def __init__(self, repo, onlyfirst=False):
1687 def __init__(self, repo, onlyfirst=False):
1688 self.repo = repo
1688 self.repo = repo
1689 self.startrev = nullrev
1689 self.startrev = nullrev
1690 self.roots = set()
1690 self.roots = set()
1691 self.onlyfirst = onlyfirst
1691 self.onlyfirst = onlyfirst
1692
1692
1693 def match(self, rev):
1693 def match(self, rev):
1694 def realparents(rev):
1694 def realparents(rev):
1695 if self.onlyfirst:
1695 if self.onlyfirst:
1696 return self.repo.changelog.parentrevs(rev)[0:1]
1696 return self.repo.changelog.parentrevs(rev)[0:1]
1697 else:
1697 else:
1698 return filter(lambda x: x != nullrev,
1698 return filter(lambda x: x != nullrev,
1699 self.repo.changelog.parentrevs(rev))
1699 self.repo.changelog.parentrevs(rev))
1700
1700
1701 if self.startrev == nullrev:
1701 if self.startrev == nullrev:
1702 self.startrev = rev
1702 self.startrev = rev
1703 return True
1703 return True
1704
1704
1705 if rev > self.startrev:
1705 if rev > self.startrev:
1706 # forward: all descendants
1706 # forward: all descendants
1707 if not self.roots:
1707 if not self.roots:
1708 self.roots.add(self.startrev)
1708 self.roots.add(self.startrev)
1709 for parent in realparents(rev):
1709 for parent in realparents(rev):
1710 if parent in self.roots:
1710 if parent in self.roots:
1711 self.roots.add(rev)
1711 self.roots.add(rev)
1712 return True
1712 return True
1713 else:
1713 else:
1714 # backwards: all parents
1714 # backwards: all parents
1715 if not self.roots:
1715 if not self.roots:
1716 self.roots.update(realparents(self.startrev))
1716 self.roots.update(realparents(self.startrev))
1717 if rev in self.roots:
1717 if rev in self.roots:
1718 self.roots.remove(rev)
1718 self.roots.remove(rev)
1719 self.roots.update(realparents(rev))
1719 self.roots.update(realparents(rev))
1720 return True
1720 return True
1721
1721
1722 return False
1722 return False
1723
1723
1724 def walkchangerevs(repo, match, opts, prepare):
1724 def walkchangerevs(repo, match, opts, prepare):
1725 '''Iterate over files and the revs in which they changed.
1725 '''Iterate over files and the revs in which they changed.
1726
1726
1727 Callers most commonly need to iterate backwards over the history
1727 Callers most commonly need to iterate backwards over the history
1728 in which they are interested. Doing so has awful (quadratic-looking)
1728 in which they are interested. Doing so has awful (quadratic-looking)
1729 performance, so we use iterators in a "windowed" way.
1729 performance, so we use iterators in a "windowed" way.
1730
1730
1731 We walk a window of revisions in the desired order. Within the
1731 We walk a window of revisions in the desired order. Within the
1732 window, we first walk forwards to gather data, then in the desired
1732 window, we first walk forwards to gather data, then in the desired
1733 order (usually backwards) to display it.
1733 order (usually backwards) to display it.
1734
1734
1735 This function returns an iterator yielding contexts. Before
1735 This function returns an iterator yielding contexts. Before
1736 yielding each context, the iterator will first call the prepare
1736 yielding each context, the iterator will first call the prepare
1737 function on each context in the window in forward order.'''
1737 function on each context in the window in forward order.'''
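# Usage sketch, mirroring the prepare-callback pattern finddate() uses
# above (the keyword filtering here is only an example):
#
#   m = scmutil.matchall(repo)
#   hits = {}
#   def prep(ctx, fns):
#       # called on each windowed context in forward order, before yielding
#       if 'bug' in ctx.description():
#           hits[ctx.rev()] = ctx.description().splitlines()[0]
#   for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#       if ctx.rev() in hits:
#           ui.write("%d: %s\n" % (ctx.rev(), hits[ctx.rev()]))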
1738
1738
1739 follow = opts.get('follow') or opts.get('follow_first')
1739 follow = opts.get('follow') or opts.get('follow_first')
1740 revs = _logrevs(repo, opts)
1740 revs = _logrevs(repo, opts)
1741 if not revs:
1741 if not revs:
1742 return []
1742 return []
1743 wanted = set()
1743 wanted = set()
1744 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1744 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1745 fncache = {}
1745 fncache = {}
1746 change = repo.changectx
1746 change = repo.changectx
1747
1747
1748 # First step is to fill wanted, the set of revisions that we want to yield.
1748 # First step is to fill wanted, the set of revisions that we want to yield.
1749 # When it does not induce extra cost, we also fill fncache for revisions in
1749 # When it does not induce extra cost, we also fill fncache for revisions in
1750 # wanted: a cache of filenames that were changed (ctx.files()) and that
1750 # wanted: a cache of filenames that were changed (ctx.files()) and that
1751 # match the file filtering conditions.
1751 # match the file filtering conditions.
1752
1752
1753 if match.always():
1753 if match.always():
1754 # No files, no patterns. Display all revs.
1754 # No files, no patterns. Display all revs.
1755 wanted = revs
1755 wanted = revs
1756
1756
1757 if not slowpath and match.files():
1757 if not slowpath and match.files():
1758 # We only have to read through the filelog to find wanted revisions
1758 # We only have to read through the filelog to find wanted revisions
1759
1759
1760 try:
1760 try:
1761 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1761 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1762 except FileWalkError:
1762 except FileWalkError:
1763 slowpath = True
1763 slowpath = True
1764
1764
1765 # We decided to fall back to the slowpath because at least one
1765 # We decided to fall back to the slowpath because at least one
1766 # of the paths was not a file. Check to see if at least one of them
1766 # of the paths was not a file. Check to see if at least one of them
1767 # existed in history, otherwise simply return
1767 # existed in history, otherwise simply return
1768 for path in match.files():
1768 for path in match.files():
1769 if path == '.' or path in repo.store:
1769 if path == '.' or path in repo.store:
1770 break
1770 break
1771 else:
1771 else:
1772 return []
1772 return []
1773
1773
1774 if slowpath:
1774 if slowpath:
1775 # We have to read the changelog to match filenames against
1775 # We have to read the changelog to match filenames against
1776 # changed files
1776 # changed files
1777
1777
1778 if follow:
1778 if follow:
1779 raise util.Abort(_('can only follow copies/renames for explicit '
1779 raise util.Abort(_('can only follow copies/renames for explicit '
1780 'filenames'))
1780 'filenames'))
1781
1781
1782 # The slow path checks files modified in every changeset.
1782 # The slow path checks files modified in every changeset.
1783 # This is really slow on large repos, so compute the set lazily.
1783 # This is really slow on large repos, so compute the set lazily.
1784 class lazywantedset(object):
1784 class lazywantedset(object):
1785 def __init__(self):
1785 def __init__(self):
1786 self.set = set()
1786 self.set = set()
1787 self.revs = set(revs)
1787 self.revs = set(revs)
1788
1788
1789 # No need to worry about locality here because it will be accessed
1789 # No need to worry about locality here because it will be accessed
1790 # in the same order as the increasing window below.
1790 # in the same order as the increasing window below.
1791 def __contains__(self, value):
1791 def __contains__(self, value):
1792 if value in self.set:
1792 if value in self.set:
1793 return True
1793 return True
1794 elif not value in self.revs:
1794 elif not value in self.revs:
1795 return False
1795 return False
1796 else:
1796 else:
1797 self.revs.discard(value)
1797 self.revs.discard(value)
1798 ctx = change(value)
1798 ctx = change(value)
1799 matches = filter(match, ctx.files())
1799 matches = filter(match, ctx.files())
1800 if matches:
1800 if matches:
1801 fncache[value] = matches
1801 fncache[value] = matches
1802 self.set.add(value)
1802 self.set.add(value)
1803 return True
1803 return True
1804 return False
1804 return False
1805
1805
1806 def discard(self, value):
1806 def discard(self, value):
1807 self.revs.discard(value)
1807 self.revs.discard(value)
1808 self.set.discard(value)
1808 self.set.discard(value)
1809
1809
1810 wanted = lazywantedset()
1810 wanted = lazywantedset()
1811
1811
1812 # it might be worthwhile to do this in the iterator if the rev range
1812 # it might be worthwhile to do this in the iterator if the rev range
1813 # is descending and the prune args are all within that range
1813 # is descending and the prune args are all within that range
1814 for rev in opts.get('prune', ()):
1814 for rev in opts.get('prune', ()):
1815 rev = repo[rev].rev()
1815 rev = repo[rev].rev()
1816 ff = _followfilter(repo)
1816 ff = _followfilter(repo)
1817 stop = min(revs[0], revs[-1])
1817 stop = min(revs[0], revs[-1])
1818 for x in xrange(rev, stop - 1, -1):
1818 for x in xrange(rev, stop - 1, -1):
1819 if ff.match(x):
1819 if ff.match(x):
1820 wanted = wanted - [x]
1820 wanted = wanted - [x]
1821
1821
1822 # Now that wanted is correctly initialized, we can iterate over the
1822 # Now that wanted is correctly initialized, we can iterate over the
1823 # revision range, yielding only revisions in wanted.
1823 # revision range, yielding only revisions in wanted.
1824 def iterate():
1824 def iterate():
1825 if follow and not match.files():
1825 if follow and not match.files():
1826 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1826 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1827 def want(rev):
1827 def want(rev):
1828 return ff.match(rev) and rev in wanted
1828 return ff.match(rev) and rev in wanted
1829 else:
1829 else:
1830 def want(rev):
1830 def want(rev):
1831 return rev in wanted
1831 return rev in wanted
1832
1832
1833 it = iter(revs)
1833 it = iter(revs)
1834 stopiteration = False
1834 stopiteration = False
1835 for windowsize in increasingwindows():
1835 for windowsize in increasingwindows():
1836 nrevs = []
1836 nrevs = []
1837 for i in xrange(windowsize):
1837 for i in xrange(windowsize):
1838 try:
1838 try:
1839 rev = it.next()
1839 rev = it.next()
1840 if want(rev):
1840 if want(rev):
1841 nrevs.append(rev)
1841 nrevs.append(rev)
1842 except (StopIteration):
1842 except (StopIteration):
1843 stopiteration = True
1843 stopiteration = True
1844 break
1844 break
1845 for rev in sorted(nrevs):
1845 for rev in sorted(nrevs):
1846 fns = fncache.get(rev)
1846 fns = fncache.get(rev)
1847 ctx = change(rev)
1847 ctx = change(rev)
1848 if not fns:
1848 if not fns:
1849 def fns_generator():
1849 def fns_generator():
1850 for f in ctx.files():
1850 for f in ctx.files():
1851 if match(f):
1851 if match(f):
1852 yield f
1852 yield f
1853 fns = fns_generator()
1853 fns = fns_generator()
1854 prepare(ctx, fns)
1854 prepare(ctx, fns)
1855 for rev in nrevs:
1855 for rev in nrevs:
1856 yield change(rev)
1856 yield change(rev)
1857
1857
1858 if stopiteration:
1858 if stopiteration:
1859 break
1859 break
1860
1860
1861 return iterate()
1861 return iterate()
1862
1862
1863 def _makefollowlogfilematcher(repo, files, followfirst):
1863 def _makefollowlogfilematcher(repo, files, followfirst):
1864 # When displaying a revision with --patch --follow FILE, we have
1864 # When displaying a revision with --patch --follow FILE, we have
1865 # to know which file of the revision must be diffed. With
1865 # to know which file of the revision must be diffed. With
1866 # --follow, we want the names of the ancestors of FILE in the
1866 # --follow, we want the names of the ancestors of FILE in the
1867 # revision, stored in "fcache". "fcache" is populated by
1867 # revision, stored in "fcache". "fcache" is populated by
1868 # reproducing the graph traversal already done by --follow revset
1868 # reproducing the graph traversal already done by --follow revset
1869 # and relating linkrevs to file names (which is not "correct" but
1869 # and relating linkrevs to file names (which is not "correct" but
1870 # good enough).
1870 # good enough).
1871 fcache = {}
1871 fcache = {}
1872 fcacheready = [False]
1872 fcacheready = [False]
1873 pctx = repo['.']
1873 pctx = repo['.']
1874
1874
1875 def populate():
1875 def populate():
1876 for fn in files:
1876 for fn in files:
1877 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1877 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1878 for c in i:
1878 for c in i:
1879 fcache.setdefault(c.linkrev(), set()).add(c.path())
1879 fcache.setdefault(c.linkrev(), set()).add(c.path())
1880
1880
1881 def filematcher(rev):
1881 def filematcher(rev):
1882 if not fcacheready[0]:
1882 if not fcacheready[0]:
1883 # Lazy initialization
1883 # Lazy initialization
1884 fcacheready[0] = True
1884 fcacheready[0] = True
1885 populate()
1885 populate()
1886 return scmutil.matchfiles(repo, fcache.get(rev, []))
1886 return scmutil.matchfiles(repo, fcache.get(rev, []))
1887
1887
1888 return filematcher
1888 return filematcher
1889
1889
1890 def _makenofollowlogfilematcher(repo, pats, opts):
1890 def _makenofollowlogfilematcher(repo, pats, opts):
1891 '''hook for extensions to override the filematcher for non-follow cases'''
1891 '''hook for extensions to override the filematcher for non-follow cases'''
1892 return None
1892 return None
1893
1893
1894 def _makelogrevset(repo, pats, opts, revs):
1894 def _makelogrevset(repo, pats, opts, revs):
1895 """Return (expr, filematcher) where expr is a revset string built
1895 """Return (expr, filematcher) where expr is a revset string built
1896 from log options and file patterns or None. If --stat or --patch
1896 from log options and file patterns or None. If --stat or --patch
1897 are not passed, filematcher is None. Otherwise it is a callable
1897 are not passed, filematcher is None. Otherwise it is a callable
1898 taking a revision number and returning a match object filtering
1898 taking a revision number and returning a match object filtering
1899 the files to be detailed when displaying the revision.
1899 the files to be detailed when displaying the revision.
1900 """
1900 """
1901 opt2revset = {
1901 opt2revset = {
1902 'no_merges': ('not merge()', None),
1902 'no_merges': ('not merge()', None),
1903 'only_merges': ('merge()', None),
1903 'only_merges': ('merge()', None),
1904 '_ancestors': ('ancestors(%(val)s)', None),
1904 '_ancestors': ('ancestors(%(val)s)', None),
1905 '_fancestors': ('_firstancestors(%(val)s)', None),
1905 '_fancestors': ('_firstancestors(%(val)s)', None),
1906 '_descendants': ('descendants(%(val)s)', None),
1906 '_descendants': ('descendants(%(val)s)', None),
1907 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1907 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1908 '_matchfiles': ('_matchfiles(%(val)s)', None),
1908 '_matchfiles': ('_matchfiles(%(val)s)', None),
1909 'date': ('date(%(val)r)', None),
1909 'date': ('date(%(val)r)', None),
1910 'branch': ('branch(%(val)r)', ' or '),
1910 'branch': ('branch(%(val)r)', ' or '),
1911 '_patslog': ('filelog(%(val)r)', ' or '),
1911 '_patslog': ('filelog(%(val)r)', ' or '),
1912 '_patsfollow': ('follow(%(val)r)', ' or '),
1912 '_patsfollow': ('follow(%(val)r)', ' or '),
1913 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1913 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1914 'keyword': ('keyword(%(val)r)', ' or '),
1914 'keyword': ('keyword(%(val)r)', ' or '),
1915 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1915 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1916 'user': ('user(%(val)r)', ' or '),
1916 'user': ('user(%(val)r)', ' or '),
1917 }
1917 }
1918
1918
1919 opts = dict(opts)
1919 opts = dict(opts)
1920 # follow or not follow?
1920 # follow or not follow?
1921 follow = opts.get('follow') or opts.get('follow_first')
1921 follow = opts.get('follow') or opts.get('follow_first')
1922 if opts.get('follow_first'):
1922 if opts.get('follow_first'):
1923 followfirst = 1
1923 followfirst = 1
1924 else:
1924 else:
1925 followfirst = 0
1925 followfirst = 0
1926 # --follow with FILE behaviour depends on revs...
1926 # --follow with FILE behaviour depends on revs...
1927 it = iter(revs)
1927 it = iter(revs)
1928 startrev = it.next()
1928 startrev = it.next()
1929 try:
1929 try:
1930 followdescendants = startrev < it.next()
1930 followdescendants = startrev < it.next()
1931 except (StopIteration):
1931 except (StopIteration):
1932 followdescendants = False
1932 followdescendants = False
1933
1933
1934 # branch and only_branch are really aliases and must be handled at
1934 # branch and only_branch are really aliases and must be handled at
1935 # the same time
1935 # the same time
1936 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1936 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1937 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1937 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1938 # pats/include/exclude are passed to match.match() directly in
1938 # pats/include/exclude are passed to match.match() directly in
1939 # _matchfiles() revset but walkchangerevs() builds its matcher with
1939 # _matchfiles() revset but walkchangerevs() builds its matcher with
1940 # scmutil.match(). The difference is that input pats are globbed on
1940 # scmutil.match(). The difference is that input pats are globbed on
1941 # platforms without shell expansion (windows).
1941 # platforms without shell expansion (windows).
1942 pctx = repo[None]
1942 pctx = repo[None]
1943 match, pats = scmutil.matchandpats(pctx, pats, opts)
1943 match, pats = scmutil.matchandpats(pctx, pats, opts)
1944 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1944 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1945 if not slowpath:
1945 if not slowpath:
1946 for f in match.files():
1946 for f in match.files():
1947 if follow and f not in pctx:
1947 if follow and f not in pctx:
1948 # If the file exists, it may be a directory, so let it
1948 # If the file exists, it may be a directory, so let it
1949 # take the slow path.
1949 # take the slow path.
1950 if os.path.exists(repo.wjoin(f)):
1950 if os.path.exists(repo.wjoin(f)):
1951 slowpath = True
1951 slowpath = True
1952 continue
1952 continue
1953 else:
1953 else:
1954 raise util.Abort(_('cannot follow file not in parent '
1954 raise util.Abort(_('cannot follow file not in parent '
1955 'revision: "%s"') % f)
1955 'revision: "%s"') % f)
1956 filelog = repo.file(f)
1956 filelog = repo.file(f)
1957 if not filelog:
1957 if not filelog:
1958 # A zero count may be a directory or deleted file, so
1958 # A zero count may be a directory or deleted file, so
1959 # try to find matching entries on the slow path.
1959 # try to find matching entries on the slow path.
1960 if follow:
1960 if follow:
1961 raise util.Abort(
1961 raise util.Abort(
1962 _('cannot follow nonexistent file: "%s"') % f)
1962 _('cannot follow nonexistent file: "%s"') % f)
1963 slowpath = True
1963 slowpath = True
1964
1964
1965 # We decided to fall back to the slowpath because at least one
1965 # We decided to fall back to the slowpath because at least one
1966 # of the paths was not a file. Check to see if at least one of them
1966 # of the paths was not a file. Check to see if at least one of them
1967 # existed in history - in that case, we'll continue down the
1967 # existed in history - in that case, we'll continue down the
1968 # slowpath; otherwise, we can turn off the slowpath
1968 # slowpath; otherwise, we can turn off the slowpath
1969 if slowpath:
1969 if slowpath:
1970 for path in match.files():
1970 for path in match.files():
1971 if path == '.' or path in repo.store:
1971 if path == '.' or path in repo.store:
1972 break
1972 break
1973 else:
1973 else:
1974 slowpath = False
1974 slowpath = False
1975
1975
1976 fpats = ('_patsfollow', '_patsfollowfirst')
1976 fpats = ('_patsfollow', '_patsfollowfirst')
1977 fnopats = (('_ancestors', '_fancestors'),
1977 fnopats = (('_ancestors', '_fancestors'),
1978 ('_descendants', '_fdescendants'))
1978 ('_descendants', '_fdescendants'))
1979 if slowpath:
1979 if slowpath:
1980 # See walkchangerevs() slow path.
1980 # See walkchangerevs() slow path.
1981 #
1981 #
1982 # pats/include/exclude cannot be represented as separate
1982 # pats/include/exclude cannot be represented as separate
1983 # revset expressions as their filtering logic applies at file
1983 # revset expressions as their filtering logic applies at file
1984 # level. For instance "-I a -X b" matches a revision touching
1984 # level. For instance "-I a -X b" matches a revision touching
1985 # "a" and "b" while "file(a) and not file(b)" does
1985 # "a" and "b" while "file(a) and not file(b)" does
1986 # not. Besides, filesets are evaluated against the working
1986 # not. Besides, filesets are evaluated against the working
1987 # directory.
1987 # directory.
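# For instance (hypothetical patterns), "hg log -I 'glob:*.py' -X build"
# taken down this slow path yields a single file-level predicate roughly
# like
#   _matchfiles('r:', 'd:relpath', 'i:glob:*.py', 'x:build')
# so include/exclude semantics - and fileset evaluation against the
# working directory, per the note above - stay inside one matcher.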
1988 matchargs = ['r:', 'd:relpath']
1988 matchargs = ['r:', 'd:relpath']
1989 for p in pats:
1989 for p in pats:
1990 matchargs.append('p:' + p)
1990 matchargs.append('p:' + p)
1991 for p in opts.get('include', []):
1991 for p in opts.get('include', []):
1992 matchargs.append('i:' + p)
1992 matchargs.append('i:' + p)
1993 for p in opts.get('exclude', []):
1993 for p in opts.get('exclude', []):
1994 matchargs.append('x:' + p)
1994 matchargs.append('x:' + p)
1995 matchargs = ','.join(('%r' % p) for p in matchargs)
1995 matchargs = ','.join(('%r' % p) for p in matchargs)
1996 opts['_matchfiles'] = matchargs
1996 opts['_matchfiles'] = matchargs
1997 if follow:
1997 if follow:
1998 opts[fnopats[0][followfirst]] = '.'
1998 opts[fnopats[0][followfirst]] = '.'
1999 else:
1999 else:
2000 if follow:
2000 if follow:
2001 if pats:
2001 if pats:
2002 # follow() revset interprets its file argument as a
2002 # follow() revset interprets its file argument as a
2003 # manifest entry, so use match.files(), not pats.
2003 # manifest entry, so use match.files(), not pats.
2004 opts[fpats[followfirst]] = list(match.files())
2004 opts[fpats[followfirst]] = list(match.files())
2005 else:
2005 else:
2006 op = fnopats[followdescendants][followfirst]
2006 op = fnopats[followdescendants][followfirst]
2007 opts[op] = 'rev(%d)' % startrev
2007 opts[op] = 'rev(%d)' % startrev
2008 else:
2008 else:
2009 opts['_patslog'] = list(pats)
2009 opts['_patslog'] = list(pats)
2010
2010
2011 filematcher = None
2011 filematcher = None
2012 if opts.get('patch') or opts.get('stat'):
2012 if opts.get('patch') or opts.get('stat'):
2013 # When following files, track renames via a special matcher.
2013 # When following files, track renames via a special matcher.
2014 # If we're forced to take the slowpath it means we're following
2014 # If we're forced to take the slowpath it means we're following
2015 # at least one pattern/directory, so don't bother with rename tracking.
2015 # at least one pattern/directory, so don't bother with rename tracking.
2016 if follow and not match.always() and not slowpath:
2016 if follow and not match.always() and not slowpath:
2017 # _makefollowlogfilematcher expects its files argument to be
2017 # _makefollowlogfilematcher expects its files argument to be
2018 # relative to the repo root, so use match.files(), not pats.
2018 # relative to the repo root, so use match.files(), not pats.
2019 filematcher = _makefollowlogfilematcher(repo, match.files(),
2019 filematcher = _makefollowlogfilematcher(repo, match.files(),
2020 followfirst)
2020 followfirst)
2021 else:
2021 else:
2022 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2022 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2023 if filematcher is None:
2023 if filematcher is None:
2024 filematcher = lambda rev: match
2024 filematcher = lambda rev: match
2025
2025
2026 expr = []
2026 expr = []
2027 for op, val in sorted(opts.iteritems()):
2027 for op, val in sorted(opts.iteritems()):
2028 if not val:
2028 if not val:
2029 continue
2029 continue
2030 if op not in opt2revset:
2030 if op not in opt2revset:
2031 continue
2031 continue
2032 revop, andor = opt2revset[op]
2032 revop, andor = opt2revset[op]
2033 if '%(val)' not in revop:
2033 if '%(val)' not in revop:
2034 expr.append(revop)
2034 expr.append(revop)
2035 else:
2035 else:
2036 if not isinstance(val, list):
2036 if not isinstance(val, list):
2037 e = revop % {'val': val}
2037 e = revop % {'val': val}
2038 else:
2038 else:
2039 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2039 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2040 expr.append(e)
2040 expr.append(e)
2041
2041
2042 if expr:
2042 if expr:
2043 expr = '(' + ' and '.join(expr) + ')'
2043 expr = '(' + ' and '.join(expr) + ')'
2044 else:
2044 else:
2045 expr = None
2045 expr = None
2046 return expr, filematcher
2046 return expr, filematcher
2047
2047
2048 def _logrevs(repo, opts):
2048 def _logrevs(repo, opts):
2049 # Default --rev value depends on --follow but --follow behaviour
2049 # Default --rev value depends on --follow but --follow behaviour
2050 # depends on revisions resolved from --rev...
2050 # depends on revisions resolved from --rev...
2051 follow = opts.get('follow') or opts.get('follow_first')
2051 follow = opts.get('follow') or opts.get('follow_first')
2052 if opts.get('rev'):
2052 if opts.get('rev'):
2053 revs = scmutil.revrange(repo, opts['rev'])
2053 revs = scmutil.revrange(repo, opts['rev'])
2054 elif follow and repo.dirstate.p1() == nullid:
2054 elif follow and repo.dirstate.p1() == nullid:
2055 revs = revset.baseset()
2055 revs = revset.baseset()
2056 elif follow:
2056 elif follow:
2057 revs = repo.revs('reverse(:.)')
2057 revs = repo.revs('reverse(:.)')
2058 else:
2058 else:
2059 revs = revset.spanset(repo)
2059 revs = revset.spanset(repo)
2060 revs.reverse()
2060 revs.reverse()
2061 return revs
2061 return revs
2062
2062
2063 def getgraphlogrevs(repo, pats, opts):
2063 def getgraphlogrevs(repo, pats, opts):
2064 """Return (revs, expr, filematcher) where revs is an iterable of
2064 """Return (revs, expr, filematcher) where revs is an iterable of
2065 revision numbers, expr is a revset string built from log options
2065 revision numbers, expr is a revset string built from log options
2066 and file patterns or None, and used to filter 'revs'. If --stat or
2066 and file patterns or None, and used to filter 'revs'. If --stat or
2067 --patch are not passed, filematcher is None. Otherwise it is a
2067 --patch are not passed, filematcher is None. Otherwise it is a
2068 callable taking a revision number and returning a match object
2068 callable taking a revision number and returning a match object
2069 filtering the files to be detailed when displaying the revision.
2069 filtering the files to be detailed when displaying the revision.
2070 """
2070 """
2071 limit = loglimit(opts)
2071 limit = loglimit(opts)
2072 revs = _logrevs(repo, opts)
2072 revs = _logrevs(repo, opts)
2073 if not revs:
2073 if not revs:
2074 return revset.baseset(), None, None
2074 return revset.baseset(), None, None
2075 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2075 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2076 if opts.get('rev'):
2076 if opts.get('rev'):
2077 # User-specified revs might be unsorted, but don't sort before
2077 # User-specified revs might be unsorted, but don't sort before
2078 # _makelogrevset because it might depend on the order of revs
2078 # _makelogrevset because it might depend on the order of revs
2079 revs.sort(reverse=True)
2079 revs.sort(reverse=True)
2080 if expr:
2080 if expr:
2081 # Revset matchers often operate faster on revisions in changelog
2081 # Revset matchers often operate faster on revisions in changelog
2082 # order, because most filters deal with the changelog.
2082 # order, because most filters deal with the changelog.
2083 revs.reverse()
2083 revs.reverse()
2084 matcher = revset.match(repo.ui, expr)
2084 matcher = revset.match(repo.ui, expr)
2085 # Revset matchers can reorder revisions. "A or B" typically returns
2085 # Revset matchers can reorder revisions. "A or B" typically returns
2086 # the revision matching A, then the revision matching B. Sort
2086 # the revision matching A, then the revision matching B. Sort
2087 # again to fix that.
2087 # again to fix that.
2088 revs = matcher(repo, revs)
2088 revs = matcher(repo, revs)
2089 revs.sort(reverse=True)
2089 revs.sort(reverse=True)
2090 if limit is not None:
2090 if limit is not None:
2091 limitedrevs = []
2091 limitedrevs = []
2092 for idx, rev in enumerate(revs):
2092 for idx, rev in enumerate(revs):
2093 if idx >= limit:
2093 if idx >= limit:
2094 break
2094 break
2095 limitedrevs.append(rev)
2095 limitedrevs.append(rev)
2096 revs = revset.baseset(limitedrevs)
2096 revs = revset.baseset(limitedrevs)
2097
2097
2098 return revs, expr, filematcher
2098 return revs, expr, filematcher
2099
2099
2100 def getlogrevs(repo, pats, opts):
2100 def getlogrevs(repo, pats, opts):
2101 """Return (revs, expr, filematcher) where revs is an iterable of
2101 """Return (revs, expr, filematcher) where revs is an iterable of
2102 revision numbers, expr is a revset string built from log options
2102 revision numbers, expr is a revset string built from log options
2103 and file patterns (or None), and is used to filter 'revs'. If neither
2103 and file patterns (or None), and is used to filter 'revs'. If neither
2104 --stat nor --patch is passed, filematcher is None. Otherwise it is a
2104 --stat nor --patch is passed, filematcher is None. Otherwise it is a
2105 callable taking a revision number and returning a match object
2105 callable taking a revision number and returning a match object
2106 filtering the files to be detailed when displaying the revision.
2106 filtering the files to be detailed when displaying the revision.
2107 """
2107 """
2108 limit = loglimit(opts)
2108 limit = loglimit(opts)
2109 revs = _logrevs(repo, opts)
2109 revs = _logrevs(repo, opts)
2110 if not revs:
2110 if not revs:
2111 return revset.baseset([]), None, None
2111 return revset.baseset([]), None, None
2112 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2112 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2113 if expr:
2113 if expr:
2114 # Revset matchers often operate faster on revisions in changelog
2114 # Revset matchers often operate faster on revisions in changelog
2115 # order, because most filters deal with the changelog.
2115 # order, because most filters deal with the changelog.
2116 if not opts.get('rev'):
2116 if not opts.get('rev'):
2117 revs.reverse()
2117 revs.reverse()
2118 matcher = revset.match(repo.ui, expr)
2118 matcher = revset.match(repo.ui, expr)
2119 # Revset matchers can reorder revisions. "A or B" typically returns
2119 # Revset matchers can reorder revisions. "A or B" typically returns
2120 # the revision matching A, then the revision matching B. Sort
2120 # the revision matching A, then the revision matching B. Sort
2121 # again to fix that.
2121 # again to fix that.
2122 revs = matcher(repo, revs)
2122 revs = matcher(repo, revs)
2123 if not opts.get('rev'):
2123 if not opts.get('rev'):
2124 revs.sort(reverse=True)
2124 revs.sort(reverse=True)
2125 if limit is not None:
2125 if limit is not None:
2126 count = 0
2126 count = 0
2127 limitedrevs = []
2127 limitedrevs = []
2128 it = iter(revs)
2128 it = iter(revs)
2129 while count < limit:
2129 while count < limit:
2130 try:
2130 try:
2131 limitedrevs.append(it.next())
2131 limitedrevs.append(it.next())
2132 except StopIteration:
2132 except StopIteration:
2133 break
2133 break
2134 count += 1
2134 count += 1
2135 revs = revset.baseset(limitedrevs)
2135 revs = revset.baseset(limitedrevs)
2136
2136
2137 return revs, expr, filematcher
2137 return revs, expr, filematcher
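# Minimal usage sketch (editorial addition, not in the original file): how a
# caller can consume the (revs, expr, filematcher) triple returned above,
# assuming the show_changeset helper defined earlier in this module.
def _printlog(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        matchfn = filematcher(rev) if filematcher else None
        displayer.show(ctx, matchfn=matchfn)
    displayer.close()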
2138
2138
2139 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2139 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2140 filematcher=None):
2140 filematcher=None):
2141 seen, state = [], graphmod.asciistate()
2141 seen, state = [], graphmod.asciistate()
2142 for rev, type, ctx, parents in dag:
2142 for rev, type, ctx, parents in dag:
2143 char = 'o'
2143 char = 'o'
2144 if ctx.node() in showparents:
2144 if ctx.node() in showparents:
2145 char = '@'
2145 char = '@'
2146 elif ctx.obsolete():
2146 elif ctx.obsolete():
2147 char = 'x'
2147 char = 'x'
2148 elif ctx.closesbranch():
2148 elif ctx.closesbranch():
2149 char = '_'
2149 char = '_'
2150 copies = None
2150 copies = None
2151 if getrenamed and ctx.rev():
2151 if getrenamed and ctx.rev():
2152 copies = []
2152 copies = []
2153 for fn in ctx.files():
2153 for fn in ctx.files():
2154 rename = getrenamed(fn, ctx.rev())
2154 rename = getrenamed(fn, ctx.rev())
2155 if rename:
2155 if rename:
2156 copies.append((fn, rename[0]))
2156 copies.append((fn, rename[0]))
2157 revmatchfn = None
2157 revmatchfn = None
2158 if filematcher is not None:
2158 if filematcher is not None:
2159 revmatchfn = filematcher(ctx.rev())
2159 revmatchfn = filematcher(ctx.rev())
2160 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2160 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2161 lines = displayer.hunk.pop(rev).split('\n')
2161 lines = displayer.hunk.pop(rev).split('\n')
2162 if not lines[-1]:
2162 if not lines[-1]:
2163 del lines[-1]
2163 del lines[-1]
2164 displayer.flush(rev)
2164 displayer.flush(rev)
2165 edges = edgefn(type, char, lines, seen, rev, parents)
2165 edges = edgefn(type, char, lines, seen, rev, parents)
2166 for type, char, lines, coldata in edges:
2166 for type, char, lines, coldata in edges:
2167 graphmod.ascii(ui, state, type, char, lines, coldata)
2167 graphmod.ascii(ui, state, type, char, lines, coldata)
2168 displayer.close()
2168 displayer.close()
2169
2169
2170 def graphlog(ui, repo, *pats, **opts):
2170 def graphlog(ui, repo, *pats, **opts):
2171 # Parameters are identical to log command ones
2171 # Parameters are identical to log command ones
2172 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2172 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2173 revdag = graphmod.dagwalker(repo, revs)
2173 revdag = graphmod.dagwalker(repo, revs)
2174
2174
2175 getrenamed = None
2175 getrenamed = None
2176 if opts.get('copies'):
2176 if opts.get('copies'):
2177 endrev = None
2177 endrev = None
2178 if opts.get('rev'):
2178 if opts.get('rev'):
2179 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2179 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2180 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2180 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2181 displayer = show_changeset(ui, repo, opts, buffered=True)
2181 displayer = show_changeset(ui, repo, opts, buffered=True)
2182 showparents = [ctx.node() for ctx in repo[None].parents()]
2182 showparents = [ctx.node() for ctx in repo[None].parents()]
2183 displaygraph(ui, revdag, displayer, showparents,
2183 displaygraph(ui, revdag, displayer, showparents,
2184 graphmod.asciiedges, getrenamed, filematcher)
2184 graphmod.asciiedges, getrenamed, filematcher)
2185
2185
2186 def checkunsupportedgraphflags(pats, opts):
2186 def checkunsupportedgraphflags(pats, opts):
2187 for op in ["newest_first"]:
2187 for op in ["newest_first"]:
2188 if op in opts and opts[op]:
2188 if op in opts and opts[op]:
2189 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2189 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2190 % op.replace("_", "-"))
2190 % op.replace("_", "-"))
2191
2191
2192 def graphrevs(repo, nodes, opts):
2192 def graphrevs(repo, nodes, opts):
2193 limit = loglimit(opts)
2193 limit = loglimit(opts)
2194 nodes.reverse()
2194 nodes.reverse()
2195 if limit is not None:
2195 if limit is not None:
2196 nodes = nodes[:limit]
2196 nodes = nodes[:limit]
2197 return graphmod.nodes(repo, nodes)
2197 return graphmod.nodes(repo, nodes)
2198
2198
2199 def add(ui, repo, match, prefix, explicitonly, **opts):
2199 def add(ui, repo, match, prefix, explicitonly, **opts):
2200 join = lambda f: os.path.join(prefix, f)
2200 join = lambda f: os.path.join(prefix, f)
2201 bad = []
2201 bad = []
2202 oldbad = match.bad
2202 oldbad = match.bad
2203 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2203 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2204 names = []
2204 names = []
2205 wctx = repo[None]
2205 wctx = repo[None]
2206 cca = None
2206 cca = None
2207 abort, warn = scmutil.checkportabilityalert(ui)
2207 abort, warn = scmutil.checkportabilityalert(ui)
2208 if abort or warn:
2208 if abort or warn:
2209 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2209 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2210 for f in wctx.walk(match):
2210 for f in wctx.walk(match):
2211 exact = match.exact(f)
2211 exact = match.exact(f)
2212 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2212 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2213 if cca:
2213 if cca:
2214 cca(f)
2214 cca(f)
2215 names.append(f)
2215 names.append(f)
2216 if ui.verbose or not exact:
2216 if ui.verbose or not exact:
2217 ui.status(_('adding %s\n') % match.rel(f))
2217 ui.status(_('adding %s\n') % match.rel(f))
2218
2218
2219 for subpath in sorted(wctx.substate):
2219 for subpath in sorted(wctx.substate):
2220 sub = wctx.sub(subpath)
2220 sub = wctx.sub(subpath)
2221 try:
2221 try:
2222 submatch = matchmod.narrowmatcher(subpath, match)
2222 submatch = matchmod.narrowmatcher(subpath, match)
2223 if opts.get('subrepos'):
2223 if opts.get('subrepos'):
2224 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2224 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2225 else:
2225 else:
2226 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2226 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2227 except error.LookupError:
2227 except error.LookupError:
2228 ui.status(_("skipping missing subrepository: %s\n")
2228 ui.status(_("skipping missing subrepository: %s\n")
2229 % join(subpath))
2229 % join(subpath))
2230
2230
2231 if not opts.get('dry_run'):
2231 if not opts.get('dry_run'):
2232 rejected = wctx.add(names, prefix)
2232 rejected = wctx.add(names, prefix)
2233 bad.extend(f for f in rejected if f in match.files())
2233 bad.extend(f for f in rejected if f in match.files())
2234 return bad
2234 return bad
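# Minimal usage sketch (editorial addition): driving the add() helper above
# with a matcher built the same way other callers in this module build
# theirs; returns 1 if any explicitly named file could not be added.
def _addall(ui, repo, pats, **opts):
    m = scmutil.match(repo[None], pats, opts)
    bad = add(ui, repo, m, prefix='', explicitonly=False, **opts)
    return 1 if bad else 0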
2235
2235
2236 def forget(ui, repo, match, prefix, explicitonly):
2236 def forget(ui, repo, match, prefix, explicitonly):
2237 join = lambda f: os.path.join(prefix, f)
2237 join = lambda f: os.path.join(prefix, f)
2238 bad = []
2238 bad = []
2239 oldbad = match.bad
2239 oldbad = match.bad
2240 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2240 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2241 wctx = repo[None]
2241 wctx = repo[None]
2242 forgot = []
2242 forgot = []
2243 s = repo.status(match=match, clean=True)
2243 s = repo.status(match=match, clean=True)
2244 forget = sorted(s[0] + s[1] + s[3] + s[6])
2244 forget = sorted(s[0] + s[1] + s[3] + s[6])
2245 if explicitonly:
2245 if explicitonly:
2246 forget = [f for f in forget if match.exact(f)]
2246 forget = [f for f in forget if match.exact(f)]
2247
2247
2248 for subpath in sorted(wctx.substate):
2248 for subpath in sorted(wctx.substate):
2249 sub = wctx.sub(subpath)
2249 sub = wctx.sub(subpath)
2250 try:
2250 try:
2251 submatch = matchmod.narrowmatcher(subpath, match)
2251 submatch = matchmod.narrowmatcher(subpath, match)
2252 subbad, subforgot = sub.forget(submatch, prefix)
2252 subbad, subforgot = sub.forget(submatch, prefix)
2253 bad.extend([subpath + '/' + f for f in subbad])
2253 bad.extend([subpath + '/' + f for f in subbad])
2254 forgot.extend([subpath + '/' + f for f in subforgot])
2254 forgot.extend([subpath + '/' + f for f in subforgot])
2255 except error.LookupError:
2255 except error.LookupError:
2256 ui.status(_("skipping missing subrepository: %s\n")
2256 ui.status(_("skipping missing subrepository: %s\n")
2257 % join(subpath))
2257 % join(subpath))
2258
2258
2259 if not explicitonly:
2259 if not explicitonly:
2260 for f in match.files():
2260 for f in match.files():
2261 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2261 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2262 if f not in forgot:
2262 if f not in forgot:
2263 if repo.wvfs.exists(f):
2263 if repo.wvfs.exists(f):
2264 ui.warn(_('not removing %s: '
2264 ui.warn(_('not removing %s: '
2265 'file is already untracked\n')
2265 'file is already untracked\n')
2266 % match.rel(f))
2266 % match.rel(f))
2267 bad.append(f)
2267 bad.append(f)
2268
2268
2269 for f in forget:
2269 for f in forget:
2270 if ui.verbose or not match.exact(f):
2270 if ui.verbose or not match.exact(f):
2271 ui.status(_('removing %s\n') % match.rel(f))
2271 ui.status(_('removing %s\n') % match.rel(f))
2272
2272
2273 rejected = wctx.forget(forget, prefix)
2273 rejected = wctx.forget(forget, prefix)
2274 bad.extend(f for f in rejected if f in match.files())
2274 bad.extend(f for f in rejected if f in match.files())
2275 forgot.extend(f for f in forget if f not in rejected)
2275 forgot.extend(f for f in forget if f not in rejected)
2276 return bad, forgot
2276 return bad, forgot
2277
2277
2278 def files(ui, ctx, m, fm, fmt, subrepos):
2278 def files(ui, ctx, m, fm, fmt, subrepos):
2279 rev = ctx.rev()
2279 rev = ctx.rev()
2280 ret = 1
2280 ret = 1
2281 ds = ctx.repo().dirstate
2281 ds = ctx.repo().dirstate
2282
2282
2283 for f in ctx.matches(m):
2283 for f in ctx.matches(m):
2284 if rev is None and ds[f] == 'r':
2284 if rev is None and ds[f] == 'r':
2285 continue
2285 continue
2286 fm.startitem()
2286 fm.startitem()
2287 if ui.verbose:
2287 if ui.verbose:
2288 fc = ctx[f]
2288 fc = ctx[f]
2289 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2289 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2290 fm.data(abspath=f)
2290 fm.data(abspath=f)
2291 fm.write('path', fmt, m.rel(f))
2291 fm.write('path', fmt, m.rel(f))
2292 ret = 0
2292 ret = 0
2293
2293
2294 if subrepos:
2294 if subrepos:
2295 for subpath in sorted(ctx.substate):
2295 for subpath in sorted(ctx.substate):
2296 sub = ctx.sub(subpath)
2296 sub = ctx.sub(subpath)
2297 try:
2297 try:
2298 submatch = matchmod.narrowmatcher(subpath, m)
2298 submatch = matchmod.narrowmatcher(subpath, m)
2299 if sub.printfiles(ui, submatch, fm, fmt) == 0:
2299 if sub.printfiles(ui, submatch, fm, fmt) == 0:
2300 ret = 0
2300 ret = 0
2301 except error.LookupError:
2301 except error.LookupError:
2302 ui.status(_("skipping missing subrepository: %s\n")
2302 ui.status(_("skipping missing subrepository: %s\n")
2303 % m.abs(subpath))
2303 % m.abs(subpath))
2304
2304
2305 return ret
2305 return ret
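# Minimal usage sketch (editorial addition): how a front-end command might
# drive the files() helper above through a formatter, similar to what the
# 'hg files' command does.
def _listfiles(ui, repo, pats, **opts):
    ctx = repo[None]
    m = scmutil.match(ctx, pats, opts)
    fm = ui.formatter('files', opts)
    ret = files(ui, ctx, m, fm, '%s\n', opts.get('subrepos'))
    fm.end()
    return ret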
2306
2306
2307 def remove(ui, repo, m, prefix, after, force, subrepos):
2307 def remove(ui, repo, m, prefix, after, force, subrepos):
2308 join = lambda f: os.path.join(prefix, f)
2308 join = lambda f: os.path.join(prefix, f)
2309 ret = 0
2309 ret = 0
2310 s = repo.status(match=m, clean=True)
2310 s = repo.status(match=m, clean=True)
2311 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2311 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2312
2312
2313 wctx = repo[None]
2313 wctx = repo[None]
2314
2314
2315 for subpath in sorted(wctx.substate):
2315 for subpath in sorted(wctx.substate):
2316 def matchessubrepo(matcher, subpath):
2316 def matchessubrepo(matcher, subpath):
2317 if matcher.exact(subpath):
2317 if matcher.exact(subpath):
2318 return True
2318 return True
2319 for f in matcher.files():
2319 for f in matcher.files():
2320 if f.startswith(subpath):
2320 if f.startswith(subpath):
2321 return True
2321 return True
2322 return False
2322 return False
2323
2323
2324 if subrepos or matchessubrepo(m, subpath):
2324 if subrepos or matchessubrepo(m, subpath):
2325 sub = wctx.sub(subpath)
2325 sub = wctx.sub(subpath)
2326 try:
2326 try:
2327 submatch = matchmod.narrowmatcher(subpath, m)
2327 submatch = matchmod.narrowmatcher(subpath, m)
2328 if sub.removefiles(submatch, prefix, after, force, subrepos):
2328 if sub.removefiles(submatch, prefix, after, force, subrepos):
2329 ret = 1
2329 ret = 1
2330 except error.LookupError:
2330 except error.LookupError:
2331 ui.status(_("skipping missing subrepository: %s\n")
2331 ui.status(_("skipping missing subrepository: %s\n")
2332 % join(subpath))
2332 % join(subpath))
2333
2333
2334 # warn about failure to delete explicit files/dirs
2334 # warn about failure to delete explicit files/dirs
2335 deleteddirs = scmutil.dirs(deleted)
2335 deleteddirs = scmutil.dirs(deleted)
2336 for f in m.files():
2336 for f in m.files():
2337 def insubrepo():
2337 def insubrepo():
2338 for subpath in wctx.substate:
2338 for subpath in wctx.substate:
2339 if f.startswith(subpath):
2339 if f.startswith(subpath):
2340 return True
2340 return True
2341 return False
2341 return False
2342
2342
2343 isdir = f in deleteddirs or f in wctx.dirs()
2343 isdir = f in deleteddirs or f in wctx.dirs()
2344 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2344 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2345 continue
2345 continue
2346
2346
2347 if repo.wvfs.exists(f):
2347 if repo.wvfs.exists(f):
2348 if repo.wvfs.isdir(f):
2348 if repo.wvfs.isdir(f):
2349 ui.warn(_('not removing %s: no tracked files\n')
2349 ui.warn(_('not removing %s: no tracked files\n')
2350 % m.rel(f))
2350 % m.rel(f))
2351 else:
2351 else:
2352 ui.warn(_('not removing %s: file is untracked\n')
2352 ui.warn(_('not removing %s: file is untracked\n')
2353 % m.rel(f))
2353 % m.rel(f))
2354 # missing files will generate a warning elsewhere
2354 # missing files will generate a warning elsewhere
2355 ret = 1
2355 ret = 1
2356
2356
2357 if force:
2357 if force:
2358 list = modified + deleted + clean + added
2358 list = modified + deleted + clean + added
2359 elif after:
2359 elif after:
2360 list = deleted
2360 list = deleted
2361 for f in modified + added + clean:
2361 for f in modified + added + clean:
2362 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2362 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2363 ret = 1
2363 ret = 1
2364 else:
2364 else:
2365 list = deleted + clean
2365 list = deleted + clean
2366 for f in modified:
2366 for f in modified:
2367 ui.warn(_('not removing %s: file is modified (use -f'
2367 ui.warn(_('not removing %s: file is modified (use -f'
2368 ' to force removal)\n') % m.rel(f))
2368 ' to force removal)\n') % m.rel(f))
2369 ret = 1
2369 ret = 1
2370 for f in added:
2370 for f in added:
2371 ui.warn(_('not removing %s: file has been marked for add'
2371 ui.warn(_('not removing %s: file has been marked for add'
2372 ' (use forget to undo)\n') % m.rel(f))
2372 ' (use forget to undo)\n') % m.rel(f))
2373 ret = 1
2373 ret = 1
2374
2374
2375 for f in sorted(list):
2375 for f in sorted(list):
2376 if ui.verbose or not m.exact(f):
2376 if ui.verbose or not m.exact(f):
2377 ui.status(_('removing %s\n') % m.rel(f))
2377 ui.status(_('removing %s\n') % m.rel(f))
2378
2378
2379 wlock = repo.wlock()
2379 wlock = repo.wlock()
2380 try:
2380 try:
2381 if not after:
2381 if not after:
2382 for f in list:
2382 for f in list:
2383 if f in added:
2383 if f in added:
2384 continue # we never unlink added files on remove
2384 continue # we never unlink added files on remove
2385 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2385 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2386 repo[None].forget(list)
2386 repo[None].forget(list)
2387 finally:
2387 finally:
2388 wlock.release()
2388 wlock.release()
2389
2389
2390 return ret
2390 return ret
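# Editorial summary (a sketch of the branches above, not original text):
#   plain removal  -> deletes files that are clean or already missing, and
#                     warns about modified or added files
#   after=True     -> only records files already deleted from disk
#   force=True     -> removes regardless of local modifications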
2391
2391
2392 def cat(ui, repo, ctx, matcher, prefix, **opts):
2392 def cat(ui, repo, ctx, matcher, prefix, **opts):
2393 err = 1
2393 err = 1
2394
2394
2395 def write(path):
2395 def write(path):
2396 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2396 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2397 pathname=os.path.join(prefix, path))
2397 pathname=os.path.join(prefix, path))
2398 data = ctx[path].data()
2398 data = ctx[path].data()
2399 if opts.get('decode'):
2399 if opts.get('decode'):
2400 data = repo.wwritedata(path, data)
2400 data = repo.wwritedata(path, data)
2401 fp.write(data)
2401 fp.write(data)
2402 fp.close()
2402 fp.close()
2403
2403
2404 # Automation often uses hg cat on single files, so special case it
2404 # Automation often uses hg cat on single files, so special case it
2405 # for performance to avoid the cost of parsing the manifest.
2405 # for performance to avoid the cost of parsing the manifest.
2406 if len(matcher.files()) == 1 and not matcher.anypats():
2406 if len(matcher.files()) == 1 and not matcher.anypats():
2407 file = matcher.files()[0]
2407 file = matcher.files()[0]
2408 mf = repo.manifest
2408 mf = repo.manifest
2409 mfnode = ctx._changeset[0]
2409 mfnode = ctx._changeset[0]
2410 if mf.find(mfnode, file)[0]:
2410 if mf.find(mfnode, file)[0]:
2411 write(file)
2411 write(file)
2412 return 0
2412 return 0
2413
2413
2414 # Don't warn about "missing" files that are really in subrepos
2414 # Don't warn about "missing" files that are really in subrepos
2415 bad = matcher.bad
2415 bad = matcher.bad
2416
2416
2417 def badfn(path, msg):
2417 def badfn(path, msg):
2418 for subpath in ctx.substate:
2418 for subpath in ctx.substate:
2419 if path.startswith(subpath):
2419 if path.startswith(subpath):
2420 return
2420 return
2421 bad(path, msg)
2421 bad(path, msg)
2422
2422
2423 matcher.bad = badfn
2423 matcher.bad = badfn
2424
2424
2425 for abs in ctx.walk(matcher):
2425 for abs in ctx.walk(matcher):
2426 write(abs)
2426 write(abs)
2427 err = 0
2427 err = 0
2428
2428
2429 matcher.bad = bad
2429 matcher.bad = bad
2430
2430
2431 for subpath in sorted(ctx.substate):
2431 for subpath in sorted(ctx.substate):
2432 sub = ctx.sub(subpath)
2432 sub = ctx.sub(subpath)
2433 try:
2433 try:
2434 submatch = matchmod.narrowmatcher(subpath, matcher)
2434 submatch = matchmod.narrowmatcher(subpath, matcher)
2435
2435
2436 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2436 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2437 **opts):
2437 **opts):
2438 err = 0
2438 err = 0
2439 except error.RepoLookupError:
2439 except error.RepoLookupError:
2440 ui.status(_("skipping missing subrepository: %s\n")
2440 ui.status(_("skipping missing subrepository: %s\n")
2441 % os.path.join(prefix, subpath))
2441 % os.path.join(prefix, subpath))
2442
2442
2443 return err
2443 return err
2444
2444
2445 def commit(ui, repo, commitfunc, pats, opts):
2445 def commit(ui, repo, commitfunc, pats, opts):
2446 '''commit the specified files or all outstanding changes'''
2446 '''commit the specified files or all outstanding changes'''
2447 date = opts.get('date')
2447 date = opts.get('date')
2448 if date:
2448 if date:
2449 opts['date'] = util.parsedate(date)
2449 opts['date'] = util.parsedate(date)
2450 message = logmessage(ui, opts)
2450 message = logmessage(ui, opts)
2451 matcher = scmutil.match(repo[None], pats, opts)
2451 matcher = scmutil.match(repo[None], pats, opts)
2452
2452
2453 # extract addremove carefully -- this function can be called from a command
2453 # extract addremove carefully -- this function can be called from a command
2454 # that doesn't support addremove
2454 # that doesn't support addremove
2455 if opts.get('addremove'):
2455 if opts.get('addremove'):
2456 if scmutil.addremove(repo, matcher, "", opts) != 0:
2456 if scmutil.addremove(repo, matcher, "", opts) != 0:
2457 raise util.Abort(
2457 raise util.Abort(
2458 _("failed to mark all new/missing files as added/removed"))
2458 _("failed to mark all new/missing files as added/removed"))
2459
2459
2460 return commitfunc(ui, repo, message, matcher, opts)
2460 return commitfunc(ui, repo, message, matcher, opts)
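# Illustrative sketch (editorial addition): the 'commitfunc' argument used
# above is expected to accept (ui, repo, message, match, opts) and normally
# just forwards to repo.commit, roughly as the command front-ends do.
def _examplecommitfunc(ui, repo, message, match, opts):
    return repo.commit(message, opts.get('user'), opts.get('date'), match,
                       editor=getcommiteditor(**opts))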
2461
2461
2462 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2462 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2463 # amend will reuse the existing user if not specified, but the obsolete
2463 # amend will reuse the existing user if not specified, but the obsolete
2464 # marker creation requires that the current user's name is specified.
2464 # marker creation requires that the current user's name is specified.
2465 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2465 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2466 ui.username() # raise exception if username not set
2466 ui.username() # raise exception if username not set
2467
2467
2468 ui.note(_('amending changeset %s\n') % old)
2468 ui.note(_('amending changeset %s\n') % old)
2469 base = old.p1()
2469 base = old.p1()
2470
2470
2471 wlock = lock = newid = None
2471 wlock = lock = newid = None
2472 try:
2472 try:
2473 wlock = repo.wlock()
2473 wlock = repo.wlock()
2474 lock = repo.lock()
2474 lock = repo.lock()
2475 tr = repo.transaction('amend')
2475 tr = repo.transaction('amend')
2476 try:
2476 try:
2477 # See if we got a message from -m or -l, if not, open the editor
2477 # See if we got a message from -m or -l, if not, open the editor
2478 # with the message of the changeset to amend
2478 # with the message of the changeset to amend
2479 message = logmessage(ui, opts)
2479 message = logmessage(ui, opts)
2480 # ensure logfile does not conflict with later enforcement of the
2480 # ensure logfile does not conflict with later enforcement of the
2481 # message. potential logfile content has been processed by
2481 # message. potential logfile content has been processed by
2482 # `logmessage` anyway.
2482 # `logmessage` anyway.
2483 opts.pop('logfile')
2483 opts.pop('logfile')
2484 # First, do a regular commit to record all changes in the working
2484 # First, do a regular commit to record all changes in the working
2485 # directory (if there are any)
2485 # directory (if there are any)
2486 ui.callhooks = False
2486 ui.callhooks = False
2487 currentbookmark = repo._bookmarkcurrent
2487 currentbookmark = repo._bookmarkcurrent
2488 try:
2488 try:
2489 repo._bookmarkcurrent = None
2489 repo._bookmarkcurrent = None
2490 opts['message'] = 'temporary amend commit for %s' % old
2490 opts['message'] = 'temporary amend commit for %s' % old
2491 node = commit(ui, repo, commitfunc, pats, opts)
2491 node = commit(ui, repo, commitfunc, pats, opts)
2492 finally:
2492 finally:
2493 repo._bookmarkcurrent = currentbookmark
2493 repo._bookmarkcurrent = currentbookmark
2494 ui.callhooks = True
2494 ui.callhooks = True
2495 ctx = repo[node]
2495 ctx = repo[node]
2496
2496
2497 # Participating changesets:
2497 # Participating changesets:
2498 #
2498 #
2499 # node/ctx o - new (intermediate) commit that contains changes
2499 # node/ctx o - new (intermediate) commit that contains changes
2500 # | from working dir to go into amending commit
2500 # | from working dir to go into amending commit
2501 # | (or a workingctx if there were no changes)
2501 # | (or a workingctx if there were no changes)
2502 # |
2502 # |
2503 # old o - changeset to amend
2503 # old o - changeset to amend
2504 # |
2504 # |
2505 # base o - parent of amending changeset
2505 # base o - parent of amending changeset
2506
2506
2507 # Update extra dict from amended commit (e.g. to preserve graft
2507 # Update extra dict from amended commit (e.g. to preserve graft
2508 # source)
2508 # source)
2509 extra.update(old.extra())
2509 extra.update(old.extra())
2510
2510
2511 # Also update it from the intermediate commit or from the wctx
2511 # Also update it from the intermediate commit or from the wctx
2512 extra.update(ctx.extra())
2512 extra.update(ctx.extra())
2513
2513
2514 if len(old.parents()) > 1:
2514 if len(old.parents()) > 1:
2515 # ctx.files() isn't reliable for merges, so fall back to the
2515 # ctx.files() isn't reliable for merges, so fall back to the
2516 # slower repo.status() method
2516 # slower repo.status() method
2517 files = set([fn for st in repo.status(base, old)[:3]
2517 files = set([fn for st in repo.status(base, old)[:3]
2518 for fn in st])
2518 for fn in st])
2519 else:
2519 else:
2520 files = set(old.files())
2520 files = set(old.files())
2521
2521
2522 # Second, we use either the commit we just did, or if there were no
2522 # Second, we use either the commit we just did, or if there were no
2523 # changes the parent of the working directory as the version of the
2523 # changes the parent of the working directory as the version of the
2524 # files in the final amend commit
2524 # files in the final amend commit
2525 if node:
2525 if node:
2526 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2526 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2527
2527
2528 user = ctx.user()
2528 user = ctx.user()
2529 date = ctx.date()
2529 date = ctx.date()
2530 # Recompute copies (avoid recording a -> b -> a)
2530 # Recompute copies (avoid recording a -> b -> a)
2531 copied = copies.pathcopies(base, ctx)
2531 copied = copies.pathcopies(base, ctx)
2532 if old.p2():  # note: p2 must be called; the bare method is always truthy
2532 if old.p2():  # note: p2 must be called; the bare method is always truthy
2533 copied.update(copies.pathcopies(old.p2(), ctx))
2533 copied.update(copies.pathcopies(old.p2(), ctx))
2534
2534
2535 # Prune files which were reverted by the updates: if old
2535 # Prune files which were reverted by the updates: if old
2536 # introduced file X and our intermediate commit, node,
2536 # introduced file X and our intermediate commit, node,
2537 # renamed that file, then those two files are the same and
2537 # renamed that file, then those two files are the same and
2538 # we can discard X from our list of files. Likewise if X
2538 # we can discard X from our list of files. Likewise if X
2539 # was deleted, it's no longer relevant
2539 # was deleted, it's no longer relevant
2540 files.update(ctx.files())
2540 files.update(ctx.files())
2541
2541
2542 def samefile(f):
2542 def samefile(f):
2543 if f in ctx.manifest():
2543 if f in ctx.manifest():
2544 a = ctx.filectx(f)
2544 a = ctx.filectx(f)
2545 if f in base.manifest():
2545 if f in base.manifest():
2546 b = base.filectx(f)
2546 b = base.filectx(f)
2547 return (not a.cmp(b)
2547 return (not a.cmp(b)
2548 and a.flags() == b.flags())
2548 and a.flags() == b.flags())
2549 else:
2549 else:
2550 return False
2550 return False
2551 else:
2551 else:
2552 return f not in base.manifest()
2552 return f not in base.manifest()
2553 files = [f for f in files if not samefile(f)]
2553 files = [f for f in files if not samefile(f)]
2554
2554
2555 def filectxfn(repo, ctx_, path):
2555 def filectxfn(repo, ctx_, path):
2556 try:
2556 try:
2557 fctx = ctx[path]
2557 fctx = ctx[path]
2558 flags = fctx.flags()
2558 flags = fctx.flags()
2559 mctx = context.memfilectx(repo,
2559 mctx = context.memfilectx(repo,
2560 fctx.path(), fctx.data(),
2560 fctx.path(), fctx.data(),
2561 islink='l' in flags,
2561 islink='l' in flags,
2562 isexec='x' in flags,
2562 isexec='x' in flags,
2563 copied=copied.get(path))
2563 copied=copied.get(path))
2564 return mctx
2564 return mctx
2565 except KeyError:
2565 except KeyError:
2566 return None
2566 return None
2567 else:
2567 else:
2568 ui.note(_('copying changeset %s to %s\n') % (old, base))
2568 ui.note(_('copying changeset %s to %s\n') % (old, base))
2569
2569
2570 # Use version of files as in the old cset
2570 # Use version of files as in the old cset
2571 def filectxfn(repo, ctx_, path):
2571 def filectxfn(repo, ctx_, path):
2572 try:
2572 try:
2573 return old.filectx(path)
2573 return old.filectx(path)
2574 except KeyError:
2574 except KeyError:
2575 return None
2575 return None
2576
2576
2577 user = opts.get('user') or old.user()
2577 user = opts.get('user') or old.user()
2578 date = opts.get('date') or old.date()
2578 date = opts.get('date') or old.date()
2579 editform = mergeeditform(old, 'commit.amend')
2579 editform = mergeeditform(old, 'commit.amend')
2580 editor = getcommiteditor(editform=editform, **opts)
2580 editor = getcommiteditor(editform=editform, **opts)
2581 if not message:
2581 if not message:
2582 editor = getcommiteditor(edit=True, editform=editform)
2582 editor = getcommiteditor(edit=True, editform=editform)
2583 message = old.description()
2583 message = old.description()
2584
2584
2585 pureextra = extra.copy()
2585 pureextra = extra.copy()
2586 extra['amend_source'] = old.hex()
2586 extra['amend_source'] = old.hex()
2587
2587
2588 new = context.memctx(repo,
2588 new = context.memctx(repo,
2589 parents=[base.node(), old.p2().node()],
2589 parents=[base.node(), old.p2().node()],
2590 text=message,
2590 text=message,
2591 files=files,
2591 files=files,
2592 filectxfn=filectxfn,
2592 filectxfn=filectxfn,
2593 user=user,
2593 user=user,
2594 date=date,
2594 date=date,
2595 extra=extra,
2595 extra=extra,
2596 editor=editor)
2596 editor=editor)
2597
2597
2598 newdesc = changelog.stripdesc(new.description())
2598 newdesc = changelog.stripdesc(new.description())
2599 if ((not node)
2599 if ((not node)
2600 and newdesc == old.description()
2600 and newdesc == old.description()
2601 and user == old.user()
2601 and user == old.user()
2602 and date == old.date()
2602 and date == old.date()
2603 and pureextra == old.extra()):
2603 and pureextra == old.extra()):
2604 # nothing changed. continuing here would create a new node
2604 # nothing changed. continuing here would create a new node
2605 # anyway because of the amend_source noise.
2605 # anyway because of the amend_source noise.
2606 #
2606 #
2607 # This is not what we expect from amend.
2607 # This is not what we expect from amend.
2608 return old.node()
2608 return old.node()
2609
2609
2610 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2610 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2611 try:
2611 try:
2612 if opts.get('secret'):
2612 if opts.get('secret'):
2613 commitphase = 'secret'
2613 commitphase = 'secret'
2614 else:
2614 else:
2615 commitphase = old.phase()
2615 commitphase = old.phase()
2616 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2616 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2617 newid = repo.commitctx(new)
2617 newid = repo.commitctx(new)
2618 finally:
2618 finally:
2619 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2619 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2620 if newid != old.node():
2620 if newid != old.node():
2621 # Reroute the working copy parent to the new changeset
2621 # Reroute the working copy parent to the new changeset
2622 repo.setparents(newid, nullid)
2622 repo.setparents(newid, nullid)
2623
2623
2624 # Move bookmarks from old parent to amend commit
2624 # Move bookmarks from old parent to amend commit
2625 bms = repo.nodebookmarks(old.node())
2625 bms = repo.nodebookmarks(old.node())
2626 if bms:
2626 if bms:
2627 marks = repo._bookmarks
2627 marks = repo._bookmarks
2628 for bm in bms:
2628 for bm in bms:
2629 marks[bm] = newid
2629 marks[bm] = newid
2630 marks.write()
2630 marks.write()
2631 # commit the whole amend process
2631 # commit the whole amend process
2632 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2632 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2633 if createmarkers and newid != old.node():
2633 if createmarkers and newid != old.node():
2634 # mark the new changeset as successor of the rewritten one
2634 # mark the new changeset as successor of the rewritten one
2635 new = repo[newid]
2635 new = repo[newid]
2636 obs = [(old, (new,))]
2636 obs = [(old, (new,))]
2637 if node:
2637 if node:
2638 obs.append((ctx, ()))
2638 obs.append((ctx, ()))
2639
2639
2640 obsolete.createmarkers(repo, obs)
2640 obsolete.createmarkers(repo, obs)
2641 tr.close()
2641 tr.close()
2642 finally:
2642 finally:
2643 tr.release()
2643 tr.release()
2644 if not createmarkers and newid != old.node():
2644 if not createmarkers and newid != old.node():
2645 # Strip the intermediate commit (if there was one) and the amended
2645 # Strip the intermediate commit (if there was one) and the amended
2646 # commit
2646 # commit
2647 if node:
2647 if node:
2648 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2648 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2649 ui.note(_('stripping amended changeset %s\n') % old)
2649 ui.note(_('stripping amended changeset %s\n') % old)
2650 repair.strip(ui, repo, old.node(), topic='amend-backup')
2650 repair.strip(ui, repo, old.node(), topic='amend-backup')
2651 finally:
2651 finally:
2652 if newid is None:
2652 if newid is None:
2653 repo.dirstate.invalidate()
2653 repo.dirstate.invalidate()
2654 lockmod.release(lock, wlock)
2654 lockmod.release(lock, wlock)
2655 return newid
2655 return newid
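# Editorial note (a sketch of the cleanup paths above, not original text):
# when changeset evolution is enabled, the amended changeset (and the
# temporary intermediate commit, if any) are marked obsolete in favour of
# the new one; otherwise they are stripped, with repair.strip() keeping a
# backup bundle under the 'amend-backup' topic.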
2656
2656
2657 def commiteditor(repo, ctx, subs, editform=''):
2657 def commiteditor(repo, ctx, subs, editform=''):
2658 if ctx.description():
2658 if ctx.description():
2659 return ctx.description()
2659 return ctx.description()
2660 return commitforceeditor(repo, ctx, subs, editform=editform)
2660 return commitforceeditor(repo, ctx, subs, editform=editform)
2661
2661
2662 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2662 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2663 editform=''):
2663 editform=''):
2664 if not extramsg:
2664 if not extramsg:
2665 extramsg = _("Leave message empty to abort commit.")
2665 extramsg = _("Leave message empty to abort commit.")
2666
2666
2667 forms = [e for e in editform.split('.') if e]
2667 forms = [e for e in editform.split('.') if e]
2668 forms.insert(0, 'changeset')
2668 forms.insert(0, 'changeset')
2669 while forms:
2669 while forms:
2670 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2670 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2671 if tmpl:
2671 if tmpl:
2672 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2672 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2673 break
2673 break
2674 forms.pop()
2674 forms.pop()
2675 else:
2675 else:
2676 committext = buildcommittext(repo, ctx, subs, extramsg)
2676 committext = buildcommittext(repo, ctx, subs, extramsg)
2677
2677
2678 # run editor in the repository root
2678 # run editor in the repository root
2679 olddir = os.getcwd()
2679 olddir = os.getcwd()
2680 os.chdir(repo.root)
2680 os.chdir(repo.root)
2681 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2681 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2682 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2682 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2683 os.chdir(olddir)
2683 os.chdir(olddir)
2684
2684
2685 if finishdesc:
2685 if finishdesc:
2686 text = finishdesc(text)
2686 text = finishdesc(text)
2687 if not text.strip():
2687 if not text.strip():
2688 raise util.Abort(_("empty commit message"))
2688 raise util.Abort(_("empty commit message"))
2689
2689
2690 return text
2690 return text
2691
2691
2692 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2692 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2693 ui = repo.ui
2693 ui = repo.ui
2694 tmpl, mapfile = gettemplate(ui, tmpl, None)
2694 tmpl, mapfile = gettemplate(ui, tmpl, None)
2695
2695
2696 try:
2696 try:
2697 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2697 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2698 except SyntaxError, inst:
2698 except SyntaxError, inst:
2699 raise util.Abort(inst.args[0])
2699 raise util.Abort(inst.args[0])
2700
2700
2701 for k, v in repo.ui.configitems('committemplate'):
2701 for k, v in repo.ui.configitems('committemplate'):
2702 if k != 'changeset':
2702 if k != 'changeset':
2703 t.t.cache[k] = v
2703 t.t.cache[k] = v
2704
2704
2705 if not extramsg:
2705 if not extramsg:
2706 extramsg = '' # ensure that extramsg is string
2706 extramsg = '' # ensure that extramsg is string
2707
2707
2708 ui.pushbuffer()
2708 ui.pushbuffer()
2709 t.show(ctx, extramsg=extramsg)
2709 t.show(ctx, extramsg=extramsg)
2710 return ui.popbuffer()
2710 return ui.popbuffer()
2711
2711
2712 def buildcommittext(repo, ctx, subs, extramsg):
2712 def buildcommittext(repo, ctx, subs, extramsg):
2713 edittext = []
2713 edittext = []
2714 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2714 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2715 if ctx.description():
2715 if ctx.description():
2716 edittext.append(ctx.description())
2716 edittext.append(ctx.description())
2717 edittext.append("")
2717 edittext.append("")
2718 edittext.append("") # Empty line between message and comments.
2718 edittext.append("") # Empty line between message and comments.
2719 edittext.append(_("HG: Enter commit message."
2719 edittext.append(_("HG: Enter commit message."
2720 " Lines beginning with 'HG:' are removed."))
2720 " Lines beginning with 'HG:' are removed."))
2721 edittext.append("HG: %s" % extramsg)
2721 edittext.append("HG: %s" % extramsg)
2722 edittext.append("HG: --")
2722 edittext.append("HG: --")
2723 edittext.append(_("HG: user: %s") % ctx.user())
2723 edittext.append(_("HG: user: %s") % ctx.user())
2724 if ctx.p2():
2724 if ctx.p2():
2725 edittext.append(_("HG: branch merge"))
2725 edittext.append(_("HG: branch merge"))
2726 if ctx.branch():
2726 if ctx.branch():
2727 edittext.append(_("HG: branch '%s'") % ctx.branch())
2727 edittext.append(_("HG: branch '%s'") % ctx.branch())
2728 if bookmarks.iscurrent(repo):
2728 if bookmarks.iscurrent(repo):
2729 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2729 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2730 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2730 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2731 edittext.extend([_("HG: added %s") % f for f in added])
2731 edittext.extend([_("HG: added %s") % f for f in added])
2732 edittext.extend([_("HG: changed %s") % f for f in modified])
2732 edittext.extend([_("HG: changed %s") % f for f in modified])
2733 edittext.extend([_("HG: removed %s") % f for f in removed])
2733 edittext.extend([_("HG: removed %s") % f for f in removed])
2734 if not added and not modified and not removed:
2734 if not added and not modified and not removed:
2735 edittext.append(_("HG: no files changed"))
2735 edittext.append(_("HG: no files changed"))
2736 edittext.append("")
2736 edittext.append("")
2737
2737
2738 return "\n".join(edittext)
2738 return "\n".join(edittext)
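# Illustrative example (editorial addition, assumed file names): for a
# changeset that adds 'newfile.txt' and modifies 'oldfile.txt' on branch
# 'default' with the default extramsg, the text built above looks roughly
# like:
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Alice <alice@example.com>
#   HG: branch 'default'
#   HG: added newfile.txt
#   HG: changed oldfile.txt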
2739
2739
2740 def commitstatus(repo, node, branch, bheads=None, opts={}):
2740 def commitstatus(repo, node, branch, bheads=None, opts={}):
2741 ctx = repo[node]
2741 ctx = repo[node]
2742 parents = ctx.parents()
2742 parents = ctx.parents()
2743
2743
2744 if (not opts.get('amend') and bheads and node not in bheads and not
2744 if (not opts.get('amend') and bheads and node not in bheads and not
2745 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2745 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2746 repo.ui.status(_('created new head\n'))
2746 repo.ui.status(_('created new head\n'))
2747 # The message is not printed for initial roots. For the other
2747 # The message is not printed for initial roots. For the other
2748 # changesets, it is printed in the following situations:
2748 # changesets, it is printed in the following situations:
2749 #
2749 #
2750 # Par column: for the 2 parents with ...
2750 # Par column: for the 2 parents with ...
2751 # N: null or no parent
2751 # N: null or no parent
2752 # B: parent is on another named branch
2752 # B: parent is on another named branch
2753 # C: parent is a regular non head changeset
2753 # C: parent is a regular non head changeset
2754 # H: parent was a branch head of the current branch
2754 # H: parent was a branch head of the current branch
2755 # Msg column: whether we print "created new head" message
2755 # Msg column: whether we print "created new head" message
2756 # In the following, it is assumed that there already exists some
2756 # In the following, it is assumed that there already exists some
2757 # initial branch heads of the current branch, otherwise nothing is
2757 # initial branch heads of the current branch, otherwise nothing is
2758 # printed anyway.
2758 # printed anyway.
2759 #
2759 #
2760 # Par Msg Comment
2760 # Par Msg Comment
2761 # N N y additional topo root
2761 # N N y additional topo root
2762 #
2762 #
2763 # B N y additional branch root
2763 # B N y additional branch root
2764 # C N y additional topo head
2764 # C N y additional topo head
2765 # H N n usual case
2765 # H N n usual case
2766 #
2766 #
2767 # B B y weird additional branch root
2767 # B B y weird additional branch root
2768 # C B y branch merge
2768 # C B y branch merge
2769 # H B n merge with named branch
2769 # H B n merge with named branch
2770 #
2770 #
2771 # C C y additional head from merge
2771 # C C y additional head from merge
2772 # C H n merge with a head
2772 # C H n merge with a head
2773 #
2773 #
2774 # H H n head merge: head count decreases
2774 # H H n head merge: head count decreases
2775
2775
2776 if not opts.get('close_branch'):
2776 if not opts.get('close_branch'):
2777 for r in parents:
2777 for r in parents:
2778 if r.closesbranch() and r.branch() == branch:
2778 if r.closesbranch() and r.branch() == branch:
2779 repo.ui.status(_('reopening closed branch head %d\n') % r)
2779 repo.ui.status(_('reopening closed branch head %d\n') % r)
2780
2780
2781 if repo.ui.debugflag:
2781 if repo.ui.debugflag:
2782 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2782 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2783 elif repo.ui.verbose:
2783 elif repo.ui.verbose:
2784 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2784 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
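# Minimal sketch (editorial addition) of the heuristic used above: "created
# new head" is printed when the new node is not already a recorded branch
# head and none of its parents were heads of the same named branch (amends
# are excluded by the caller above).
def _createsnewhead(repo, node, branch, bheads):
    parents = repo[node].parents()
    return bool(bheads) and node not in bheads and not any(
        p.node() in bheads and p.branch() == branch for p in parents)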
2785
2785
2786 def revert(ui, repo, ctx, parents, *pats, **opts):
2786 def revert(ui, repo, ctx, parents, *pats, **opts):
2787 parent, p2 = parents
2787 parent, p2 = parents
2788 node = ctx.node()
2788 node = ctx.node()
2789
2789
2790 mf = ctx.manifest()
2790 mf = ctx.manifest()
2791 if node == p2:
2791 if node == p2:
2792 parent = p2
2792 parent = p2
2793 if node == parent:
2793 if node == parent:
2794 pmf = mf
2794 pmf = mf
2795 else:
2795 else:
2796 pmf = None
2796 pmf = None
2797
2797
2798 # need all matching names in dirstate and manifest of target rev,
2798 # need all matching names in dirstate and manifest of target rev,
2799 # so have to walk both. do not print errors if files exist in one
2799 # so have to walk both. do not print errors if files exist in one
2800 # but not the other.
2800 # but not the other.
2801
2801
2802 # `names` is a mapping for all elements in working copy and target revision
2802 # `names` is a mapping for all elements in working copy and target revision
2803 # The mapping is in the form:
2803 # The mapping is in the form:
2804 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2804 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2805 names = {}
2805 names = {}
2806
2806
2807 wlock = repo.wlock()
2807 wlock = repo.wlock()
2808 try:
2808 try:
2809 ## filling of the `names` mapping
2809 ## filling of the `names` mapping
2810 # walk dirstate to fill `names`
2810 # walk dirstate to fill `names`
2811
2811
2812 m = scmutil.match(repo[None], pats, opts)
2812 m = scmutil.match(repo[None], pats, opts)
2813 if not m.always() or node != parent:
2813 if not m.always() or node != parent:
2814 m.bad = lambda x, y: False
2814 m.bad = lambda x, y: False
2815 for abs in repo.walk(m):
2815 for abs in repo.walk(m):
2816 names[abs] = m.rel(abs), m.exact(abs)
2816 names[abs] = m.rel(abs), m.exact(abs)
2817
2817
2818 # walk target manifest to fill `names`
2818 # walk target manifest to fill `names`
2819
2819
2820 def badfn(path, msg):
2820 def badfn(path, msg):
2821 if path in names:
2821 if path in names:
2822 return
2822 return
2823 if path in ctx.substate:
2823 if path in ctx.substate:
2824 return
2824 return
2825 path_ = path + '/'
2825 path_ = path + '/'
2826 for f in names:
2826 for f in names:
2827 if f.startswith(path_):
2827 if f.startswith(path_):
2828 return
2828 return
2829 ui.warn("%s: %s\n" % (m.rel(path), msg))
2829 ui.warn("%s: %s\n" % (m.rel(path), msg))
2830
2830
2831 m = scmutil.match(ctx, pats, opts)
2832 m.bad = badfn
2831 m.bad = badfn
2833 for abs in ctx.walk(m):
2832 for abs in ctx.walk(m):
2834 if abs not in names:
2833 if abs not in names:
2835 names[abs] = m.rel(abs), m.exact(abs)
2834 names[abs] = m.rel(abs), m.exact(abs)
2836
2835
2837 # Find the status of all files in `names`.
2836 # Find the status of all files in `names`.
2838 m = scmutil.matchfiles(repo, names)
2837 m = scmutil.matchfiles(repo, names)
2839
2838
2840 changes = repo.status(node1=node, match=m,
2839 changes = repo.status(node1=node, match=m,
2841 unknown=True, ignored=True, clean=True)
2840 unknown=True, ignored=True, clean=True)
2842 else:
2841 else:
2843 changes = repo.status(match=m)
2842 changes = repo.status(match=m)
2844 for kind in changes:
2843 for kind in changes:
2845 for abs in kind:
2844 for abs in kind:
2846 names[abs] = m.rel(abs), m.exact(abs)
2845 names[abs] = m.rel(abs), m.exact(abs)
2847
2846
2848 m = scmutil.matchfiles(repo, names)
2847 m = scmutil.matchfiles(repo, names)
2849
2848
2850 modified = set(changes.modified)
2849 modified = set(changes.modified)
2851 added = set(changes.added)
2850 added = set(changes.added)
2852 removed = set(changes.removed)
2851 removed = set(changes.removed)
2853 _deleted = set(changes.deleted)
2852 _deleted = set(changes.deleted)
2854 unknown = set(changes.unknown)
2853 unknown = set(changes.unknown)
2855 unknown.update(changes.ignored)
2854 unknown.update(changes.ignored)
2856 clean = set(changes.clean)
2855 clean = set(changes.clean)
2857 modadded = set()
2856 modadded = set()
2858
2857
2859 # split between files known in target manifest and the others
2858 # split between files known in target manifest and the others
2860 smf = set(mf)
2859 smf = set(mf)
2861
2860
2862 # determine the exact nature of the deleted files
2861 # determine the exact nature of the deleted files
2863 deladded = _deleted - smf
2862 deladded = _deleted - smf
2864 deleted = _deleted - deladded
2863 deleted = _deleted - deladded
2865
2864
2866 # We need to account for the state of the file in the dirstate,
2865 # We need to account for the state of the file in the dirstate,
2867 # even when we revert against something other than the parent. This will
2866 # even when we revert against something other than the parent. This will
2868 # slightly alter the behavior of revert (doing a backup or not, deleting
2867 # slightly alter the behavior of revert (doing a backup or not, deleting
2869 # or just forgetting, etc).
2868 # or just forgetting, etc).
2870 if parent == node:
2869 if parent == node:
2871 dsmodified = modified
2870 dsmodified = modified
2872 dsadded = added
2871 dsadded = added
2873 dsremoved = removed
2872 dsremoved = removed
2874 # store all local modifications, useful later for rename detection
2873 # store all local modifications, useful later for rename detection
2875 localchanges = dsmodified | dsadded
2874 localchanges = dsmodified | dsadded
2876 modified, added, removed = set(), set(), set()
2875 modified, added, removed = set(), set(), set()
2877 else:
2876 else:
2878 changes = repo.status(node1=parent, match=m)
2877 changes = repo.status(node1=parent, match=m)
2879 dsmodified = set(changes.modified)
2878 dsmodified = set(changes.modified)
2880 dsadded = set(changes.added)
2879 dsadded = set(changes.added)
2881 dsremoved = set(changes.removed)
2880 dsremoved = set(changes.removed)
2882 # store all local modifications, useful later for rename detection
2881 # store all local modifications, useful later for rename detection
2883 localchanges = dsmodified | dsadded
2882 localchanges = dsmodified | dsadded
2884
2883
2885 # only take removes between wc and target into account
2884 # only take removes between wc and target into account
2886 clean |= dsremoved - removed
2885 clean |= dsremoved - removed
2887 dsremoved &= removed
2886 dsremoved &= removed
2888 # distinguish between dirstate removes and the others
2887 # distinguish between dirstate removes and the others
2889 removed -= dsremoved
2888 removed -= dsremoved
2890
2889
2891 modadded = added & dsmodified
2890 modadded = added & dsmodified
2892 added -= modadded
2891 added -= modadded
2893
2892
2894 # tell newly modified files apart.
2893 # tell newly modified files apart.
2895 dsmodified &= modified
2894 dsmodified &= modified
2896 dsmodified |= modified & dsadded # dirstate-added files may need a backup
2895 dsmodified |= modified & dsadded # dirstate-added files may need a backup
2897 modified -= dsmodified
2896 modified -= dsmodified
2898
2897
2899 # We need to wait for some post-processing to update this set
2898 # We need to wait for some post-processing to update this set
2900 # before making the distinction. The dirstate will be used for
2899 # before making the distinction. The dirstate will be used for
2901 # that purpose.
2900 # that purpose.
2902 dsadded = added
2901 dsadded = added
2903
2902
2904 # in case of merge, files that are actually added can be reported as
2903 # in case of merge, files that are actually added can be reported as
2905 # modified; we need to post-process the result
2904 # modified; we need to post-process the result
2906 if p2 != nullid:
2905 if p2 != nullid:
2907 if pmf is None:
2906 if pmf is None:
2908 # only need parent manifest in the merge case,
2907 # only need parent manifest in the merge case,
2909 # so do not read by default
2908 # so do not read by default
2910 pmf = repo[parent].manifest()
2909 pmf = repo[parent].manifest()
2911 mergeadd = dsmodified - set(pmf)
2910 mergeadd = dsmodified - set(pmf)
2912 dsadded |= mergeadd
2911 dsadded |= mergeadd
2913 dsmodified -= mergeadd
2912 dsmodified -= mergeadd
2914
2913
2915 # if f is a rename, update `names` to also revert the source
2914 # if f is a rename, update `names` to also revert the source
2916 cwd = repo.getcwd()
2915 cwd = repo.getcwd()
2917 for f in localchanges:
2916 for f in localchanges:
2918 src = repo.dirstate.copied(f)
2917 src = repo.dirstate.copied(f)
2919 # XXX should we check for rename down to target node?
2918 # XXX should we check for rename down to target node?
2920 if src and src not in names and repo.dirstate[src] == 'r':
2919 if src and src not in names and repo.dirstate[src] == 'r':
2921 dsremoved.add(src)
2920 dsremoved.add(src)
2922 names[src] = (repo.pathto(src, cwd), True)
2921 names[src] = (repo.pathto(src, cwd), True)
2923
2922
2924 # distinguish between files to forget and the others
2923 # distinguish between files to forget and the others
2925 added = set()
2924 added = set()
2926 for abs in dsadded:
2925 for abs in dsadded:
2927 if repo.dirstate[abs] != 'a':
2926 if repo.dirstate[abs] != 'a':
2928 added.add(abs)
2927 added.add(abs)
2929 dsadded -= added
2928 dsadded -= added
2930
2929
2931 for abs in deladded:
2930 for abs in deladded:
2932 if repo.dirstate[abs] == 'a':
2931 if repo.dirstate[abs] == 'a':
2933 dsadded.add(abs)
2932 dsadded.add(abs)
2934 deladded -= dsadded
2933 deladded -= dsadded
2935
2934
2936 # For files marked as removed, we check if an unknown file is present at
2935 # For files marked as removed, we check if an unknown file is present at
2937 # the same path. If such a file exists it may need to be backed up.
2936 # the same path. If such a file exists it may need to be backed up.
2938 # Making the distinction at this stage helps keep the backup
2937 # Making the distinction at this stage helps keep the backup
2939 # logic.
2938 # logic.
2940 removunk = set()
2939 removunk = set()
2941 for abs in removed:
2940 for abs in removed:
2942 target = repo.wjoin(abs)
2941 target = repo.wjoin(abs)
2943 if os.path.lexists(target):
2942 if os.path.lexists(target):
2944 removunk.add(abs)
2943 removunk.add(abs)
2945 removed -= removunk
2944 removed -= removunk
2946
2945
2947 dsremovunk = set()
2946 dsremovunk = set()
2948 for abs in dsremoved:
2947 for abs in dsremoved:
2949 target = repo.wjoin(abs)
2948 target = repo.wjoin(abs)
2950 if os.path.lexists(target):
2949 if os.path.lexists(target):
2951 dsremovunk.add(abs)
2950 dsremovunk.add(abs)
2952 dsremoved -= dsremovunk
2951 dsremoved -= dsremovunk
2953
2952
2954 # action to be actually performed by revert
2953 # action to be actually performed by revert
2955 # (<list of files>, <message>) tuple
2954 # (<list of files>, <message>) tuple
2956 actions = {'revert': ([], _('reverting %s\n')),
2955 actions = {'revert': ([], _('reverting %s\n')),
2957 'add': ([], _('adding %s\n')),
2956 'add': ([], _('adding %s\n')),
2958 'remove': ([], _('removing %s\n')),
2957 'remove': ([], _('removing %s\n')),
2959 'drop': ([], _('removing %s\n')),
2958 'drop': ([], _('removing %s\n')),
2960 'forget': ([], _('forgetting %s\n')),
2959 'forget': ([], _('forgetting %s\n')),
2961 'undelete': ([], _('undeleting %s\n')),
2960 'undelete': ([], _('undeleting %s\n')),
2962 'noop': (None, _('no changes needed to %s\n')),
2961 'noop': (None, _('no changes needed to %s\n')),
2963 'unknown': (None, _('file not managed: %s\n')),
2962 'unknown': (None, _('file not managed: %s\n')),
2964 }
2963 }
2965
2964
2966 # "constant" that convey the backup strategy.
2965 # "constant" that convey the backup strategy.
2967 # All set to `discard` if `no-backup` is set do avoid checking
2966 # All set to `discard` if `no-backup` is set do avoid checking
2968 # no_backup lower in the code.
2967 # no_backup lower in the code.
2969 # These values are ordered for comparison purposes
2968 # These values are ordered for comparison purposes
2970 backup = 2 # unconditionally do backup
2969 backup = 2 # unconditionally do backup
2971 check = 1 # check if the existing file differs from target
2970 check = 1 # check if the existing file differs from target
2972 discard = 0 # never do backup
2971 discard = 0 # never do backup
2973 if opts.get('no_backup'):
2972 if opts.get('no_backup'):
2974 backup = check = discard
2973 backup = check = discard
2975
2974
2976 backupanddel = actions['remove']
2975 backupanddel = actions['remove']
2977 if not opts.get('no_backup'):
2976 if not opts.get('no_backup'):
2978 backupanddel = actions['drop']
2977 backupanddel = actions['drop']
2979
2978
2980 disptable = (
2979 disptable = (
2981 # dispatch table:
2980 # dispatch table:
2982 # file state
2981 # file state
2983 # action
2982 # action
2984 # make backup
2983 # make backup
2985
2984
2986 ## Sets that results that will change file on disk
2985 ## Sets that results that will change file on disk
2987 # Modified compared to target, no local change
2986 # Modified compared to target, no local change
2988 (modified, actions['revert'], discard),
2987 (modified, actions['revert'], discard),
2989 # Modified compared to target, but local file is deleted
2988 # Modified compared to target, but local file is deleted
2990 (deleted, actions['revert'], discard),
2989 (deleted, actions['revert'], discard),
2991 # Modified compared to target, local change
2990 # Modified compared to target, local change
2992 (dsmodified, actions['revert'], backup),
2991 (dsmodified, actions['revert'], backup),
2993 # Added since target
2992 # Added since target
2994 (added, actions['remove'], discard),
2993 (added, actions['remove'], discard),
2995 # Added in working directory
2994 # Added in working directory
2996 (dsadded, actions['forget'], discard),
2995 (dsadded, actions['forget'], discard),
2997 # Added since target, have local modification
2996 # Added since target, have local modification
2998 (modadded, backupanddel, backup),
2997 (modadded, backupanddel, backup),
2999 # Added since target but file is missing in working directory
2998 # Added since target but file is missing in working directory
3000 (deladded, actions['drop'], discard),
2999 (deladded, actions['drop'], discard),
3001 # Removed since target, before working copy parent
3000 # Removed since target, before working copy parent
3002 (removed, actions['add'], discard),
3001 (removed, actions['add'], discard),
3003 # Same as `removed` but an unknown file exists at the same path
3002 # Same as `removed` but an unknown file exists at the same path
3004 (removunk, actions['add'], check),
3003 (removunk, actions['add'], check),
3005 # Removed since targe, marked as such in working copy parent
3004 # Removed since targe, marked as such in working copy parent
3006 (dsremoved, actions['undelete'], discard),
3005 (dsremoved, actions['undelete'], discard),
3007 # Same as `dsremoved` but an unknown file exists at the same path
3006 # Same as `dsremoved` but an unknown file exists at the same path
3008 (dsremovunk, actions['undelete'], check),
3007 (dsremovunk, actions['undelete'], check),
3009 ## the following sets does not result in any file changes
3008 ## the following sets does not result in any file changes
3010 # File with no modification
3009 # File with no modification
3011 (clean, actions['noop'], discard),
3010 (clean, actions['noop'], discard),
3012 # Existing file, not tracked anywhere
3011 # Existing file, not tracked anywhere
3013 (unknown, actions['unknown'], discard),
3012 (unknown, actions['unknown'], discard),
3014 )
3013 )
3015
3014
3016 wctx = repo[None]
3015 wctx = repo[None]
3017 for abs, (rel, exact) in sorted(names.items()):
3016 for abs, (rel, exact) in sorted(names.items()):
3018 # target file to be touch on disk (relative to cwd)
3017 # target file to be touch on disk (relative to cwd)
3019 target = repo.wjoin(abs)
3018 target = repo.wjoin(abs)
3020 # search the entry in the dispatch table.
3019 # search the entry in the dispatch table.
3021 # if the file is in any of these sets, it was touched in the working
3020 # if the file is in any of these sets, it was touched in the working
3022 # directory parent and we are sure it needs to be reverted.
3021 # directory parent and we are sure it needs to be reverted.
3023 for table, (xlist, msg), dobackup in disptable:
3022 for table, (xlist, msg), dobackup in disptable:
3024 if abs not in table:
3023 if abs not in table:
3025 continue
3024 continue
3026 if xlist is not None:
3025 if xlist is not None:
3027 xlist.append(abs)
3026 xlist.append(abs)
3028 if dobackup and (backup <= dobackup
3027 if dobackup and (backup <= dobackup
3029 or wctx[abs].cmp(ctx[abs])):
3028 or wctx[abs].cmp(ctx[abs])):
3030 bakname = "%s.orig" % rel
3029 bakname = "%s.orig" % rel
3031 ui.note(_('saving current version of %s as %s\n') %
3030 ui.note(_('saving current version of %s as %s\n') %
3032 (rel, bakname))
3031 (rel, bakname))
3033 if not opts.get('dry_run'):
3032 if not opts.get('dry_run'):
3034 util.rename(target, bakname)
3033 util.rename(target, bakname)
3035 if ui.verbose or not exact:
3034 if ui.verbose or not exact:
3036 if not isinstance(msg, basestring):
3035 if not isinstance(msg, basestring):
3037 msg = msg(abs)
3036 msg = msg(abs)
3038 ui.status(msg % rel)
3037 ui.status(msg % rel)
3039 elif exact:
3038 elif exact:
3040 ui.warn(msg % rel)
3039 ui.warn(msg % rel)
3041 break
3040 break
3042
3041
3043
3042
3044 if not opts.get('dry_run'):
3043 if not opts.get('dry_run'):
3045 needdata = ('revert', 'add', 'undelete')
3044 needdata = ('revert', 'add', 'undelete')
3046 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3045 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3047 interactive = opts.get('interactive', False)
3046 interactive = opts.get('interactive', False)
3048 _performrevert(repo, parents, ctx, actions, interactive)
3047 _performrevert(repo, parents, ctx, actions, interactive)
3049
3048
3050 # get the list of subrepos that must be reverted
3049 # get the list of subrepos that must be reverted
3051 subrepomatch = scmutil.match(ctx, pats, opts)
3050 subrepomatch = scmutil.match(ctx, pats, opts)
3052 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
3051 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
3053
3052
3054 if targetsubs:
3053 if targetsubs:
3055 # Revert the subrepos on the revert list
3054 # Revert the subrepos on the revert list
3056 for sub in targetsubs:
3055 for sub in targetsubs:
3057 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3056 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3058 finally:
3057 finally:
3059 wlock.release()
3058 wlock.release()
3060
3059
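Editor's note on the dispatch table above: each path is tested against the state sets in table order, and the first set that contains it decides both the action and the backup policy. A minimal, self-contained sketch of that first-match pattern (toy sets and action names invented for this example, not Mercurial's real ones):

# Toy illustration of a first-match dispatch table; every name here is
# hypothetical and exists only for this example.
modified = {'a.txt'}
dsadded = {'b.txt'}
clean = {'c.txt'}

disptable = (
    (modified, 'revert', 'backup'),   # modified files are reverted, with backup
    (dsadded, 'forget', 'discard'),   # freshly added files are forgotten
    (clean, 'noop', 'discard'),       # clean files need nothing
)

def classify(path):
    # walk the table in order; the first matching set wins
    for fileset, action, backup in disptable:
        if path in fileset:
            return action, backup
    return 'unknown', 'discard'

assert classify('a.txt') == ('revert', 'backup')
assert classify('d.txt') == ('unknown', 'discard')
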
def _revertprefetch(repo, ctx, *files):
    """Let extensions that change the storage layer prefetch content"""
    pass

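_revertprefetch() is deliberately a no-op: it only exists as a hook point. As a hedged sketch (not code from this changeset), an extension that keeps file contents in a remote store might wrap it roughly as below; the wrapper name and debug message are invented for illustration.

# Hypothetical extension sketch: wrap cmdutil._revertprefetch so file data can
# be fetched in one batch before revert touches the working directory.
from mercurial import cmdutil, extensions

def _prefetchwrapper(orig, repo, ctx, *files):
    # 'files' holds the 'revert', 'add' and 'undelete' lists computed above
    wanted = set()
    for filelist in files:
        wanted.update(filelist)
    repo.ui.debug('prefetching %d files from %s\n' % (len(wanted), ctx))
    # ... fetch 'wanted' from the remote store here ...
    return orig(repo, ctx, *files)

def uisetup(ui):
    extensions.wrapfunction(cmdutil, '_revertprefetch', _prefetchwrapper)
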
def _performrevert(repo, parents, ctx, actions, interactive=False):
    """function that actually performs all the actions computed for revert

    This is an independent function so that extensions can plug in and react
    to the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    def checkout(f):
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        util.unlinkpath(repo.wjoin(f))
        repo.dirstate.remove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, {})
        diff = patch.diff(repo, None, ctx.node(), m)
        originalchunks = patch.parsepatch(diff)
        try:
            chunks = recordfilter(repo.ui, originalchunks)
        except patch.PatchError, err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # Apply changes
        fp = cStringIO.StringIO()
        for c in chunks:
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError, err:
                raise util.Abort(str(err))
        del fp

        for f in actions['revert'][0]:
            if normal:
                normal(f)

    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)

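The final loop restores copy/rename metadata with repo.dirstate.copy(), so a reverted rename shows up as a copy again in 'hg status -C'. A rough, hypothetical way to inspect that state from Python (assuming a repository in the current directory; not part of cmdutil):

# Hypothetical inspection snippet: list pending copy/rename records in the
# dirstate after a revert.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), '.')
for dst, src in sorted(repo.dirstate.copies().items()):
    print '%s was copied/renamed from %s' % (dst, src)
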
def command(table):
    """Returns a function object to be used as a decorator for making commands.

    This function receives a command table as its argument. The table should
    be a dict.

    The returned function can be used as a decorator for adding commands
    to that command table. This function accepts multiple arguments to define
    a command.

    The first argument is the command name.

    The options argument is an iterable of tuples defining command arguments.
    See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.

    The synopsis argument defines a short, one line summary of how to use the
    command. This shows up in the help output.

    The norepo argument defines whether the command does not require a
    local repository. Most commands operate against a repository, thus the
    default is False.

    The optionalrepo argument defines whether the command optionally requires
    a local repository.

    The inferrepo argument defines whether to try to find a repository from the
    command line arguments. If True, arguments will be examined for potential
    repository locations. See ``findrepo()``. If a repository is found, it
    will be used.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            if synopsis:
                table[name] = func, list(options), synopsis
            else:
                table[name] = func, list(options)

            if norepo:
                # Avoid import cycle.
                import commands
                commands.norepo += ' %s' % ' '.join(parsealiases(name))

            if optionalrepo:
                import commands
                commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))

            if inferrepo:
                import commands
                commands.inferrepo += ' %s' % ' '.join(parsealiases(name))

            return func
        return decorator

    return cmd

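For context, the usual way a third-party extension uses this factory is sketched below; the command name, option, and synopsis are invented for illustration and are not part of this changeset.

# Hypothetical extension sketch using cmdutil.command().
from mercurial import cmdutil
from mercurial.i18n import _

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('hello',
         [('g', 'greeting', 'hello', _('greeting to print'))],
         _('hg hello [-g TEXT]'),
         norepo=True)
def hello(ui, *args, **opts):
    """print a greeting (illustrative only)"""
    ui.write('%s\n' % opts['greeting'])
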
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return a tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

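A hook registered on summaryremotehooks is called twice by 'hg summary': once with changes=None to ask whether incoming/outgoing data is needed at all, and once with the resolved tuples described above. A hedged sketch of a conforming hook (the extension name and output message are invented):

# Hypothetical extension sketch for cmdutil.summaryremotehooks.
from mercurial import cmdutil

def _summaryremotehook(ui, repo, opts, changes):
    if changes is None:
        # first pass: (need-incoming, need-outgoing)
        return (False, True)
    source, dest = changes
    desturl, destbranch, destpeer, outgoing = dest
    if outgoing is not None:
        ui.status('myext: %d changesets not yet pushed to %s\n'
                  % (len(outgoing.missing), desturl))

def uisetup(ui):
    cmdutil.summaryremotehooks.add('myext', _summaryremotehook)
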
# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]

def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if commit and allowcommit:
            continue
        if repo.vfs.exists(f):
            raise util.Abort(msg, hint=hint)

def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(f):
            raise util.Abort(msg, hint=hint)
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(f):
            util.unlink(repo.join(f))
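Extensions that implement their own multistep commands can register here so that checkunfinished() and clearunfinished() know about them. A hedged sketch, with an invented state file and command name:

# Hypothetical registration of an extension's interrupted-operation state.
from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.unfinishedstates.append(
    ('mysyncstate', True, False, _('sync in progress'),
     _("use 'hg mysync --continue' or 'hg mysync --abort'")))
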

@@ -1,181 +1,176 @@
  $ hg init

Set up history and working copy

  $ python $TESTDIR/generate-working-copy-states.py state 2 1
  $ hg addremove -q --similarity 0
  $ hg commit -m first

  $ python $TESTDIR/generate-working-copy-states.py state 2 2
  $ hg addremove -q --similarity 0
  $ hg commit -m second

  $ python $TESTDIR/generate-working-copy-states.py state 2 wc
  $ hg addremove -q --similarity 0
  $ hg forget *_*_*-untracked
  $ rm *_*_missing-*

Test status

  $ hg st -A 'set:modified()'
  M content1_content1_content3-tracked
  M content1_content2_content1-tracked
  M content1_content2_content3-tracked
  M missing_content2_content3-tracked

  $ hg st -A 'set:added()'
  A content1_missing_content1-tracked
  A content1_missing_content3-tracked
  A missing_missing_content3-tracked

  $ hg st -A 'set:removed()'
  R content1_content1_content1-untracked
  R content1_content1_content3-untracked
  R content1_content1_missing-untracked
  R content1_content2_content1-untracked
  R content1_content2_content2-untracked
  R content1_content2_content3-untracked
  R content1_content2_missing-untracked
  R missing_content2_content2-untracked
  R missing_content2_content3-untracked
  R missing_content2_missing-untracked

  $ hg st -A 'set:deleted()'
  ! content1_content1_missing-tracked
  ! content1_content2_missing-tracked
  ! content1_missing_missing-tracked
  ! missing_content2_missing-tracked
  ! missing_missing_missing-tracked

  $ hg st -A 'set:unknown()'
  ? content1_missing_content1-untracked
  ? content1_missing_content3-untracked
  ? missing_missing_content3-untracked

  $ hg st -A 'set:clean()'
  C content1_content1_content1-tracked
  C content1_content2_content2-tracked
  C missing_content2_content2-tracked

Test log

  $ hg log -T '{rev}\n' --stat 'set:modified()'
  1
  content1_content2_content1-tracked | 2 +-
  content1_content2_content3-tracked | 2 +-
  missing_content2_content3-tracked | 1 +
  3 files changed, 3 insertions(+), 2 deletions(-)

  0
  content1_content1_content3-tracked | 1 +
  content1_content2_content1-tracked | 1 +
  content1_content2_content3-tracked | 1 +
  3 files changed, 3 insertions(+), 0 deletions(-)

  $ hg log -T '{rev}\n' --stat 'set:added()'
  1
  content1_missing_content1-tracked | 1 -
  content1_missing_content3-tracked | 1 -
  2 files changed, 0 insertions(+), 2 deletions(-)

  0
  content1_missing_content1-tracked | 1 +
  content1_missing_content3-tracked | 1 +
  2 files changed, 2 insertions(+), 0 deletions(-)

  $ hg log -T '{rev}\n' --stat 'set:removed()'
  1
  content1_content2_content1-untracked | 2 +-
  content1_content2_content2-untracked | 2 +-
  content1_content2_content3-untracked | 2 +-
  content1_content2_missing-untracked | 2 +-
  missing_content2_content2-untracked | 1 +
  missing_content2_content3-untracked | 1 +
  missing_content2_missing-untracked | 1 +
  7 files changed, 7 insertions(+), 4 deletions(-)

  0
  content1_content1_content1-untracked | 1 +
  content1_content1_content3-untracked | 1 +
  content1_content1_missing-untracked | 1 +
  content1_content2_content1-untracked | 1 +
  content1_content2_content2-untracked | 1 +
  content1_content2_content3-untracked | 1 +
  content1_content2_missing-untracked | 1 +
  7 files changed, 7 insertions(+), 0 deletions(-)

  $ hg log -T '{rev}\n' --stat 'set:deleted()'
  1
  content1_content2_missing-tracked | 2 +-
  content1_missing_missing-tracked | 1 -
  missing_content2_missing-tracked | 1 +
  3 files changed, 2 insertions(+), 2 deletions(-)

  0
  content1_content1_missing-tracked | 1 +
  content1_content2_missing-tracked | 1 +
  content1_missing_missing-tracked | 1 +
  3 files changed, 3 insertions(+), 0 deletions(-)

  $ hg log -T '{rev}\n' --stat 'set:unknown()'
  1
  content1_missing_content1-untracked | 1 -
  content1_missing_content3-untracked | 1 -
  2 files changed, 0 insertions(+), 2 deletions(-)

  0
  content1_missing_content1-untracked | 1 +
  content1_missing_content3-untracked | 1 +
  2 files changed, 2 insertions(+), 0 deletions(-)

  $ hg log -T '{rev}\n' --stat 'set:clean()'
  1
  content1_content2_content2-tracked | 2 +-
  missing_content2_content2-tracked | 1 +
  2 files changed, 2 insertions(+), 1 deletions(-)

  0
  content1_content1_content1-tracked | 1 +
  content1_content2_content2-tracked | 1 +
  2 files changed, 2 insertions(+), 0 deletions(-)

Test revert

-BROKEN: the files that get undeleted were not modified, they were removed,
-and content1_content2_missing-tracked was also not modified, it was deleted
-
  $ hg revert 'set:modified()'
  reverting content1_content1_content3-tracked
  reverting content1_content2_content1-tracked
-  undeleting content1_content2_content1-untracked
-  undeleting content1_content2_content2-untracked
  reverting content1_content2_content3-tracked
-  undeleting content1_content2_content3-untracked
-  reverting content1_content2_missing-tracked
-  undeleting content1_content2_missing-untracked
  reverting missing_content2_content3-tracked

-BROKEN: only the files that get forgotten are correct
-
  $ hg revert 'set:added()'
  forgetting content1_missing_content1-tracked
  forgetting content1_missing_content3-tracked
-  undeleting missing_content2_content2-untracked
-  undeleting missing_content2_content3-untracked
-  reverting missing_content2_missing-tracked
-  undeleting missing_content2_missing-untracked
  forgetting missing_missing_content3-tracked

  $ hg revert 'set:removed()'
  undeleting content1_content1_content1-untracked
  undeleting content1_content1_content3-untracked
  undeleting content1_content1_missing-untracked
+  undeleting content1_content2_content1-untracked
+  undeleting content1_content2_content2-untracked
+  undeleting content1_content2_content3-untracked
+  undeleting content1_content2_missing-untracked
+  undeleting missing_content2_content2-untracked
+  undeleting missing_content2_content3-untracked
+  undeleting missing_content2_missing-untracked

  $ hg revert 'set:deleted()'
  reverting content1_content1_missing-tracked
+  reverting content1_content2_missing-tracked
  forgetting content1_missing_missing-tracked
+  reverting missing_content2_missing-tracked
  forgetting missing_missing_missing-tracked

  $ hg revert 'set:unknown()'

  $ hg revert 'set:clean()'
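The revert calls above now evaluate fileset patterns such as 'set:modified()' against the working directory rather than the revert target, which is what issue4497 asked for. For reference, a rough sketch (untested, assumes a repository in the current directory; not part of this changeset) of matching a fileset against the working directory context from the Python API:

# Hypothetical sketch: match a fileset pattern against the working directory.
from mercurial import hg, scmutil, ui as uimod

repo = hg.repository(uimod.ui(), '.')
wctx = repo[None]  # working directory context, as used in cmdutil above
m = scmutil.match(wctx, ['set:modified()'], {})
for f in wctx.walk(m):
    print f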