##// END OF EJS Templates
revset: use delayregistrar to register predicate in extension easily...
FUJIWARA Katsunori -
r27586:42910f9f default
parent child Browse files
Show More
@@ -1,1433 +1,1435 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
10
11 import os
11 import os
12 import copy
12 import copy
13
13
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 archival, pathutil, revset, error
15 archival, pathutil, revset, error
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17
17
18 import lfutil
18 import lfutil
19 import lfcommands
19 import lfcommands
20 import basestore
20 import basestore
21
21
22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23
23
def composelargefilematcher(match, manifest):
    '''create a matcher that matches only the largefiles in the original
    matcher'''
    m = copy.copy(match)

    def islargefile(f):
        # a file is "large" iff its standin is tracked in the manifest
        return lfutil.standin(f) in manifest

    m._files = filter(islargefile, m._files)
    m._fileroots = set(m._files)
    m._always = False
    basematchfn = m.matchfn
    m.matchfn = lambda f: islargefile(f) and basematchfn(f)
    return m
35
35
def composenormalfilematcher(match, manifest, exclude=None):
    '''create a matcher that matches only the normal (non-large) files in
    the original matcher, optionally also rejecting the names in exclude'''
    excluded = set()
    if exclude is not None:
        excluded.update(exclude)

    m = copy.copy(match)

    def isnormalfile(f):
        # reject standins, files whose standin is tracked, and excluded names
        if lfutil.isstandin(f):
            return False
        if lfutil.standin(f) in manifest:
            return False
        return f not in excluded

    m._files = filter(isnormalfile, m._files)
    m._fileroots = set(m._files)
    m._always = False
    basematchfn = m.matchfn
    m.matchfn = lambda f: isnormalfile(f) and basematchfn(f)
    return m
50
50
def installnormalfilesmatchfn(manifest):
    '''installmatchfn with a matchfn that ignores all largefiles'''
    def overridematch(ctx, pats=(), opts=None, globbed=False,
                      default='relpath', badfn=None):
        # build the stock matcher, then strip largefiles out of it
        if opts is None:
            opts = {}
        innermatch = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
        return composenormalfilematcher(innermatch, manifest)
    oldmatch = installmatchfn(overridematch)
60
60
def installmatchfn(f):
    '''monkey patch the scmutil module with a custom match function.
    Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
    previous = scmutil.match
    # remember the replaced function on the wrapper itself so that
    # restorematchfn() can unwind one level of patching later
    f.oldmatch = previous
    scmutil.match = f
    return previous
68
68
def restorematchfn():
    '''restores scmutil.match to what it was before installmatchfn
    was called. no-op if scmutil.match is its original function.

    Note that n calls to installmatchfn will require n calls to
    restore the original matchfn.'''
    # pop one level of wrapping; the wrapper recorded its predecessor
    # in its 'oldmatch' attribute when it was installed
    scmutil.match = scmutil.match.oldmatch
76
76
def installmatchandpatsfn(f):
    '''monkey patch scmutil.matchandpats with a custom function, recording
    the replaced function on the wrapper so it can be restored later'''
    previous = scmutil.matchandpats
    f.oldmatchandpats = previous
    scmutil.matchandpats = f
    return previous
82
82
def restorematchandpatsfn():
    '''restores scmutil.matchandpats to what it was before
    installmatchandpatsfn was called. No-op if scmutil.matchandpats
    is its original function.

    Note that n calls to installmatchandpatsfn will require n calls
    to restore the original matchfn.'''
    current = scmutil.matchandpats
    # fall back to the current function itself when it was never wrapped
    scmutil.matchandpats = getattr(current, 'oldmatchandpats', current)
92
92
def addlargefiles(ui, repo, isaddremove, matcher, **opts):
    # Decide which files selected by matcher should become largefiles
    # (per --large, the configured minimum size, or configured patterns),
    # write standins for them, and add those standins to the repository.
    # Returns a pair (added, bad) of largefile name lists.
    large = opts.get('large')
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))

    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        # patterns configured to always be tracked as largefiles
        lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, '', list(lfpats))

    lfnames = []
    m = matcher

    wctx = repo[None]
    # badmatch silences complaints about paths the walk cannot resolve
    for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # addremove in core gets fancy with the name, add doesn't
        if isaddremove:
            name = m.uipath(f)
        else:
            name = m.rel(f)

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_('%s already a largefile\n') % name)
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            # In case the file was removed previously, but not committed
            # (issue3507)
            if not repo.wvfs.exists(f):
                continue

            # lfsize is in megabytes; compare against the on-disk size
            abovemin = (lfsize and
                        repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_('adding %s as a largefile\n') % name)

    bad = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get('dry_run'):
            standins = []
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                # the standin starts out empty; the real hash is filled
                # in at commit time
                standinname = lfutil.standin(f)
                lfutil.writestandin(repo, standinname, hash='',
                    executable=lfutil.getexecutable(repo.wjoin(f)))
                standins.append(standinname)
                if lfdirstate[f] == 'r':
                    # previously removed: resurrect rather than re-add
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            # files whose standin could not be added, mapped back to
            # their largefile names, restricted to explicitly listed files
            bad += [lfutil.splitstandin(f)
                    for f in repo[None].add(standins)
                    if f in m.files()]

        added = [f for f in lfnames if f not in bad]
    finally:
        wlock.release()
    return added, bad
167
167
def removelargefiles(ui, repo, isaddremove, matcher, **opts):
    # Remove the largefiles selected by matcher: warn about files that
    # cannot be removed, delete the working-copy files (unless --after),
    # then forget/remove their standins under the wlock.  Returns nonzero
    # when any file had to be skipped.
    after = opts.get('after')
    m = composelargefilematcher(matcher, repo[None].manifest())
    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=not isaddremove)
    finally:
        repo.lfstatus = False
    manifest = repo[None].manifest()
    # restrict each status list to files that really are largefiles
    modified, added, deleted, clean = [[f for f in list
                                       if lfutil.standin(f) in manifest]
                                      for list in (s.modified, s.added,
                                                   s.deleted, s.clean)]

    def warn(files, msg):
        # warn about each skipped file; report whether anything was skipped
        for f in files:
            ui.warn(msg % m.rel(f))
        return int(len(files) > 0)

    result = 0

    if after:
        remove = deleted
        result = warn(modified + added + clean,
                      _('not removing %s: file still exists\n'))
    else:
        remove = deleted + clean
        result = warn(modified, _('not removing %s: file is modified (use -f'
                                  ' to force removal)\n'))
        result = warn(added, _('not removing %s: file has been marked for add'
                               ' (use forget to undo)\n')) or result

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in sorted(remove):
            if ui.verbose or not m.exact(f):
                # addremove in core gets fancy with the name, remove doesn't
                if isaddremove:
                    name = m.uipath(f)
                else:
                    name = m.rel(f)
                ui.status(_('removing %s\n') % name)

            if not opts.get('dry_run'):
                if not after:
                    util.unlinkpath(repo.wjoin(f), ignoremissing=True)

        if opts.get('dry_run'):
            return result

        # from here on, operate on the standins rather than the largefiles
        remove = [lfutil.standin(f) for f in remove]
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not isaddremove:
            for f in remove:
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(remove)

        # keep the largefiles dirstate in sync with the standin removal
        for f in remove:
            lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
                                  False)

        lfdirstate.write()
    finally:
        wlock.release()

    return result
238
238
239 # For overriding mercurial.hgweb.webcommands so that largefiles will
239 # For overriding mercurial.hgweb.webcommands so that largefiles will
240 # appear at their right place in the manifests.
240 # appear at their right place in the manifests.
def decodepath(orig, path):
    '''map a standin path back to the largefile name it stands for;
    paths that are not standins are returned unchanged'''
    lfile = lfutil.splitstandin(path)
    if lfile:
        return lfile
    return path
243
243
244 # -- Wrappers: modify existing commands --------------------------------
244 # -- Wrappers: modify existing commands --------------------------------
245
245
def overrideadd(orig, ui, repo, *pats, **opts):
    '''wrap the add command, rejecting the contradictory combination of
    --normal and --large before delegating to the original command'''
    normal = opts.get('normal')
    large = opts.get('large')
    if normal and large:
        raise error.Abort(_('--normal cannot be used with --large'))
    return orig(ui, repo, *pats, **opts)
250
250
def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
    '''wrap cmdutil.add: add largefiles first, then let the original
    implementation handle the remaining normal files'''
    # The --normal flag short circuits this override
    if opts.get('normal'):
        return orig(ui, repo, matcher, prefix, explicitonly, **opts)

    ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
    # exclude the just-added largefiles so the normal add skips them
    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
                                             ladded)
    bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)

    bad.extend(lbad)
    return bad
263
263
def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
    '''wrap cmdutil.remove: remove normal files via the original
    implementation, then handle the largefiles ourselves'''
    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
    normalresult = orig(ui, repo, normalmatcher, prefix, after, force,
                        subrepos)
    largeresult = removelargefiles(ui, repo, False, matcher, after=after,
                                   force=force)
    return largeresult or normalresult
269
269
def overridestatusfn(orig, repo, rev2, **opts):
    '''run the wrapped subrepo status with largefile awareness enabled on
    the underlying repository; the flag is always reset afterwards'''
    inner = repo._repo
    inner.lfstatus = True
    try:
        return orig(repo, rev2, **opts)
    finally:
        inner.lfstatus = False
276
276
def overridestatus(orig, ui, repo, *pats, **opts):
    '''run the wrapped status command with largefile awareness enabled;
    the flag is always reset afterwards'''
    repo.lfstatus = True
    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
283
283
def overridedirty(orig, repo, ignoreupdate=False):
    '''report subrepo dirtiness with largefile awareness enabled on the
    underlying repository; the flag is always reset afterwards'''
    inner = repo._repo
    inner.lfstatus = True
    try:
        return orig(repo, ignoreupdate)
    finally:
        inner.lfstatus = False
290
290
def overridelog(orig, ui, repo, *pats, **opts):
    '''wrap the log command so largefiles are matched both by their
    visible name and by their standin under the .hglf/ directory'''
    def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
            default='relpath', badfn=None):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        if opts is None:
            opts = {}
        matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
                                       badfn=badfn)
        m, p = copy.copy(matchandpats)

        if m.always():
            # We want to match everything anyway, so there's no benefit trying
            # to add standins.
            return matchandpats

        pats = set(p)

        def fixpats(pat, tostandin=lfutil.standin):
            # fileset patterns are evaluated elsewhere; leave them untouched
            if pat.startswith('set:'):
                return pat

            kindpat = match_._patsplit(pat, None)

            # preserve an explicit kind prefix (glob:, re:, ...) if present
            if kindpat[0] is not None:
                return kindpat[0] + ':' + tostandin(kindpat[1])
            return tostandin(kindpat[1])

        if m._cwd:
            hglf = lfutil.shortname
            # relative prefix leading from cwd back up to the repo root
            back = util.pconvert(m.rel(hglf)[:-len(hglf)])

            def tostandin(f):
                # The file may already be a standin, so truncate the back
                # prefix and test before mangling it. This avoids turning
                # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
                if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
                    return f

                # An absolute path is from outside the repo, so truncate the
                # path to the root before building the standin. Otherwise cwd
                # is somewhere in the repo, relative to root, and needs to be
                # prepended before building the standin.
                if os.path.isabs(m._cwd):
                    f = f[len(back):]
                else:
                    f = m._cwd + '/' + f
                return back + lfutil.standin(f)

            pats.update(fixpats(f, tostandin) for f in p)
        else:
            def tostandin(f):
                # don't double-wrap a pattern that is already a standin
                if lfutil.splitstandin(f):
                    return f
                return lfutil.standin(f)
            pats.update(fixpats(f, tostandin) for f in p)

        for i in range(0, len(m._files)):
            # Don't add '.hglf' to m.files, since that is already covered by '.'
            if m._files[i] == '.':
                continue
            standin = lfutil.standin(m._files[i])
            # If the "standin" is a directory, append instead of replace to
            # support naming a directory on the command line with only
            # largefiles. The original directory is kept to support normal
            # files.
            if standin in repo[ctx.node()]:
                m._files[i] = standin
            elif m._files[i] not in repo[ctx.node()] \
                    and repo.wvfs.isdir(standin):
                m._files.append(standin)

        m._fileroots = set(m._files)
        m._always = False
        origmatchfn = m.matchfn
        def lfmatchfn(f):
            # accept a standin whenever its largefile name would match
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r
        m.matchfn = lfmatchfn

        ui.debug('updated patterns: %s\n' % sorted(pats))
        return m, pats

    # For hg log --patch, the match object is used in two different senses:
    # (1) to determine what revisions should be printed out, and
    # (2) to determine what files to print out diffs for.
    # The magic matchandpats override should be used for case (1) but not for
    # case (2).
    def overridemakelogfilematcher(repo, pats, opts, badfn=None):
        wctx = repo[None]
        match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
        return lambda rev: match

    oldmatchandpats = installmatchandpatsfn(overridematchandpats)
    oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
    setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)

    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        # always unwind both monkeypatches, even if log itself failed
        restorematchandpatsfn()
        setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
399
399
def overrideverify(orig, ui, repo, *pats, **opts):
    '''wrap verify so the largefile store can be checked too: --large
    enables the check, --lfa verifies all revisions, --lfc also verifies
    file contents'''
    large = opts.pop('large', False)
    verifyall = opts.pop('lfa', False)
    contents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if not (large or verifyall or contents):
        return result
    return result or lfcommands.verifylfiles(ui, repo, verifyall, contents)
409
409
def overridedebugstate(orig, ui, repo, *pats, **opts):
    '''run debugstate against the largefiles dirstate when --large is set,
    otherwise against the normal repository dirstate'''
    if not opts.pop('large', False):
        orig(ui, repo, *pats, **opts)
        return

    class lfrepoproxy(object):
        # minimal stand-in exposing only the dirstate attribute that
        # debugstate reads
        dirstate = lfutil.openlfdirstate(ui, repo)
    orig(ui, lfrepoproxy, *pats, **opts)
418
418
419 # Before starting the manifest merge, merge.updates will call
419 # Before starting the manifest merge, merge.updates will call
420 # _checkunknownfile to check if there are any files in the merged-in
420 # _checkunknownfile to check if there are any files in the merged-in
421 # changeset that collide with unknown files in the working copy.
421 # changeset that collide with unknown files in the working copy.
422 #
422 #
423 # The largefiles are seen as unknown, so this prevents us from merging
423 # The largefiles are seen as unknown, so this prevents us from merging
424 # in a file 'foo' if we already have a largefile with the same name.
424 # in a file 'foo' if we already have a largefile with the same name.
425 #
425 #
426 # The overridden function filters the unknown files by removing any
426 # The overridden function filters the unknown files by removing any
427 # largefiles. This makes the merge proceed and we can then handle this
427 # largefiles. This makes the merge proceed and we can then handle this
428 # case further in the overridden calculateupdates function below.
428 # case further in the overridden calculateupdates function below.
def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
    '''treat "unknown" working-copy files that are really largefiles
    (their standin exists in wctx) as non-colliding so the merge can
    proceed; everything else is deferred to the original check'''
    normalized = repo.dirstate.normalize(f)
    if lfutil.standin(normalized) in wctx:
        return False
    return origfn(repo, wctx, mctx, f, f2)
433
433
434 # The manifest merge handles conflicts on the manifest level. We want
434 # The manifest merge handles conflicts on the manifest level. We want
435 # to handle changes in largefile-ness of files at this level too.
435 # to handle changes in largefile-ness of files at this level too.
436 #
436 #
437 # The strategy is to run the original calculateupdates and then process
437 # The strategy is to run the original calculateupdates and then process
438 # the action list it outputs. There are two cases we need to deal with:
438 # the action list it outputs. There are two cases we need to deal with:
439 #
439 #
440 # 1. Normal file in p1, largefile in p2. Here the largefile is
440 # 1. Normal file in p1, largefile in p2. Here the largefile is
441 # detected via its standin file, which will enter the working copy
441 # detected via its standin file, which will enter the working copy
442 # with a "get" action. It is not "merge" since the standin is all
442 # with a "get" action. It is not "merge" since the standin is all
443 # Mercurial is concerned with at this level -- the link to the
443 # Mercurial is concerned with at this level -- the link to the
444 # existing normal file is not relevant here.
444 # existing normal file is not relevant here.
445 #
445 #
446 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
446 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
447 # since the largefile will be present in the working copy and
447 # since the largefile will be present in the working copy and
448 # different from the normal file in p2. Mercurial therefore
448 # different from the normal file in p2. Mercurial therefore
449 # triggers a merge action.
449 # triggers a merge action.
450 #
450 #
451 # In both cases, we prompt the user and emit new actions to either
451 # In both cases, we prompt the user and emit new actions to either
452 # remove the standin (if the normal file was kept) or to remove the
452 # remove the standin (if the normal file was kept) or to remove the
453 # normal file and get the standin (if the largefile was kept). The
453 # normal file and get the standin (if the largefile was kept). The
454 # default prompt answer is to use the largefile version since it was
454 # default prompt answer is to use the largefile version since it was
455 # presumably changed on purpose.
455 # presumably changed on purpose.
456 #
456 #
457 # Finally, the merge.applyupdates function will then take care of
457 # Finally, the merge.applyupdates function will then take care of
458 # writing the files into the working copy and lfcommands.updatelfiles
458 # writing the files into the working copy and lfcommands.updatelfiles
459 # will update the largefiles.
459 # will update the largefiles.
def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
                             acceptremote, followcopies, matcher=None):
    """Wrap merge.calculateupdates to reconcile largefile/standin conflicts.

    After the original calculation, every file that is a largefile in p1
    (found either directly or through its standin) is examined together
    with its standin, and conflicting actions are rewritten according to
    the two cases described in the comment block above this function.

    Action codes used here: 'g' get, 'r' remove, 'k' keep, 'a' add,
    plus the largefiles-specific 'lfmr' consumed by mergerecordupdates.
    'dc' appears to be the delete/changed-conflict action — confirm
    against mercurial/merge.py.

    Returns the (possibly mutated) (actions, diverge, renamedelete)
    triple from origfn.
    """
    overwrite = force and not branchmerge
    actions, diverge, renamedelete = origfn(
        repo, p1, p2, pas, branchmerge, force, acceptremote,
        followcopies, matcher=matcher)

    if overwrite:
        # plain overwrite (update --clean style): no conflict to mediate
        return actions, diverge, renamedelete

    # Collect the largefiles involved: f may itself be a standin (map it
    # back to the largefile name) or a file whose standin exists in p1.
    lfiles = set()
    for f in actions:
        splitstandin = f and lfutil.splitstandin(f)
        if splitstandin in p1:
            lfiles.add(splitstandin)
        elif lfutil.standin(f) in p1:
            lfiles.add(f)

    for lfile in lfiles:
        standin = lfutil.standin(lfile)
        (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
        (sm, sargs, smsg) = actions.get(standin, (None, None, None))
        if sm in ('g', 'dc') and lm != 'r':
            if sm == 'dc':
                f1, f2, fa, move, anc = sargs
                # rewrite the 'dc' args into the (flags,) shape 'g' expects
                sargs = (p2[f2].flags(),)
            # Case 1: normal file in the working copy, largefile in
            # the second parent
            usermsg = _('remote turned local normal file %s into a largefile\n'
                        'use (l)argefile or keep (n)ormal file?'
                        '$$ &Largefile $$ &Normal file') % lfile
            if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
                actions[lfile] = ('r', None, 'replaced by standin')
                actions[standin] = ('g', sargs, 'replaces standin')
            else: # keep local normal file
                actions[lfile] = ('k', None, 'replaces standin')
                if branchmerge:
                    actions[standin] = ('k', None, 'replaced by non-standin')
                else:
                    actions[standin] = ('r', None, 'replaced by non-standin')
        elif lm in ('g', 'dc') and sm != 'r':
            if lm == 'dc':
                f1, f2, fa, move, anc = largs
                # rewrite the 'dc' args into the (flags,) shape 'g' expects
                largs = (p2[f2].flags(),)
            # Case 2: largefile in the working copy, normal file in
            # the second parent
            usermsg = _('remote turned local largefile %s into a normal file\n'
                        'keep (l)argefile or use (n)ormal file?'
                        '$$ &Largefile $$ &Normal file') % lfile
            if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
                if branchmerge:
                    # largefile can be restored from standin safely
                    actions[lfile] = ('k', None, 'replaced by standin')
                    actions[standin] = ('k', None, 'replaces standin')
                else:
                    # "lfile" should be marked as "removed" without
                    # removal of itself
                    actions[lfile] = ('lfmr', None,
                                      'forget non-standin largefile')

                    # linear-merge should treat this largefile as 're-added'
                    actions[standin] = ('a', None, 'keep standin')
            else: # pick remote normal file
                actions[lfile] = ('g', largs, 'replaces standin')
                actions[standin] = ('r', None, 'replaced by non-standin')

    return actions, diverge, renamedelete
528
528
def mergerecordupdates(orig, repo, actions, branchmerge):
    """Wrap merge.recordupdates to consume the largefiles-specific
    'lfmr' action: mark each such largefile removed in the repo
    dirstate while keeping it tracked in the largefiles dirstate."""
    if 'lfmr' in actions:
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        # Must run before delegating to 'orig' so the 'remove' lands
        # ahead of every other recorded action.
        for lfile, _args, _msg in actions['lfmr']:
            repo.dirstate.remove(lfile)
            # make sure lfile doesn't get synclfdirstate'd as normal
            lfdirstate.add(lfile)
        lfdirstate.write()

    return orig(repo, actions, branchmerge)
541
541
542
542
543 # Override filemerge to prompt the user about how they wish to merge
543 # Override filemerge to prompt the user about how they wish to merge
544 # largefiles. This will handle identical edits without prompting the user.
544 # largefiles. This will handle identical edits without prompting the user.
def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
                      labels=None):
    """Merge a standin without launching an external merge tool.

    Non-standins and change/delete conflicts (either side absent) fall
    through to origfn.  Standin contents are largefile hashes, so the
    "merge" is a three-way hash comparison: identical edits resolve
    silently, and the user is prompted only on a real divergence.

    Returns (True, 0, False); the tuple shape mirrors filemerge's
    internal contract — confirm field meanings in mercurial/filemerge.py.
    """
    if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
        return origfn(premerge, repo, mynode, orig, fcd, fco, fca,
                      labels=labels)

    # a = ancestor, d = local (destination), o = other
    ahash = fca.data().strip().lower()
    dhash = fcd.data().strip().lower()
    ohash = fco.data().strip().lower()
    # Take the other side when: other changed (ohash != ahash), it differs
    # from local, and either local is unchanged (dhash == ahash, so the
    # other side's edit wins automatically) or the user picks (o)ther.
    if (ohash != ahash and
        ohash != dhash and
        (dhash == ahash or
         repo.ui.promptchoice(
             _('largefile %s has a merge conflict\nancestor was %s\n'
               'keep (l)ocal %s or\ntake (o)ther %s?'
               '$$ &Local $$ &Other') %
             (lfutil.splitstandin(orig), ahash, dhash, ohash),
             0) == 1)):
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return True, 0, False
565
565
def copiespathcopies(orig, ctx1, ctx2, match=None):
    """Wrap copies.pathcopies, translating any standin name on either
    side of a copy record back to its largefile name."""
    tolfile = lambda name: lfutil.splitstandin(name) or name
    return dict((tolfile(src), tolfile(dst))
                for src, dst in orig(ctx1, ctx2, match=match).iteritems())
574
574
575 # Copy first changes the matchers to match standins instead of
575 # Copy first changes the matchers to match standins instead of
576 # largefiles. Then it overrides util.copyfile in that function it
576 # largefiles. Then it overrides util.copyfile in that function it
577 # checks if the destination largefile already exists. It also keeps a
577 # checks if the destination largefile already exists. It also keeps a
578 # list of copied files so that the largefiles can be copied and the
578 # list of copied files so that the largefiles can be copied and the
579 # dirstate updated.
579 # dirstate updated.
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    """Wrap copy/rename so largefiles are copied through their standins.

    Runs the original command twice: first with a matcher restricted to
    normal files, then with the patterns and matcher rewritten to the
    standin directory.  util.copyfile is temporarily replaced to record
    which standins were copied so the corresponding largefiles can be
    copied/renamed and the largefile dirstate updated afterwards.
    Returns the accumulated result code of the wrapped command.
    """
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    installnormalfilesmatchfn(repo[None].manifest())
    try:
        result = orig(ui, repo, pats, opts, rename)
    except error.Abort as e:
        # "no files to copy" just means this pass had nothing to do;
        # remember that so we only abort if BOTH passes were empty.
        if str(e) != _('no files to copy'):
            raise e
        else:
            nonormalfiles = True
            result = 0
    finally:
        restorematchfn()

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    def makestandin(relpath):
        # map a working-directory-relative path to its absolute standin path
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return os.path.join(repo.wjoin(lfutil.standin(path)))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))

    try:
        # When we call orig below it creates the standins but we don't add
        # them to the dir state until later so lock during that time.
        wlock = repo.wlock()

        manifest = repo[None].manifest()
        def overridematch(ctx, pats=(), opts=None, globbed=False,
                default='relpath', badfn=None):
            if opts is None:
                opts = {}
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if match_.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, ''))
                else:
                    newpats.append(pat)
            match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            lfile = lambda f: lfutil.standin(f) in manifest
            # restrict the matcher's file list to standins of known largefiles
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fileroots = set(m._files)
            origmatchfn = m.matchfn
            m.matchfn = lambda f: (lfutil.isstandin(f) and
                                   (f in manifest) and
                                   origmatchfn(lfutil.splitstandin(f)) or
                                   None)
            return m
        oldmatch = installmatchfn(overridematch)
        listpats = []
        for pat in pats:
            if match_.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        try:
            # intercept util.copyfile to record every standin actually
            # copied and to refuse clobbering an existing largefile
            origcopyfile = util.copyfile
            copiedfiles = []
            def overridecopyfile(src, dest):
                if (lfutil.shortname in src and
                    dest.startswith(repo.wjoin(lfutil.shortname))):
                    destlfile = dest.replace(lfutil.shortname, '')
                    if not opts['force'] and os.path.exists(destlfile):
                        raise IOError('',
                            _('destination largefile already exists'))
                    copiedfiles.append((src, dest))
                    origcopyfile(src, dest)

            util.copyfile = overridecopyfile
            result += orig(ui, repo, listpats, opts, rename)
        finally:
            util.copyfile = origcopyfile

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for (src, dest) in copiedfiles:
            if (lfutil.shortname in src and
                dest.startswith(repo.wjoin(lfutil.shortname))):
                # mirror each standin copy/rename onto the largefile itself
                srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
                destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
                destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
                if not os.path.isdir(destlfiledir):
                    os.makedirs(destlfiledir)
                if rename:
                    os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    # The file is gone, but this deletes any empty parent
                    # directories as a side-effect.
                    util.unlinkpath(repo.wjoin(srclfile), True)
                    lfdirstate.remove(srclfile)
                else:
                    util.copyfile(repo.wjoin(srclfile),
                                  repo.wjoin(destlfile))

                    lfdirstate.add(destlfile)
        lfdirstate.write()
    except error.Abort as e:
        if str(e) != _('no files to copy'):
            raise e
        else:
            nolfiles = True
    finally:
        restorematchfn()
        wlock.release()

    # only abort when neither the normal-file pass nor the largefile
    # pass had anything to copy
    if nolfiles and nonormalfiles:
        raise error.Abort(_('no files to copy'))

    return result
712
712
713 # When the user calls revert, we have to be careful to not revert any
713 # When the user calls revert, we have to be careful to not revert any
714 # changes to other largefiles accidentally. This means we have to keep
714 # changes to other largefiles accidentally. This means we have to keep
715 # track of the largefiles that are being reverted so we only pull down
715 # track of the largefiles that are being reverted so we only pull down
716 # the necessary largefiles.
716 # the necessary largefiles.
717 #
717 #
718 # Standins are only updated (to match the hash of largefiles) before
718 # Standins are only updated (to match the hash of largefiles) before
719 # commits. Update the standins then run the original revert, changing
719 # commits. Update the standins then run the original revert, changing
720 # the matcher to hit standins instead of largefiles. Based on the
720 # the matcher to hit standins instead of largefiles. Based on the
721 # resulting standins update the largefiles.
721 # resulting standins update the largefiles.
def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
    """Wrap cmdutil.revert so largefiles are reverted via their standins.

    Standins are first synchronized with the current largefile contents
    (or deleted when the largefile is gone), the original revert is run
    against a matcher rewritten to hit standins, and finally the
    largefiles are updated from the resulting standins.
    """
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write()
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfutil.standin(lfile))
        for lfile in s.deleted:
            if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
                os.unlink(repo.wjoin(lfutil.standin(lfile)))

        # snapshot so we can compute which largefiles revert touched
        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(mctx, pats=(), opts=None, globbed=False,
                default='relpath', badfn=None):
            if opts is None:
                opts = {}
            match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)

            # revert supports recursing into subrepos, and though largefiles
            # currently doesn't work correctly in that case, this match is
            # called, so the lfdirstate above may not be the correct one for
            # this invocation of match.
            lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
                                               False)

            def tostandin(f):
                # map a largefile name to the name revert should act on:
                # its standin if known to either context, None to drop it,
                # or f unchanged for normal files
                standin = lfutil.standin(f)
                if standin in ctx or standin in mctx:
                    return standin
                elif standin in repo[None] or lfdirstate[f] == 'r':
                    return None
                return f
            m._files = [tostandin(f) for f in m._files]
            m._files = [f for f in m._files if f is not None]
            m._fileroots = set(m._files)
            origmatchfn = m.matchfn
            def matchfn(f):
                if lfutil.isstandin(f):
                    return (origmatchfn(lfutil.splitstandin(f)) and
                            (f in ctx or f in mctx))
                return origmatchfn(f)
            m.matchfn = matchfn
            return m
        oldmatch = installmatchfn(overridematch)
        try:
            orig(ui, repo, ctx, parents, *pats, **opts)
        finally:
            restorematchfn()

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
                                normallookup=True)

    finally:
        wlock.release()
789
789
790 # after pulling changesets, we need to take some extra care to get
790 # after pulling changesets, we need to take some extra care to get
791 # largefiles updated remotely
791 # largefiles updated remotely
def overridepull(orig, ui, repo, source=None, **opts):
    """Wrap the pull command to also cache largefiles for the revisions
    selected by --lfrev (and for all pulled revisions when
    --all-largefiles is given, via the pulled() revset).

    Returns the wrapped command's result code.
    """
    revsprepull = len(repo)
    if not source:
        source = 'default'
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    # Copy before appending: opts['lfrev'] is a list owned by the caller,
    # and mutating it in place would leak 'pulled()' back into opts.
    lfrevs = list(opts.get('lfrev', []))
    if opts.get('all_largefiles'):
        lfrevs.append('pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull # for pulled() revset expression
        try:
            for rev in scmutil.revrange(repo, lfrevs):
                ui.note(_('pulling largefiles for revision %s\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            # firstpulled is only meaningful while --lfrev is evaluated
            del repo.firstpulled
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
814
814
# Collects revset predicates so the extension can register them later
# via revset.extpredicate — presumably deferred registration; confirm
# against mercurial/revset.py.
revsetpredicate = revset.extpredicate()

@revsetpredicate('pulled()')
def pulledrevsetsymbol(repo, subset, x):
    """Changesets that just has been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull -lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull -lfrev "head(pulled()) and not closed()"

    """

    # repo.firstpulled is set (and later deleted) by overridepull while
    # it evaluates --lfrev expressions; outside that window this aborts.
    try:
        firstpulled = repo.firstpulled
    except AttributeError:
        raise error.Abort(_("pulled() only available in --lfrev"))
    return revset.baseset([r for r in subset if r >= firstpulled])
840
842
def overrideclone(orig, ui, source, dest=None, **opts):
    """Wrap the clone command to reject --all-largefiles when the
    destination is not a local repository."""
    destpath = dest if dest is not None else hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(destpath):
        raise error.Abort(_(
            '--all-largefiles is incompatible with non-local destination %s') %
            destpath)

    return orig(ui, source, dest, **opts)
851
853
def hgclone(orig, ui, opts, *args, **kwargs):
    """Wrap hg.clone: permanently enable the largefiles extension in the
    cloned repo's hgrc when the source requires it, and pre-download all
    largefiles when --all-largefiles was given."""
    result = orig(ui, opts, *args, **kwargs)
    if result is None:
        return None

    sourcerepo, destrepo = result
    repo = destrepo.local()

    # When cloning to a remote repo (like through SSH), no repo is available
    # from the peer. Therefore the largefiles can't be downloaded and the
    # hgrc can't be updated.
    if not repo:
        return result

    # If largefiles is required for this repo, permanently enable it locally
    if 'largefiles' in repo.requirements:
        fp = repo.vfs('hgrc', 'a', text=True)
        try:
            fp.write('\n[extensions]\nlargefiles=\n')
        finally:
            fp.close()

    # Caching is implicitly limited to 'rev' option, since the dest repo was
    # truncated at that point. The user may expect a download count with
    # this option, so attempt whether or not this is a largefile repo.
    if opts.get('all_largefiles'):
        success, missing = lfcommands.downloadlfiles(ui, repo, None)
        if missing != 0:
            return None

    return result
883
885
def overriderebase(orig, ui, repo, **opts):
    """Wrap rebase so largefile status output is suppressed and standin
    commits happen automatically while the rebase runs."""
    if not util.safehasattr(repo, '_largefilesenabled'):
        return orig(ui, repo, **opts)

    # Push largefiles-aware hooks for the duration of the rebase and pop
    # them again no matter how it exits.
    repo._lfcommithooks.append(
        lfutil.automatedcommithook(opts.get('continue')))
    repo._lfstatuswriters.append(lambda *msg, **kwargs: None)
    try:
        return orig(ui, repo, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
896
898
def overridearchivecmd(orig, ui, repo, dest, **opts):
    """Wrap the archive command so it runs with largefile status enabled
    on the unfiltered repo, restoring the flag afterwards."""
    unfiltered = repo.unfiltered()
    unfiltered.lfstatus = True
    try:
        return orig(ui, unfiltered, dest, **opts)
    finally:
        unfiltered.lfstatus = False
904
906
def hgwebarchive(orig, web, req, tmpl):
    """Wrap hgweb's archive handler so served archives are built with
    largefile status enabled, restoring the flag afterwards."""
    repository = web.repo
    repository.lfstatus = True
    try:
        return orig(web, req, tmpl)
    finally:
        repository.lfstatus = False
912
914
913 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
915 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
914 prefix='', mtime=None, subrepos=None):
916 prefix='', mtime=None, subrepos=None):
915 # For some reason setting repo.lfstatus in hgwebarchive only changes the
917 # For some reason setting repo.lfstatus in hgwebarchive only changes the
916 # unfiltered repo's attr, so check that as well.
918 # unfiltered repo's attr, so check that as well.
917 if not repo.lfstatus and not repo.unfiltered().lfstatus:
919 if not repo.lfstatus and not repo.unfiltered().lfstatus:
918 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
920 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
919 subrepos)
921 subrepos)
920
922
921 # No need to lock because we are only reading history and
923 # No need to lock because we are only reading history and
922 # largefile caches, neither of which are modified.
924 # largefile caches, neither of which are modified.
923 if node is not None:
925 if node is not None:
924 lfcommands.cachelfiles(repo.ui, repo, node)
926 lfcommands.cachelfiles(repo.ui, repo, node)
925
927
926 if kind not in archival.archivers:
928 if kind not in archival.archivers:
927 raise error.Abort(_("unknown archive type '%s'") % kind)
929 raise error.Abort(_("unknown archive type '%s'") % kind)
928
930
929 ctx = repo[node]
931 ctx = repo[node]
930
932
931 if kind == 'files':
933 if kind == 'files':
932 if prefix:
934 if prefix:
933 raise error.Abort(
935 raise error.Abort(
934 _('cannot give prefix when archiving to files'))
936 _('cannot give prefix when archiving to files'))
935 else:
937 else:
936 prefix = archival.tidyprefix(dest, kind, prefix)
938 prefix = archival.tidyprefix(dest, kind, prefix)
937
939
938 def write(name, mode, islink, getdata):
940 def write(name, mode, islink, getdata):
939 if matchfn and not matchfn(name):
941 if matchfn and not matchfn(name):
940 return
942 return
941 data = getdata()
943 data = getdata()
942 if decode:
944 if decode:
943 data = repo.wwritedata(name, data)
945 data = repo.wwritedata(name, data)
944 archiver.addfile(prefix + name, mode, islink, data)
946 archiver.addfile(prefix + name, mode, islink, data)
945
947
946 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
948 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
947
949
948 if repo.ui.configbool("ui", "archivemeta", True):
950 if repo.ui.configbool("ui", "archivemeta", True):
949 write('.hg_archival.txt', 0o644, False,
951 write('.hg_archival.txt', 0o644, False,
950 lambda: archival.buildmetadata(ctx))
952 lambda: archival.buildmetadata(ctx))
951
953
952 for f in ctx:
954 for f in ctx:
953 ff = ctx.flags(f)
955 ff = ctx.flags(f)
954 getdata = ctx[f].data
956 getdata = ctx[f].data
955 if lfutil.isstandin(f):
957 if lfutil.isstandin(f):
956 if node is not None:
958 if node is not None:
957 path = lfutil.findfile(repo, getdata().strip())
959 path = lfutil.findfile(repo, getdata().strip())
958
960
959 if path is None:
961 if path is None:
960 raise error.Abort(
962 raise error.Abort(
961 _('largefile %s not found in repo store or system cache')
963 _('largefile %s not found in repo store or system cache')
962 % lfutil.splitstandin(f))
964 % lfutil.splitstandin(f))
963 else:
965 else:
964 path = lfutil.splitstandin(f)
966 path = lfutil.splitstandin(f)
965
967
966 f = lfutil.splitstandin(f)
968 f = lfutil.splitstandin(f)
967
969
968 def getdatafn():
970 def getdatafn():
969 fd = None
971 fd = None
970 try:
972 try:
971 fd = open(path, 'rb')
973 fd = open(path, 'rb')
972 return fd.read()
974 return fd.read()
973 finally:
975 finally:
974 if fd:
976 if fd:
975 fd.close()
977 fd.close()
976
978
977 getdata = getdatafn
979 getdata = getdatafn
978 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
980 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
979
981
980 if subrepos:
982 if subrepos:
981 for subpath in sorted(ctx.substate):
983 for subpath in sorted(ctx.substate):
982 sub = ctx.workingsub(subpath)
984 sub = ctx.workingsub(subpath)
983 submatch = match_.narrowmatcher(subpath, matchfn)
985 submatch = match_.narrowmatcher(subpath, matchfn)
984 sub._repo.lfstatus = True
986 sub._repo.lfstatus = True
985 sub.archive(archiver, prefix, submatch)
987 sub.archive(archiver, prefix, submatch)
986
988
987 archiver.done()
989 archiver.done()
988
990
989 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
991 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
990 if not repo._repo.lfstatus:
992 if not repo._repo.lfstatus:
991 return orig(repo, archiver, prefix, match)
993 return orig(repo, archiver, prefix, match)
992
994
993 repo._get(repo._state + ('hg',))
995 repo._get(repo._state + ('hg',))
994 rev = repo._state[1]
996 rev = repo._state[1]
995 ctx = repo._repo[rev]
997 ctx = repo._repo[rev]
996
998
997 if ctx.node() is not None:
999 if ctx.node() is not None:
998 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1000 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
999
1001
1000 def write(name, mode, islink, getdata):
1002 def write(name, mode, islink, getdata):
1001 # At this point, the standin has been replaced with the largefile name,
1003 # At this point, the standin has been replaced with the largefile name,
1002 # so the normal matcher works here without the lfutil variants.
1004 # so the normal matcher works here without the lfutil variants.
1003 if match and not match(f):
1005 if match and not match(f):
1004 return
1006 return
1005 data = getdata()
1007 data = getdata()
1006
1008
1007 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1009 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1008
1010
1009 for f in ctx:
1011 for f in ctx:
1010 ff = ctx.flags(f)
1012 ff = ctx.flags(f)
1011 getdata = ctx[f].data
1013 getdata = ctx[f].data
1012 if lfutil.isstandin(f):
1014 if lfutil.isstandin(f):
1013 if ctx.node() is not None:
1015 if ctx.node() is not None:
1014 path = lfutil.findfile(repo._repo, getdata().strip())
1016 path = lfutil.findfile(repo._repo, getdata().strip())
1015
1017
1016 if path is None:
1018 if path is None:
1017 raise error.Abort(
1019 raise error.Abort(
1018 _('largefile %s not found in repo store or system cache')
1020 _('largefile %s not found in repo store or system cache')
1019 % lfutil.splitstandin(f))
1021 % lfutil.splitstandin(f))
1020 else:
1022 else:
1021 path = lfutil.splitstandin(f)
1023 path = lfutil.splitstandin(f)
1022
1024
1023 f = lfutil.splitstandin(f)
1025 f = lfutil.splitstandin(f)
1024
1026
1025 def getdatafn():
1027 def getdatafn():
1026 fd = None
1028 fd = None
1027 try:
1029 try:
1028 fd = open(os.path.join(prefix, path), 'rb')
1030 fd = open(os.path.join(prefix, path), 'rb')
1029 return fd.read()
1031 return fd.read()
1030 finally:
1032 finally:
1031 if fd:
1033 if fd:
1032 fd.close()
1034 fd.close()
1033
1035
1034 getdata = getdatafn
1036 getdata = getdatafn
1035
1037
1036 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1038 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1037
1039
1038 for subpath in sorted(ctx.substate):
1040 for subpath in sorted(ctx.substate):
1039 sub = ctx.workingsub(subpath)
1041 sub = ctx.workingsub(subpath)
1040 submatch = match_.narrowmatcher(subpath, match)
1042 submatch = match_.narrowmatcher(subpath, match)
1041 sub._repo.lfstatus = True
1043 sub._repo.lfstatus = True
1042 sub.archive(archiver, prefix + repo._path + '/', submatch)
1044 sub.archive(archiver, prefix + repo._path + '/', submatch)
1043
1045
1044 # If a largefile is modified, the change is not reflected in its
1046 # If a largefile is modified, the change is not reflected in its
1045 # standin until a commit. cmdutil.bailifchanged() raises an exception
1047 # standin until a commit. cmdutil.bailifchanged() raises an exception
1046 # if the repo has uncommitted changes. Wrap it to also check if
1048 # if the repo has uncommitted changes. Wrap it to also check if
1047 # largefiles were changed. This is used by bisect, backout and fetch.
1049 # largefiles were changed. This is used by bisect, backout and fetch.
1048 def overridebailifchanged(orig, repo, *args, **kwargs):
1050 def overridebailifchanged(orig, repo, *args, **kwargs):
1049 orig(repo, *args, **kwargs)
1051 orig(repo, *args, **kwargs)
1050 repo.lfstatus = True
1052 repo.lfstatus = True
1051 s = repo.status()
1053 s = repo.status()
1052 repo.lfstatus = False
1054 repo.lfstatus = False
1053 if s.modified or s.added or s.removed or s.deleted:
1055 if s.modified or s.added or s.removed or s.deleted:
1054 raise error.Abort(_('uncommitted changes'))
1056 raise error.Abort(_('uncommitted changes'))
1055
1057
1056 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1058 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1057 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1059 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1058 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1060 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1059 m = composelargefilematcher(match, repo[None].manifest())
1061 m = composelargefilematcher(match, repo[None].manifest())
1060
1062
1061 try:
1063 try:
1062 repo.lfstatus = True
1064 repo.lfstatus = True
1063 s = repo.status(match=m, clean=True)
1065 s = repo.status(match=m, clean=True)
1064 finally:
1066 finally:
1065 repo.lfstatus = False
1067 repo.lfstatus = False
1066 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1068 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1067 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1069 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1068
1070
1069 for f in forget:
1071 for f in forget:
1070 if lfutil.standin(f) not in repo.dirstate and not \
1072 if lfutil.standin(f) not in repo.dirstate and not \
1071 repo.wvfs.isdir(lfutil.standin(f)):
1073 repo.wvfs.isdir(lfutil.standin(f)):
1072 ui.warn(_('not removing %s: file is already untracked\n')
1074 ui.warn(_('not removing %s: file is already untracked\n')
1073 % m.rel(f))
1075 % m.rel(f))
1074 bad.append(f)
1076 bad.append(f)
1075
1077
1076 for f in forget:
1078 for f in forget:
1077 if ui.verbose or not m.exact(f):
1079 if ui.verbose or not m.exact(f):
1078 ui.status(_('removing %s\n') % m.rel(f))
1080 ui.status(_('removing %s\n') % m.rel(f))
1079
1081
1080 # Need to lock because standin files are deleted then removed from the
1082 # Need to lock because standin files are deleted then removed from the
1081 # repository and we could race in-between.
1083 # repository and we could race in-between.
1082 wlock = repo.wlock()
1084 wlock = repo.wlock()
1083 try:
1085 try:
1084 lfdirstate = lfutil.openlfdirstate(ui, repo)
1086 lfdirstate = lfutil.openlfdirstate(ui, repo)
1085 for f in forget:
1087 for f in forget:
1086 if lfdirstate[f] == 'a':
1088 if lfdirstate[f] == 'a':
1087 lfdirstate.drop(f)
1089 lfdirstate.drop(f)
1088 else:
1090 else:
1089 lfdirstate.remove(f)
1091 lfdirstate.remove(f)
1090 lfdirstate.write()
1092 lfdirstate.write()
1091 standins = [lfutil.standin(f) for f in forget]
1093 standins = [lfutil.standin(f) for f in forget]
1092 for f in standins:
1094 for f in standins:
1093 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1095 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1094 rejected = repo[None].forget(standins)
1096 rejected = repo[None].forget(standins)
1095 finally:
1097 finally:
1096 wlock.release()
1098 wlock.release()
1097
1099
1098 bad.extend(f for f in rejected if f in m.files())
1100 bad.extend(f for f in rejected if f in m.files())
1099 forgot.extend(f for f in forget if f not in rejected)
1101 forgot.extend(f for f in forget if f not in rejected)
1100 return bad, forgot
1102 return bad, forgot
1101
1103
1102 def _getoutgoings(repo, other, missing, addfunc):
1104 def _getoutgoings(repo, other, missing, addfunc):
1103 """get pairs of filename and largefile hash in outgoing revisions
1105 """get pairs of filename and largefile hash in outgoing revisions
1104 in 'missing'.
1106 in 'missing'.
1105
1107
1106 largefiles already existing on 'other' repository are ignored.
1108 largefiles already existing on 'other' repository are ignored.
1107
1109
1108 'addfunc' is invoked with each unique pairs of filename and
1110 'addfunc' is invoked with each unique pairs of filename and
1109 largefile hash value.
1111 largefile hash value.
1110 """
1112 """
1111 knowns = set()
1113 knowns = set()
1112 lfhashes = set()
1114 lfhashes = set()
1113 def dedup(fn, lfhash):
1115 def dedup(fn, lfhash):
1114 k = (fn, lfhash)
1116 k = (fn, lfhash)
1115 if k not in knowns:
1117 if k not in knowns:
1116 knowns.add(k)
1118 knowns.add(k)
1117 lfhashes.add(lfhash)
1119 lfhashes.add(lfhash)
1118 lfutil.getlfilestoupload(repo, missing, dedup)
1120 lfutil.getlfilestoupload(repo, missing, dedup)
1119 if lfhashes:
1121 if lfhashes:
1120 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1122 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1121 for fn, lfhash in knowns:
1123 for fn, lfhash in knowns:
1122 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1124 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1123 addfunc(fn, lfhash)
1125 addfunc(fn, lfhash)
1124
1126
1125 def outgoinghook(ui, repo, other, opts, missing):
1127 def outgoinghook(ui, repo, other, opts, missing):
1126 if opts.pop('large', None):
1128 if opts.pop('large', None):
1127 lfhashes = set()
1129 lfhashes = set()
1128 if ui.debugflag:
1130 if ui.debugflag:
1129 toupload = {}
1131 toupload = {}
1130 def addfunc(fn, lfhash):
1132 def addfunc(fn, lfhash):
1131 if fn not in toupload:
1133 if fn not in toupload:
1132 toupload[fn] = []
1134 toupload[fn] = []
1133 toupload[fn].append(lfhash)
1135 toupload[fn].append(lfhash)
1134 lfhashes.add(lfhash)
1136 lfhashes.add(lfhash)
1135 def showhashes(fn):
1137 def showhashes(fn):
1136 for lfhash in sorted(toupload[fn]):
1138 for lfhash in sorted(toupload[fn]):
1137 ui.debug(' %s\n' % (lfhash))
1139 ui.debug(' %s\n' % (lfhash))
1138 else:
1140 else:
1139 toupload = set()
1141 toupload = set()
1140 def addfunc(fn, lfhash):
1142 def addfunc(fn, lfhash):
1141 toupload.add(fn)
1143 toupload.add(fn)
1142 lfhashes.add(lfhash)
1144 lfhashes.add(lfhash)
1143 def showhashes(fn):
1145 def showhashes(fn):
1144 pass
1146 pass
1145 _getoutgoings(repo, other, missing, addfunc)
1147 _getoutgoings(repo, other, missing, addfunc)
1146
1148
1147 if not toupload:
1149 if not toupload:
1148 ui.status(_('largefiles: no files to upload\n'))
1150 ui.status(_('largefiles: no files to upload\n'))
1149 else:
1151 else:
1150 ui.status(_('largefiles to upload (%d entities):\n')
1152 ui.status(_('largefiles to upload (%d entities):\n')
1151 % (len(lfhashes)))
1153 % (len(lfhashes)))
1152 for file in sorted(toupload):
1154 for file in sorted(toupload):
1153 ui.status(lfutil.splitstandin(file) + '\n')
1155 ui.status(lfutil.splitstandin(file) + '\n')
1154 showhashes(file)
1156 showhashes(file)
1155 ui.status('\n')
1157 ui.status('\n')
1156
1158
1157 def summaryremotehook(ui, repo, opts, changes):
1159 def summaryremotehook(ui, repo, opts, changes):
1158 largeopt = opts.get('large', False)
1160 largeopt = opts.get('large', False)
1159 if changes is None:
1161 if changes is None:
1160 if largeopt:
1162 if largeopt:
1161 return (False, True) # only outgoing check is needed
1163 return (False, True) # only outgoing check is needed
1162 else:
1164 else:
1163 return (False, False)
1165 return (False, False)
1164 elif largeopt:
1166 elif largeopt:
1165 url, branch, peer, outgoing = changes[1]
1167 url, branch, peer, outgoing = changes[1]
1166 if peer is None:
1168 if peer is None:
1167 # i18n: column positioning for "hg summary"
1169 # i18n: column positioning for "hg summary"
1168 ui.status(_('largefiles: (no remote repo)\n'))
1170 ui.status(_('largefiles: (no remote repo)\n'))
1169 return
1171 return
1170
1172
1171 toupload = set()
1173 toupload = set()
1172 lfhashes = set()
1174 lfhashes = set()
1173 def addfunc(fn, lfhash):
1175 def addfunc(fn, lfhash):
1174 toupload.add(fn)
1176 toupload.add(fn)
1175 lfhashes.add(lfhash)
1177 lfhashes.add(lfhash)
1176 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1178 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1177
1179
1178 if not toupload:
1180 if not toupload:
1179 # i18n: column positioning for "hg summary"
1181 # i18n: column positioning for "hg summary"
1180 ui.status(_('largefiles: (no files to upload)\n'))
1182 ui.status(_('largefiles: (no files to upload)\n'))
1181 else:
1183 else:
1182 # i18n: column positioning for "hg summary"
1184 # i18n: column positioning for "hg summary"
1183 ui.status(_('largefiles: %d entities for %d files to upload\n')
1185 ui.status(_('largefiles: %d entities for %d files to upload\n')
1184 % (len(lfhashes), len(toupload)))
1186 % (len(lfhashes), len(toupload)))
1185
1187
1186 def overridesummary(orig, ui, repo, *pats, **opts):
1188 def overridesummary(orig, ui, repo, *pats, **opts):
1187 try:
1189 try:
1188 repo.lfstatus = True
1190 repo.lfstatus = True
1189 orig(ui, repo, *pats, **opts)
1191 orig(ui, repo, *pats, **opts)
1190 finally:
1192 finally:
1191 repo.lfstatus = False
1193 repo.lfstatus = False
1192
1194
1193 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1195 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1194 similarity=None):
1196 similarity=None):
1195 if opts is None:
1197 if opts is None:
1196 opts = {}
1198 opts = {}
1197 if not lfutil.islfilesrepo(repo):
1199 if not lfutil.islfilesrepo(repo):
1198 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1200 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1199 # Get the list of missing largefiles so we can remove them
1201 # Get the list of missing largefiles so we can remove them
1200 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1202 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1201 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1203 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1202 False, False, False)
1204 False, False, False)
1203
1205
1204 # Call into the normal remove code, but the removing of the standin, we want
1206 # Call into the normal remove code, but the removing of the standin, we want
1205 # to have handled by original addremove. Monkey patching here makes sure
1207 # to have handled by original addremove. Monkey patching here makes sure
1206 # we don't remove the standin in the largefiles code, preventing a very
1208 # we don't remove the standin in the largefiles code, preventing a very
1207 # confused state later.
1209 # confused state later.
1208 if s.deleted:
1210 if s.deleted:
1209 m = copy.copy(matcher)
1211 m = copy.copy(matcher)
1210
1212
1211 # The m._files and m._map attributes are not changed to the deleted list
1213 # The m._files and m._map attributes are not changed to the deleted list
1212 # because that affects the m.exact() test, which in turn governs whether
1214 # because that affects the m.exact() test, which in turn governs whether
1213 # or not the file name is printed, and how. Simply limit the original
1215 # or not the file name is printed, and how. Simply limit the original
1214 # matches to those in the deleted status list.
1216 # matches to those in the deleted status list.
1215 matchfn = m.matchfn
1217 matchfn = m.matchfn
1216 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1218 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1217
1219
1218 removelargefiles(repo.ui, repo, True, m, **opts)
1220 removelargefiles(repo.ui, repo, True, m, **opts)
1219 # Call into the normal add code, and any files that *should* be added as
1221 # Call into the normal add code, and any files that *should* be added as
1220 # largefiles will be
1222 # largefiles will be
1221 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1223 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1222 # Now that we've handled largefiles, hand off to the original addremove
1224 # Now that we've handled largefiles, hand off to the original addremove
1223 # function to take care of the rest. Make sure it doesn't do anything with
1225 # function to take care of the rest. Make sure it doesn't do anything with
1224 # largefiles by passing a matcher that will ignore them.
1226 # largefiles by passing a matcher that will ignore them.
1225 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1227 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1226 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1228 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1227
1229
1228 # Calling purge with --all will cause the largefiles to be deleted.
1230 # Calling purge with --all will cause the largefiles to be deleted.
1229 # Override repo.status to prevent this from happening.
1231 # Override repo.status to prevent this from happening.
1230 def overridepurge(orig, ui, repo, *dirs, **opts):
1232 def overridepurge(orig, ui, repo, *dirs, **opts):
1231 # XXX Monkey patching a repoview will not work. The assigned attribute will
1233 # XXX Monkey patching a repoview will not work. The assigned attribute will
1232 # be set on the unfiltered repo, but we will only lookup attributes in the
1234 # be set on the unfiltered repo, but we will only lookup attributes in the
1233 # unfiltered repo if the lookup in the repoview object itself fails. As the
1235 # unfiltered repo if the lookup in the repoview object itself fails. As the
1234 # monkey patched method exists on the repoview class the lookup will not
1236 # monkey patched method exists on the repoview class the lookup will not
1235 # fail. As a result, the original version will shadow the monkey patched
1237 # fail. As a result, the original version will shadow the monkey patched
1236 # one, defeating the monkey patch.
1238 # one, defeating the monkey patch.
1237 #
1239 #
1238 # As a work around we use an unfiltered repo here. We should do something
1240 # As a work around we use an unfiltered repo here. We should do something
1239 # cleaner instead.
1241 # cleaner instead.
1240 repo = repo.unfiltered()
1242 repo = repo.unfiltered()
1241 oldstatus = repo.status
1243 oldstatus = repo.status
1242 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1244 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1243 clean=False, unknown=False, listsubrepos=False):
1245 clean=False, unknown=False, listsubrepos=False):
1244 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1246 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1245 listsubrepos)
1247 listsubrepos)
1246 lfdirstate = lfutil.openlfdirstate(ui, repo)
1248 lfdirstate = lfutil.openlfdirstate(ui, repo)
1247 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1249 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1248 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1250 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1249 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1251 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1250 unknown, ignored, r.clean)
1252 unknown, ignored, r.clean)
1251 repo.status = overridestatus
1253 repo.status = overridestatus
1252 orig(ui, repo, *dirs, **opts)
1254 orig(ui, repo, *dirs, **opts)
1253 repo.status = oldstatus
1255 repo.status = oldstatus
1254 def overriderollback(orig, ui, repo, **opts):
1256 def overriderollback(orig, ui, repo, **opts):
1255 wlock = repo.wlock()
1257 wlock = repo.wlock()
1256 try:
1258 try:
1257 before = repo.dirstate.parents()
1259 before = repo.dirstate.parents()
1258 orphans = set(f for f in repo.dirstate
1260 orphans = set(f for f in repo.dirstate
1259 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1261 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1260 result = orig(ui, repo, **opts)
1262 result = orig(ui, repo, **opts)
1261 after = repo.dirstate.parents()
1263 after = repo.dirstate.parents()
1262 if before == after:
1264 if before == after:
1263 return result # no need to restore standins
1265 return result # no need to restore standins
1264
1266
1265 pctx = repo['.']
1267 pctx = repo['.']
1266 for f in repo.dirstate:
1268 for f in repo.dirstate:
1267 if lfutil.isstandin(f):
1269 if lfutil.isstandin(f):
1268 orphans.discard(f)
1270 orphans.discard(f)
1269 if repo.dirstate[f] == 'r':
1271 if repo.dirstate[f] == 'r':
1270 repo.wvfs.unlinkpath(f, ignoremissing=True)
1272 repo.wvfs.unlinkpath(f, ignoremissing=True)
1271 elif f in pctx:
1273 elif f in pctx:
1272 fctx = pctx[f]
1274 fctx = pctx[f]
1273 repo.wwrite(f, fctx.data(), fctx.flags())
1275 repo.wwrite(f, fctx.data(), fctx.flags())
1274 else:
1276 else:
1275 # content of standin is not so important in 'a',
1277 # content of standin is not so important in 'a',
1276 # 'm' or 'n' (coming from the 2nd parent) cases
1278 # 'm' or 'n' (coming from the 2nd parent) cases
1277 lfutil.writestandin(repo, f, '', False)
1279 lfutil.writestandin(repo, f, '', False)
1278 for standin in orphans:
1280 for standin in orphans:
1279 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1281 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1280
1282
1281 lfdirstate = lfutil.openlfdirstate(ui, repo)
1283 lfdirstate = lfutil.openlfdirstate(ui, repo)
1282 orphans = set(lfdirstate)
1284 orphans = set(lfdirstate)
1283 lfiles = lfutil.listlfiles(repo)
1285 lfiles = lfutil.listlfiles(repo)
1284 for file in lfiles:
1286 for file in lfiles:
1285 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1287 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1286 orphans.discard(file)
1288 orphans.discard(file)
1287 for lfile in orphans:
1289 for lfile in orphans:
1288 lfdirstate.drop(lfile)
1290 lfdirstate.drop(lfile)
1289 lfdirstate.write()
1291 lfdirstate.write()
1290 finally:
1292 finally:
1291 wlock.release()
1293 wlock.release()
1292 return result
1294 return result
1293
1295
1294 def overridetransplant(orig, ui, repo, *revs, **opts):
1296 def overridetransplant(orig, ui, repo, *revs, **opts):
1295 resuming = opts.get('continue')
1297 resuming = opts.get('continue')
1296 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1298 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1297 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1299 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1298 try:
1300 try:
1299 result = orig(ui, repo, *revs, **opts)
1301 result = orig(ui, repo, *revs, **opts)
1300 finally:
1302 finally:
1301 repo._lfstatuswriters.pop()
1303 repo._lfstatuswriters.pop()
1302 repo._lfcommithooks.pop()
1304 repo._lfcommithooks.pop()
1303 return result
1305 return result
1304
1306
1305 def overridecat(orig, ui, repo, file1, *pats, **opts):
1307 def overridecat(orig, ui, repo, file1, *pats, **opts):
1306 ctx = scmutil.revsingle(repo, opts.get('rev'))
1308 ctx = scmutil.revsingle(repo, opts.get('rev'))
1307 err = 1
1309 err = 1
1308 notbad = set()
1310 notbad = set()
1309 m = scmutil.match(ctx, (file1,) + pats, opts)
1311 m = scmutil.match(ctx, (file1,) + pats, opts)
1310 origmatchfn = m.matchfn
1312 origmatchfn = m.matchfn
1311 def lfmatchfn(f):
1313 def lfmatchfn(f):
1312 if origmatchfn(f):
1314 if origmatchfn(f):
1313 return True
1315 return True
1314 lf = lfutil.splitstandin(f)
1316 lf = lfutil.splitstandin(f)
1315 if lf is None:
1317 if lf is None:
1316 return False
1318 return False
1317 notbad.add(lf)
1319 notbad.add(lf)
1318 return origmatchfn(lf)
1320 return origmatchfn(lf)
1319 m.matchfn = lfmatchfn
1321 m.matchfn = lfmatchfn
1320 origbadfn = m.bad
1322 origbadfn = m.bad
1321 def lfbadfn(f, msg):
1323 def lfbadfn(f, msg):
1322 if not f in notbad:
1324 if not f in notbad:
1323 origbadfn(f, msg)
1325 origbadfn(f, msg)
1324 m.bad = lfbadfn
1326 m.bad = lfbadfn
1325
1327
1326 origvisitdirfn = m.visitdir
1328 origvisitdirfn = m.visitdir
1327 def lfvisitdirfn(dir):
1329 def lfvisitdirfn(dir):
1328 if dir == lfutil.shortname:
1330 if dir == lfutil.shortname:
1329 return True
1331 return True
1330 ret = origvisitdirfn(dir)
1332 ret = origvisitdirfn(dir)
1331 if ret:
1333 if ret:
1332 return ret
1334 return ret
1333 lf = lfutil.splitstandin(dir)
1335 lf = lfutil.splitstandin(dir)
1334 if lf is None:
1336 if lf is None:
1335 return False
1337 return False
1336 return origvisitdirfn(lf)
1338 return origvisitdirfn(lf)
1337 m.visitdir = lfvisitdirfn
1339 m.visitdir = lfvisitdirfn
1338
1340
1339 for f in ctx.walk(m):
1341 for f in ctx.walk(m):
1340 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1342 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1341 pathname=f)
1343 pathname=f)
1342 lf = lfutil.splitstandin(f)
1344 lf = lfutil.splitstandin(f)
1343 if lf is None or origmatchfn(f):
1345 if lf is None or origmatchfn(f):
1344 # duplicating unreachable code from commands.cat
1346 # duplicating unreachable code from commands.cat
1345 data = ctx[f].data()
1347 data = ctx[f].data()
1346 if opts.get('decode'):
1348 if opts.get('decode'):
1347 data = repo.wwritedata(f, data)
1349 data = repo.wwritedata(f, data)
1348 fp.write(data)
1350 fp.write(data)
1349 else:
1351 else:
1350 hash = lfutil.readstandin(repo, lf, ctx.rev())
1352 hash = lfutil.readstandin(repo, lf, ctx.rev())
1351 if not lfutil.inusercache(repo.ui, hash):
1353 if not lfutil.inusercache(repo.ui, hash):
1352 store = basestore._openstore(repo)
1354 store = basestore._openstore(repo)
1353 success, missing = store.get([(lf, hash)])
1355 success, missing = store.get([(lf, hash)])
1354 if len(success) != 1:
1356 if len(success) != 1:
1355 raise error.Abort(
1357 raise error.Abort(
1356 _('largefile %s is not in cache and could not be '
1358 _('largefile %s is not in cache and could not be '
1357 'downloaded') % lf)
1359 'downloaded') % lf)
1358 path = lfutil.usercachepath(repo.ui, hash)
1360 path = lfutil.usercachepath(repo.ui, hash)
1359 fpin = open(path, "rb")
1361 fpin = open(path, "rb")
1360 for chunk in util.filechunkiter(fpin, 128 * 1024):
1362 for chunk in util.filechunkiter(fpin, 128 * 1024):
1361 fp.write(chunk)
1363 fp.write(chunk)
1362 fpin.close()
1364 fpin.close()
1363 fp.close()
1365 fp.close()
1364 err = 0
1366 err = 0
1365 return err
1367 return err
1366
1368
1367 def mergeupdate(orig, repo, node, branchmerge, force,
1369 def mergeupdate(orig, repo, node, branchmerge, force,
1368 *args, **kwargs):
1370 *args, **kwargs):
1369 matcher = kwargs.get('matcher', None)
1371 matcher = kwargs.get('matcher', None)
1370 # note if this is a partial update
1372 # note if this is a partial update
1371 partial = matcher and not matcher.always()
1373 partial = matcher and not matcher.always()
1372 wlock = repo.wlock()
1374 wlock = repo.wlock()
1373 try:
1375 try:
1374 # branch | | |
1376 # branch | | |
1375 # merge | force | partial | action
1377 # merge | force | partial | action
1376 # -------+-------+---------+--------------
1378 # -------+-------+---------+--------------
1377 # x | x | x | linear-merge
1379 # x | x | x | linear-merge
1378 # o | x | x | branch-merge
1380 # o | x | x | branch-merge
1379 # x | o | x | overwrite (as clean update)
1381 # x | o | x | overwrite (as clean update)
1380 # o | o | x | force-branch-merge (*1)
1382 # o | o | x | force-branch-merge (*1)
1381 # x | x | o | (*)
1383 # x | x | o | (*)
1382 # o | x | o | (*)
1384 # o | x | o | (*)
1383 # x | o | o | overwrite (as revert)
1385 # x | o | o | overwrite (as revert)
1384 # o | o | o | (*)
1386 # o | o | o | (*)
1385 #
1387 #
1386 # (*) don't care
1388 # (*) don't care
1387 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1389 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1388
1390
1389 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1391 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1390 unsure, s = lfdirstate.status(match_.always(repo.root,
1392 unsure, s = lfdirstate.status(match_.always(repo.root,
1391 repo.getcwd()),
1393 repo.getcwd()),
1392 [], False, False, False)
1394 [], False, False, False)
1393 pctx = repo['.']
1395 pctx = repo['.']
1394 for lfile in unsure + s.modified:
1396 for lfile in unsure + s.modified:
1395 lfileabs = repo.wvfs.join(lfile)
1397 lfileabs = repo.wvfs.join(lfile)
1396 if not os.path.exists(lfileabs):
1398 if not os.path.exists(lfileabs):
1397 continue
1399 continue
1398 lfhash = lfutil.hashrepofile(repo, lfile)
1400 lfhash = lfutil.hashrepofile(repo, lfile)
1399 standin = lfutil.standin(lfile)
1401 standin = lfutil.standin(lfile)
1400 lfutil.writestandin(repo, standin, lfhash,
1402 lfutil.writestandin(repo, standin, lfhash,
1401 lfutil.getexecutable(lfileabs))
1403 lfutil.getexecutable(lfileabs))
1402 if (standin in pctx and
1404 if (standin in pctx and
1403 lfhash == lfutil.readstandin(repo, lfile, '.')):
1405 lfhash == lfutil.readstandin(repo, lfile, '.')):
1404 lfdirstate.normal(lfile)
1406 lfdirstate.normal(lfile)
1405 for lfile in s.added:
1407 for lfile in s.added:
1406 lfutil.updatestandin(repo, lfutil.standin(lfile))
1408 lfutil.updatestandin(repo, lfutil.standin(lfile))
1407 lfdirstate.write()
1409 lfdirstate.write()
1408
1410
1409 oldstandins = lfutil.getstandinsstate(repo)
1411 oldstandins = lfutil.getstandinsstate(repo)
1410
1412
1411 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1413 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1412
1414
1413 newstandins = lfutil.getstandinsstate(repo)
1415 newstandins = lfutil.getstandinsstate(repo)
1414 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1416 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1415 if branchmerge or force or partial:
1417 if branchmerge or force or partial:
1416 filelist.extend(s.deleted + s.removed)
1418 filelist.extend(s.deleted + s.removed)
1417
1419
1418 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1420 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1419 normallookup=partial)
1421 normallookup=partial)
1420
1422
1421 return result
1423 return result
1422 finally:
1424 finally:
1423 wlock.release()
1425 wlock.release()
1424
1426
1425 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1427 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1426 result = orig(repo, files, *args, **kwargs)
1428 result = orig(repo, files, *args, **kwargs)
1427
1429
1428 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1430 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1429 if filelist:
1431 if filelist:
1430 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1432 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1431 printmessage=False, normallookup=True)
1433 printmessage=False, normallookup=True)
1432
1434
1433 return result
1435 return result
@@ -1,172 +1,173 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''setup for largefiles extension: uisetup'''
9 '''setup for largefiles extension: uisetup'''
10
10
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
12 httppeer, merge, scmutil, sshpeer, wireproto, revset, subrepo, copies
12 httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.hgweb import hgweb_mod, webcommands
14 from mercurial.hgweb import hgweb_mod, webcommands
15
15
16 import overrides
16 import overrides
17 import proto
17 import proto
18
18
19 def uisetup(ui):
19 def uisetup(ui):
20 # Disable auto-status for some commands which assume that all
20 # Disable auto-status for some commands which assume that all
21 # files in the result are under Mercurial's control
21 # files in the result are under Mercurial's control
22
22
23 entry = extensions.wrapcommand(commands.table, 'add',
23 entry = extensions.wrapcommand(commands.table, 'add',
24 overrides.overrideadd)
24 overrides.overrideadd)
25 addopt = [('', 'large', None, _('add as largefile')),
25 addopt = [('', 'large', None, _('add as largefile')),
26 ('', 'normal', None, _('add as normal file')),
26 ('', 'normal', None, _('add as normal file')),
27 ('', 'lfsize', '', _('add all files above this size '
27 ('', 'lfsize', '', _('add all files above this size '
28 '(in megabytes) as largefiles '
28 '(in megabytes) as largefiles '
29 '(default: 10)'))]
29 '(default: 10)'))]
30 entry[1].extend(addopt)
30 entry[1].extend(addopt)
31
31
32 # The scmutil function is called both by the (trivial) addremove command,
32 # The scmutil function is called both by the (trivial) addremove command,
33 # and in the process of handling commit -A (issue3542)
33 # and in the process of handling commit -A (issue3542)
34 entry = extensions.wrapfunction(scmutil, 'addremove',
34 entry = extensions.wrapfunction(scmutil, 'addremove',
35 overrides.scmutiladdremove)
35 overrides.scmutiladdremove)
36 extensions.wrapfunction(cmdutil, 'add', overrides.cmdutiladd)
36 extensions.wrapfunction(cmdutil, 'add', overrides.cmdutiladd)
37 extensions.wrapfunction(cmdutil, 'remove', overrides.cmdutilremove)
37 extensions.wrapfunction(cmdutil, 'remove', overrides.cmdutilremove)
38 extensions.wrapfunction(cmdutil, 'forget', overrides.cmdutilforget)
38 extensions.wrapfunction(cmdutil, 'forget', overrides.cmdutilforget)
39
39
40 extensions.wrapfunction(copies, 'pathcopies', overrides.copiespathcopies)
40 extensions.wrapfunction(copies, 'pathcopies', overrides.copiespathcopies)
41
41
42 # Subrepos call status function
42 # Subrepos call status function
43 entry = extensions.wrapcommand(commands.table, 'status',
43 entry = extensions.wrapcommand(commands.table, 'status',
44 overrides.overridestatus)
44 overrides.overridestatus)
45 entry = extensions.wrapfunction(subrepo.hgsubrepo, 'status',
45 entry = extensions.wrapfunction(subrepo.hgsubrepo, 'status',
46 overrides.overridestatusfn)
46 overrides.overridestatusfn)
47
47
48 entry = extensions.wrapcommand(commands.table, 'log',
48 entry = extensions.wrapcommand(commands.table, 'log',
49 overrides.overridelog)
49 overrides.overridelog)
50 entry = extensions.wrapcommand(commands.table, 'rollback',
50 entry = extensions.wrapcommand(commands.table, 'rollback',
51 overrides.overriderollback)
51 overrides.overriderollback)
52 entry = extensions.wrapcommand(commands.table, 'verify',
52 entry = extensions.wrapcommand(commands.table, 'verify',
53 overrides.overrideverify)
53 overrides.overrideverify)
54
54
55 verifyopt = [('', 'large', None,
55 verifyopt = [('', 'large', None,
56 _('verify that all largefiles in current revision exists')),
56 _('verify that all largefiles in current revision exists')),
57 ('', 'lfa', None,
57 ('', 'lfa', None,
58 _('verify largefiles in all revisions, not just current')),
58 _('verify largefiles in all revisions, not just current')),
59 ('', 'lfc', None,
59 ('', 'lfc', None,
60 _('verify local largefile contents, not just existence'))]
60 _('verify local largefile contents, not just existence'))]
61 entry[1].extend(verifyopt)
61 entry[1].extend(verifyopt)
62
62
63 entry = extensions.wrapcommand(commands.table, 'debugstate',
63 entry = extensions.wrapcommand(commands.table, 'debugstate',
64 overrides.overridedebugstate)
64 overrides.overridedebugstate)
65 debugstateopt = [('', 'large', None, _('display largefiles dirstate'))]
65 debugstateopt = [('', 'large', None, _('display largefiles dirstate'))]
66 entry[1].extend(debugstateopt)
66 entry[1].extend(debugstateopt)
67
67
68 outgoing = lambda orgfunc, *arg, **kwargs: orgfunc(*arg, **kwargs)
68 outgoing = lambda orgfunc, *arg, **kwargs: orgfunc(*arg, **kwargs)
69 entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing)
69 entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing)
70 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
70 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
71 entry[1].extend(outgoingopt)
71 entry[1].extend(outgoingopt)
72 cmdutil.outgoinghooks.add('largefiles', overrides.outgoinghook)
72 cmdutil.outgoinghooks.add('largefiles', overrides.outgoinghook)
73 entry = extensions.wrapcommand(commands.table, 'summary',
73 entry = extensions.wrapcommand(commands.table, 'summary',
74 overrides.overridesummary)
74 overrides.overridesummary)
75 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
75 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
76 entry[1].extend(summaryopt)
76 entry[1].extend(summaryopt)
77 cmdutil.summaryremotehooks.add('largefiles', overrides.summaryremotehook)
77 cmdutil.summaryremotehooks.add('largefiles', overrides.summaryremotehook)
78
78
79 entry = extensions.wrapcommand(commands.table, 'pull',
79 entry = extensions.wrapcommand(commands.table, 'pull',
80 overrides.overridepull)
80 overrides.overridepull)
81 pullopt = [('', 'all-largefiles', None,
81 pullopt = [('', 'all-largefiles', None,
82 _('download all pulled versions of largefiles (DEPRECATED)')),
82 _('download all pulled versions of largefiles (DEPRECATED)')),
83 ('', 'lfrev', [],
83 ('', 'lfrev', [],
84 _('download largefiles for these revisions'), _('REV'))]
84 _('download largefiles for these revisions'), _('REV'))]
85 entry[1].extend(pullopt)
85 entry[1].extend(pullopt)
86 revset.symbols['pulled'] = overrides.pulledrevsetsymbol
87
86
88 entry = extensions.wrapcommand(commands.table, 'clone',
87 entry = extensions.wrapcommand(commands.table, 'clone',
89 overrides.overrideclone)
88 overrides.overrideclone)
90 cloneopt = [('', 'all-largefiles', None,
89 cloneopt = [('', 'all-largefiles', None,
91 _('download all versions of all largefiles'))]
90 _('download all versions of all largefiles'))]
92 entry[1].extend(cloneopt)
91 entry[1].extend(cloneopt)
93 entry = extensions.wrapfunction(hg, 'clone', overrides.hgclone)
92 entry = extensions.wrapfunction(hg, 'clone', overrides.hgclone)
94
93
95 entry = extensions.wrapcommand(commands.table, 'cat',
94 entry = extensions.wrapcommand(commands.table, 'cat',
96 overrides.overridecat)
95 overrides.overridecat)
97 entry = extensions.wrapfunction(merge, '_checkunknownfile',
96 entry = extensions.wrapfunction(merge, '_checkunknownfile',
98 overrides.overridecheckunknownfile)
97 overrides.overridecheckunknownfile)
99 entry = extensions.wrapfunction(merge, 'calculateupdates',
98 entry = extensions.wrapfunction(merge, 'calculateupdates',
100 overrides.overridecalculateupdates)
99 overrides.overridecalculateupdates)
101 entry = extensions.wrapfunction(merge, 'recordupdates',
100 entry = extensions.wrapfunction(merge, 'recordupdates',
102 overrides.mergerecordupdates)
101 overrides.mergerecordupdates)
103 entry = extensions.wrapfunction(merge, 'update',
102 entry = extensions.wrapfunction(merge, 'update',
104 overrides.mergeupdate)
103 overrides.mergeupdate)
105 entry = extensions.wrapfunction(filemerge, '_filemerge',
104 entry = extensions.wrapfunction(filemerge, '_filemerge',
106 overrides.overridefilemerge)
105 overrides.overridefilemerge)
107 entry = extensions.wrapfunction(cmdutil, 'copy',
106 entry = extensions.wrapfunction(cmdutil, 'copy',
108 overrides.overridecopy)
107 overrides.overridecopy)
109
108
110 # Summary calls dirty on the subrepos
109 # Summary calls dirty on the subrepos
111 entry = extensions.wrapfunction(subrepo.hgsubrepo, 'dirty',
110 entry = extensions.wrapfunction(subrepo.hgsubrepo, 'dirty',
112 overrides.overridedirty)
111 overrides.overridedirty)
113
112
114 entry = extensions.wrapfunction(cmdutil, 'revert',
113 entry = extensions.wrapfunction(cmdutil, 'revert',
115 overrides.overriderevert)
114 overrides.overriderevert)
116
115
117 extensions.wrapcommand(commands.table, 'archive',
116 extensions.wrapcommand(commands.table, 'archive',
118 overrides.overridearchivecmd)
117 overrides.overridearchivecmd)
119 extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
118 extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
120 extensions.wrapfunction(subrepo.hgsubrepo, 'archive',
119 extensions.wrapfunction(subrepo.hgsubrepo, 'archive',
121 overrides.hgsubrepoarchive)
120 overrides.hgsubrepoarchive)
122 extensions.wrapfunction(webcommands, 'archive',
121 extensions.wrapfunction(webcommands, 'archive',
123 overrides.hgwebarchive)
122 overrides.hgwebarchive)
124 extensions.wrapfunction(cmdutil, 'bailifchanged',
123 extensions.wrapfunction(cmdutil, 'bailifchanged',
125 overrides.overridebailifchanged)
124 overrides.overridebailifchanged)
126
125
127 extensions.wrapfunction(scmutil, 'marktouched',
126 extensions.wrapfunction(scmutil, 'marktouched',
128 overrides.scmutilmarktouched)
127 overrides.scmutilmarktouched)
129
128
130 # create the new wireproto commands ...
129 # create the new wireproto commands ...
131 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
130 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
132 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
131 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
133 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
132 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
134
133
135 # ... and wrap some existing ones
134 # ... and wrap some existing ones
136 wireproto.commands['capabilities'] = (proto.capabilities, '')
135 wireproto.commands['capabilities'] = (proto.capabilities, '')
137 wireproto.commands['heads'] = (proto.heads, '')
136 wireproto.commands['heads'] = (proto.heads, '')
138 wireproto.commands['lheads'] = (wireproto.heads, '')
137 wireproto.commands['lheads'] = (wireproto.heads, '')
139
138
140 # make putlfile behave the same as push and {get,stat}lfile behave
139 # make putlfile behave the same as push and {get,stat}lfile behave
141 # the same as pull w.r.t. permissions checks
140 # the same as pull w.r.t. permissions checks
142 hgweb_mod.perms['putlfile'] = 'push'
141 hgweb_mod.perms['putlfile'] = 'push'
143 hgweb_mod.perms['getlfile'] = 'pull'
142 hgweb_mod.perms['getlfile'] = 'pull'
144 hgweb_mod.perms['statlfile'] = 'pull'
143 hgweb_mod.perms['statlfile'] = 'pull'
145
144
146 extensions.wrapfunction(webcommands, 'decodepath', overrides.decodepath)
145 extensions.wrapfunction(webcommands, 'decodepath', overrides.decodepath)
147
146
148 # the hello wireproto command uses wireproto.capabilities, so it won't see
147 # the hello wireproto command uses wireproto.capabilities, so it won't see
149 # our largefiles capability unless we replace the actual function as well.
148 # our largefiles capability unless we replace the actual function as well.
150 proto.capabilitiesorig = wireproto.capabilities
149 proto.capabilitiesorig = wireproto.capabilities
151 wireproto.capabilities = proto.capabilities
150 wireproto.capabilities = proto.capabilities
152
151
153 # can't do this in reposetup because it needs to have happened before
152 # can't do this in reposetup because it needs to have happened before
154 # wirerepo.__init__ is called
153 # wirerepo.__init__ is called
155 proto.ssholdcallstream = sshpeer.sshpeer._callstream
154 proto.ssholdcallstream = sshpeer.sshpeer._callstream
156 proto.httpoldcallstream = httppeer.httppeer._callstream
155 proto.httpoldcallstream = httppeer.httppeer._callstream
157 sshpeer.sshpeer._callstream = proto.sshrepocallstream
156 sshpeer.sshpeer._callstream = proto.sshrepocallstream
158 httppeer.httppeer._callstream = proto.httprepocallstream
157 httppeer.httppeer._callstream = proto.httprepocallstream
159
158
160 # override some extensions' stuff as well
159 # override some extensions' stuff as well
161 for name, module in extensions.extensions():
160 for name, module in extensions.extensions():
162 if name == 'purge':
161 if name == 'purge':
163 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
162 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
164 overrides.overridepurge)
163 overrides.overridepurge)
165 if name == 'rebase':
164 if name == 'rebase':
166 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
165 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
167 overrides.overriderebase)
166 overrides.overriderebase)
168 extensions.wrapfunction(module, 'rebase',
167 extensions.wrapfunction(module, 'rebase',
169 overrides.overriderebase)
168 overrides.overriderebase)
170 if name == 'transplant':
169 if name == 'transplant':
171 extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant',
170 extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant',
172 overrides.overridetransplant)
171 overrides.overridetransplant)
172
173 overrides.revsetpredicate.setup()
@@ -1,3607 +1,3609 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from mercurial.i18n import _
65 from mercurial.i18n import _
66 from mercurial.node import bin, hex, short, nullid, nullrev
66 from mercurial.node import bin, hex, short, nullid, nullrev
67 from mercurial.lock import release
67 from mercurial.lock import release
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
69 from mercurial import extensions, error, phases
69 from mercurial import extensions, error, phases
70 from mercurial import patch as patchmod
70 from mercurial import patch as patchmod
71 from mercurial import lock as lockmod
71 from mercurial import lock as lockmod
72 from mercurial import localrepo
72 from mercurial import localrepo
73 from mercurial import subrepo
73 from mercurial import subrepo
74 import os, re, errno, shutil
74 import os, re, errno, shutil
75
75
76 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
76 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
77
77
78 cmdtable = {}
78 cmdtable = {}
79 command = cmdutil.command(cmdtable)
79 command = cmdutil.command(cmdtable)
80 # Note for extension authors: ONLY specify testedwith = 'internal' for
80 # Note for extension authors: ONLY specify testedwith = 'internal' for
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 # be specifying the version(s) of Mercurial they are tested with, or
82 # be specifying the version(s) of Mercurial they are tested with, or
83 # leave the attribute unspecified.
83 # leave the attribute unspecified.
84 testedwith = 'internal'
84 testedwith = 'internal'
85
85
86 # force load strip extension formerly included in mq and import some utility
86 # force load strip extension formerly included in mq and import some utility
87 try:
87 try:
88 stripext = extensions.find('strip')
88 stripext = extensions.find('strip')
89 except KeyError:
89 except KeyError:
90 # note: load is lazy so we could avoid the try-except,
90 # note: load is lazy so we could avoid the try-except,
91 # but I (marmoute) prefer this explicit code.
91 # but I (marmoute) prefer this explicit code.
92 class dummyui(object):
92 class dummyui(object):
93 def debug(self, msg):
93 def debug(self, msg):
94 pass
94 pass
95 stripext = extensions.load(dummyui(), 'strip', '')
95 stripext = extensions.load(dummyui(), 'strip', '')
96
96
97 strip = stripext.strip
97 strip = stripext.strip
98 checksubstate = stripext.checksubstate
98 checksubstate = stripext.checksubstate
99 checklocalchanges = stripext.checklocalchanges
99 checklocalchanges = stripext.checklocalchanges
100
100
101
101
102 # Patch names looks like unix-file names.
102 # Patch names looks like unix-file names.
103 # They must be joinable with queue directory and result in the patch path.
103 # They must be joinable with queue directory and result in the patch path.
104 normname = util.normpath
104 normname = util.normpath
105
105
106 class statusentry(object):
106 class statusentry(object):
107 def __init__(self, node, name):
107 def __init__(self, node, name):
108 self.node, self.name = node, name
108 self.node, self.name = node, name
109 def __repr__(self):
109 def __repr__(self):
110 return hex(self.node) + ':' + self.name
110 return hex(self.node) + ':' + self.name
111
111
112 # The order of the headers in 'hg export' HG patches:
112 # The order of the headers in 'hg export' HG patches:
113 HGHEADERS = [
113 HGHEADERS = [
114 # '# HG changeset patch',
114 # '# HG changeset patch',
115 '# User ',
115 '# User ',
116 '# Date ',
116 '# Date ',
117 '# ',
117 '# ',
118 '# Branch ',
118 '# Branch ',
119 '# Node ID ',
119 '# Node ID ',
120 '# Parent ', # can occur twice for merges - but that is not relevant for mq
120 '# Parent ', # can occur twice for merges - but that is not relevant for mq
121 ]
121 ]
122 # The order of headers in plain 'mail style' patches:
122 # The order of headers in plain 'mail style' patches:
123 PLAINHEADERS = {
123 PLAINHEADERS = {
124 'from': 0,
124 'from': 0,
125 'date': 1,
125 'date': 1,
126 'subject': 2,
126 'subject': 2,
127 }
127 }
128
128
129 def inserthgheader(lines, header, value):
129 def inserthgheader(lines, header, value):
130 """Assuming lines contains a HG patch header, add a header line with value.
130 """Assuming lines contains a HG patch header, add a header line with value.
131 >>> try: inserthgheader([], '# Date ', 'z')
131 >>> try: inserthgheader([], '# Date ', 'z')
132 ... except ValueError, inst: print "oops"
132 ... except ValueError, inst: print "oops"
133 oops
133 oops
134 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
134 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
135 ['# HG changeset patch', '# Date z']
135 ['# HG changeset patch', '# Date z']
136 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
136 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
137 ['# HG changeset patch', '# Date z', '']
137 ['# HG changeset patch', '# Date z', '']
138 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
138 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
139 ['# HG changeset patch', '# User y', '# Date z']
139 ['# HG changeset patch', '# User y', '# Date z']
140 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
140 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
141 ... '# User ', 'z')
141 ... '# User ', 'z')
142 ['# HG changeset patch', '# Date x', '# User z']
142 ['# HG changeset patch', '# Date x', '# User z']
143 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
143 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
144 ['# HG changeset patch', '# Date z']
144 ['# HG changeset patch', '# Date z']
145 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
145 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
146 ['# HG changeset patch', '# Date z', '', '# Date y']
146 ['# HG changeset patch', '# Date z', '', '# Date y']
147 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
147 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
148 ['# HG changeset patch', '# Date z', '# Parent y']
148 ['# HG changeset patch', '# Date z', '# Parent y']
149 """
149 """
150 start = lines.index('# HG changeset patch') + 1
150 start = lines.index('# HG changeset patch') + 1
151 newindex = HGHEADERS.index(header)
151 newindex = HGHEADERS.index(header)
152 bestpos = len(lines)
152 bestpos = len(lines)
153 for i in range(start, len(lines)):
153 for i in range(start, len(lines)):
154 line = lines[i]
154 line = lines[i]
155 if not line.startswith('# '):
155 if not line.startswith('# '):
156 bestpos = min(bestpos, i)
156 bestpos = min(bestpos, i)
157 break
157 break
158 for lineindex, h in enumerate(HGHEADERS):
158 for lineindex, h in enumerate(HGHEADERS):
159 if line.startswith(h):
159 if line.startswith(h):
160 if lineindex == newindex:
160 if lineindex == newindex:
161 lines[i] = header + value
161 lines[i] = header + value
162 return lines
162 return lines
163 if lineindex > newindex:
163 if lineindex > newindex:
164 bestpos = min(bestpos, i)
164 bestpos = min(bestpos, i)
165 break # next line
165 break # next line
166 lines.insert(bestpos, header + value)
166 lines.insert(bestpos, header + value)
167 return lines
167 return lines
168
168
169 def insertplainheader(lines, header, value):
169 def insertplainheader(lines, header, value):
170 """For lines containing a plain patch header, add a header line with value.
170 """For lines containing a plain patch header, add a header line with value.
171 >>> insertplainheader([], 'Date', 'z')
171 >>> insertplainheader([], 'Date', 'z')
172 ['Date: z']
172 ['Date: z']
173 >>> insertplainheader([''], 'Date', 'z')
173 >>> insertplainheader([''], 'Date', 'z')
174 ['Date: z', '']
174 ['Date: z', '']
175 >>> insertplainheader(['x'], 'Date', 'z')
175 >>> insertplainheader(['x'], 'Date', 'z')
176 ['Date: z', '', 'x']
176 ['Date: z', '', 'x']
177 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
177 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
178 ['From: y', 'Date: z', '', 'x']
178 ['From: y', 'Date: z', '', 'x']
179 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
179 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
180 [' date : x', 'From: z', '']
180 [' date : x', 'From: z', '']
181 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
181 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
182 ['Date: z', '', 'Date: y']
182 ['Date: z', '', 'Date: y']
183 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
183 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
184 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
184 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
185 """
185 """
186 newprio = PLAINHEADERS[header.lower()]
186 newprio = PLAINHEADERS[header.lower()]
187 bestpos = len(lines)
187 bestpos = len(lines)
188 for i, line in enumerate(lines):
188 for i, line in enumerate(lines):
189 if ':' in line:
189 if ':' in line:
190 lheader = line.split(':', 1)[0].strip().lower()
190 lheader = line.split(':', 1)[0].strip().lower()
191 lprio = PLAINHEADERS.get(lheader, newprio + 1)
191 lprio = PLAINHEADERS.get(lheader, newprio + 1)
192 if lprio == newprio:
192 if lprio == newprio:
193 lines[i] = '%s: %s' % (header, value)
193 lines[i] = '%s: %s' % (header, value)
194 return lines
194 return lines
195 if lprio > newprio and i < bestpos:
195 if lprio > newprio and i < bestpos:
196 bestpos = i
196 bestpos = i
197 else:
197 else:
198 if line:
198 if line:
199 lines.insert(i, '')
199 lines.insert(i, '')
200 if i < bestpos:
200 if i < bestpos:
201 bestpos = i
201 bestpos = i
202 break
202 break
203 lines.insert(bestpos, '%s: %s' % (header, value))
203 lines.insert(bestpos, '%s: %s' % (header, value))
204 return lines
204 return lines
205
205
206 class patchheader(object):
206 class patchheader(object):
207 def __init__(self, pf, plainmode=False):
207 def __init__(self, pf, plainmode=False):
208 def eatdiff(lines):
208 def eatdiff(lines):
209 while lines:
209 while lines:
210 l = lines[-1]
210 l = lines[-1]
211 if (l.startswith("diff -") or
211 if (l.startswith("diff -") or
212 l.startswith("Index:") or
212 l.startswith("Index:") or
213 l.startswith("===========")):
213 l.startswith("===========")):
214 del lines[-1]
214 del lines[-1]
215 else:
215 else:
216 break
216 break
217 def eatempty(lines):
217 def eatempty(lines):
218 while lines:
218 while lines:
219 if not lines[-1].strip():
219 if not lines[-1].strip():
220 del lines[-1]
220 del lines[-1]
221 else:
221 else:
222 break
222 break
223
223
224 message = []
224 message = []
225 comments = []
225 comments = []
226 user = None
226 user = None
227 date = None
227 date = None
228 parent = None
228 parent = None
229 format = None
229 format = None
230 subject = None
230 subject = None
231 branch = None
231 branch = None
232 nodeid = None
232 nodeid = None
233 diffstart = 0
233 diffstart = 0
234
234
235 for line in file(pf):
235 for line in file(pf):
236 line = line.rstrip()
236 line = line.rstrip()
237 if (line.startswith('diff --git')
237 if (line.startswith('diff --git')
238 or (diffstart and line.startswith('+++ '))):
238 or (diffstart and line.startswith('+++ '))):
239 diffstart = 2
239 diffstart = 2
240 break
240 break
241 diffstart = 0 # reset
241 diffstart = 0 # reset
242 if line.startswith("--- "):
242 if line.startswith("--- "):
243 diffstart = 1
243 diffstart = 1
244 continue
244 continue
245 elif format == "hgpatch":
245 elif format == "hgpatch":
246 # parse values when importing the result of an hg export
246 # parse values when importing the result of an hg export
247 if line.startswith("# User "):
247 if line.startswith("# User "):
248 user = line[7:]
248 user = line[7:]
249 elif line.startswith("# Date "):
249 elif line.startswith("# Date "):
250 date = line[7:]
250 date = line[7:]
251 elif line.startswith("# Parent "):
251 elif line.startswith("# Parent "):
252 parent = line[9:].lstrip() # handle double trailing space
252 parent = line[9:].lstrip() # handle double trailing space
253 elif line.startswith("# Branch "):
253 elif line.startswith("# Branch "):
254 branch = line[9:]
254 branch = line[9:]
255 elif line.startswith("# Node ID "):
255 elif line.startswith("# Node ID "):
256 nodeid = line[10:]
256 nodeid = line[10:]
257 elif not line.startswith("# ") and line:
257 elif not line.startswith("# ") and line:
258 message.append(line)
258 message.append(line)
259 format = None
259 format = None
260 elif line == '# HG changeset patch':
260 elif line == '# HG changeset patch':
261 message = []
261 message = []
262 format = "hgpatch"
262 format = "hgpatch"
263 elif (format != "tagdone" and (line.startswith("Subject: ") or
263 elif (format != "tagdone" and (line.startswith("Subject: ") or
264 line.startswith("subject: "))):
264 line.startswith("subject: "))):
265 subject = line[9:]
265 subject = line[9:]
266 format = "tag"
266 format = "tag"
267 elif (format != "tagdone" and (line.startswith("From: ") or
267 elif (format != "tagdone" and (line.startswith("From: ") or
268 line.startswith("from: "))):
268 line.startswith("from: "))):
269 user = line[6:]
269 user = line[6:]
270 format = "tag"
270 format = "tag"
271 elif (format != "tagdone" and (line.startswith("Date: ") or
271 elif (format != "tagdone" and (line.startswith("Date: ") or
272 line.startswith("date: "))):
272 line.startswith("date: "))):
273 date = line[6:]
273 date = line[6:]
274 format = "tag"
274 format = "tag"
275 elif format == "tag" and line == "":
275 elif format == "tag" and line == "":
276 # when looking for tags (subject: from: etc) they
276 # when looking for tags (subject: from: etc) they
277 # end once you find a blank line in the source
277 # end once you find a blank line in the source
278 format = "tagdone"
278 format = "tagdone"
279 elif message or line:
279 elif message or line:
280 message.append(line)
280 message.append(line)
281 comments.append(line)
281 comments.append(line)
282
282
283 eatdiff(message)
283 eatdiff(message)
284 eatdiff(comments)
284 eatdiff(comments)
285 # Remember the exact starting line of the patch diffs before consuming
285 # Remember the exact starting line of the patch diffs before consuming
286 # empty lines, for external use by TortoiseHg and others
286 # empty lines, for external use by TortoiseHg and others
287 self.diffstartline = len(comments)
287 self.diffstartline = len(comments)
288 eatempty(message)
288 eatempty(message)
289 eatempty(comments)
289 eatempty(comments)
290
290
291 # make sure message isn't empty
291 # make sure message isn't empty
292 if format and format.startswith("tag") and subject:
292 if format and format.startswith("tag") and subject:
293 message.insert(0, subject)
293 message.insert(0, subject)
294
294
295 self.message = message
295 self.message = message
296 self.comments = comments
296 self.comments = comments
297 self.user = user
297 self.user = user
298 self.date = date
298 self.date = date
299 self.parent = parent
299 self.parent = parent
300 # nodeid and branch are for external use by TortoiseHg and others
300 # nodeid and branch are for external use by TortoiseHg and others
301 self.nodeid = nodeid
301 self.nodeid = nodeid
302 self.branch = branch
302 self.branch = branch
303 self.haspatch = diffstart > 1
303 self.haspatch = diffstart > 1
304 self.plainmode = (plainmode or
304 self.plainmode = (plainmode or
305 '# HG changeset patch' not in self.comments and
305 '# HG changeset patch' not in self.comments and
306 any(c.startswith('Date: ') or
306 any(c.startswith('Date: ') or
307 c.startswith('From: ')
307 c.startswith('From: ')
308 for c in self.comments))
308 for c in self.comments))
309
309
310 def setuser(self, user):
310 def setuser(self, user):
311 try:
311 try:
312 inserthgheader(self.comments, '# User ', user)
312 inserthgheader(self.comments, '# User ', user)
313 except ValueError:
313 except ValueError:
314 if self.plainmode:
314 if self.plainmode:
315 insertplainheader(self.comments, 'From', user)
315 insertplainheader(self.comments, 'From', user)
316 else:
316 else:
317 tmp = ['# HG changeset patch', '# User ' + user]
317 tmp = ['# HG changeset patch', '# User ' + user]
318 self.comments = tmp + self.comments
318 self.comments = tmp + self.comments
319 self.user = user
319 self.user = user
320
320
321 def setdate(self, date):
321 def setdate(self, date):
322 try:
322 try:
323 inserthgheader(self.comments, '# Date ', date)
323 inserthgheader(self.comments, '# Date ', date)
324 except ValueError:
324 except ValueError:
325 if self.plainmode:
325 if self.plainmode:
326 insertplainheader(self.comments, 'Date', date)
326 insertplainheader(self.comments, 'Date', date)
327 else:
327 else:
328 tmp = ['# HG changeset patch', '# Date ' + date]
328 tmp = ['# HG changeset patch', '# Date ' + date]
329 self.comments = tmp + self.comments
329 self.comments = tmp + self.comments
330 self.date = date
330 self.date = date
331
331
332 def setparent(self, parent):
332 def setparent(self, parent):
333 try:
333 try:
334 inserthgheader(self.comments, '# Parent ', parent)
334 inserthgheader(self.comments, '# Parent ', parent)
335 except ValueError:
335 except ValueError:
336 if not self.plainmode:
336 if not self.plainmode:
337 tmp = ['# HG changeset patch', '# Parent ' + parent]
337 tmp = ['# HG changeset patch', '# Parent ' + parent]
338 self.comments = tmp + self.comments
338 self.comments = tmp + self.comments
339 self.parent = parent
339 self.parent = parent
340
340
341 def setmessage(self, message):
341 def setmessage(self, message):
342 if self.comments:
342 if self.comments:
343 self._delmsg()
343 self._delmsg()
344 self.message = [message]
344 self.message = [message]
345 if message:
345 if message:
346 if self.plainmode and self.comments and self.comments[-1]:
346 if self.plainmode and self.comments and self.comments[-1]:
347 self.comments.append('')
347 self.comments.append('')
348 self.comments.append(message)
348 self.comments.append(message)
349
349
350 def __str__(self):
350 def __str__(self):
351 s = '\n'.join(self.comments).rstrip()
351 s = '\n'.join(self.comments).rstrip()
352 if not s:
352 if not s:
353 return ''
353 return ''
354 return s + '\n\n'
354 return s + '\n\n'
355
355
356 def _delmsg(self):
356 def _delmsg(self):
357 '''Remove existing message, keeping the rest of the comments fields.
357 '''Remove existing message, keeping the rest of the comments fields.
358 If comments contains 'subject: ', message will prepend
358 If comments contains 'subject: ', message will prepend
359 the field and a blank line.'''
359 the field and a blank line.'''
360 if self.message:
360 if self.message:
361 subj = 'subject: ' + self.message[0].lower()
361 subj = 'subject: ' + self.message[0].lower()
362 for i in xrange(len(self.comments)):
362 for i in xrange(len(self.comments)):
363 if subj == self.comments[i].lower():
363 if subj == self.comments[i].lower():
364 del self.comments[i]
364 del self.comments[i]
365 self.message = self.message[2:]
365 self.message = self.message[2:]
366 break
366 break
367 ci = 0
367 ci = 0
368 for mi in self.message:
368 for mi in self.message:
369 while mi != self.comments[ci]:
369 while mi != self.comments[ci]:
370 ci += 1
370 ci += 1
371 del self.comments[ci]
371 del self.comments[ci]
372
372
373 def newcommit(repo, phase, *args, **kwargs):
373 def newcommit(repo, phase, *args, **kwargs):
374 """helper dedicated to ensure a commit respect mq.secret setting
374 """helper dedicated to ensure a commit respect mq.secret setting
375
375
376 It should be used instead of repo.commit inside the mq source for operation
376 It should be used instead of repo.commit inside the mq source for operation
377 creating new changeset.
377 creating new changeset.
378 """
378 """
379 repo = repo.unfiltered()
379 repo = repo.unfiltered()
380 if phase is None:
380 if phase is None:
381 if repo.ui.configbool('mq', 'secret', False):
381 if repo.ui.configbool('mq', 'secret', False):
382 phase = phases.secret
382 phase = phases.secret
383 if phase is not None:
383 if phase is not None:
384 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
384 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
385 allowemptybackup = repo.ui.backupconfig('ui', 'allowemptycommit')
385 allowemptybackup = repo.ui.backupconfig('ui', 'allowemptycommit')
386 try:
386 try:
387 if phase is not None:
387 if phase is not None:
388 repo.ui.setconfig('phases', 'new-commit', phase, 'mq')
388 repo.ui.setconfig('phases', 'new-commit', phase, 'mq')
389 repo.ui.setconfig('ui', 'allowemptycommit', True)
389 repo.ui.setconfig('ui', 'allowemptycommit', True)
390 return repo.commit(*args, **kwargs)
390 return repo.commit(*args, **kwargs)
391 finally:
391 finally:
392 repo.ui.restoreconfig(allowemptybackup)
392 repo.ui.restoreconfig(allowemptybackup)
393 if phase is not None:
393 if phase is not None:
394 repo.ui.restoreconfig(phasebackup)
394 repo.ui.restoreconfig(phasebackup)
395
395
396 class AbortNoCleanup(error.Abort):
396 class AbortNoCleanup(error.Abort):
397 pass
397 pass
398
398
399 def makepatchname(existing, title, fallbackname):
399 def makepatchname(existing, title, fallbackname):
400 """Return a suitable filename for title, adding a suffix to make
400 """Return a suitable filename for title, adding a suffix to make
401 it unique in the existing list"""
401 it unique in the existing list"""
402 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
402 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
403 if not namebase:
403 if not namebase:
404 namebase = fallbackname
404 namebase = fallbackname
405 name = namebase
405 name = namebase
406 i = 0
406 i = 0
407 while name in existing:
407 while name in existing:
408 i += 1
408 i += 1
409 name = '%s__%s' % (namebase, i)
409 name = '%s__%s' % (namebase, i)
410 return name
410 return name
411
411
412 class queue(object):
412 class queue(object):
413 def __init__(self, ui, baseui, path, patchdir=None):
413 def __init__(self, ui, baseui, path, patchdir=None):
414 self.basepath = path
414 self.basepath = path
415 try:
415 try:
416 fh = open(os.path.join(path, 'patches.queue'))
416 fh = open(os.path.join(path, 'patches.queue'))
417 cur = fh.read().rstrip()
417 cur = fh.read().rstrip()
418 fh.close()
418 fh.close()
419 if not cur:
419 if not cur:
420 curpath = os.path.join(path, 'patches')
420 curpath = os.path.join(path, 'patches')
421 else:
421 else:
422 curpath = os.path.join(path, 'patches-' + cur)
422 curpath = os.path.join(path, 'patches-' + cur)
423 except IOError:
423 except IOError:
424 curpath = os.path.join(path, 'patches')
424 curpath = os.path.join(path, 'patches')
425 self.path = patchdir or curpath
425 self.path = patchdir or curpath
426 self.opener = scmutil.opener(self.path)
426 self.opener = scmutil.opener(self.path)
427 self.ui = ui
427 self.ui = ui
428 self.baseui = baseui
428 self.baseui = baseui
429 self.applieddirty = False
429 self.applieddirty = False
430 self.seriesdirty = False
430 self.seriesdirty = False
431 self.added = []
431 self.added = []
432 self.seriespath = "series"
432 self.seriespath = "series"
433 self.statuspath = "status"
433 self.statuspath = "status"
434 self.guardspath = "guards"
434 self.guardspath = "guards"
435 self.activeguards = None
435 self.activeguards = None
436 self.guardsdirty = False
436 self.guardsdirty = False
437 # Handle mq.git as a bool with extended values
437 # Handle mq.git as a bool with extended values
438 try:
438 try:
439 gitmode = ui.configbool('mq', 'git', None)
439 gitmode = ui.configbool('mq', 'git', None)
440 if gitmode is None:
440 if gitmode is None:
441 raise error.ConfigError
441 raise error.ConfigError
442 if gitmode:
442 if gitmode:
443 self.gitmode = 'yes'
443 self.gitmode = 'yes'
444 else:
444 else:
445 self.gitmode = 'no'
445 self.gitmode = 'no'
446 except error.ConfigError:
446 except error.ConfigError:
447 # let's have check-config ignore the type mismatch
447 # let's have check-config ignore the type mismatch
448 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
448 self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
449 # deprecated config: mq.plain
449 # deprecated config: mq.plain
450 self.plainmode = ui.configbool('mq', 'plain', False)
450 self.plainmode = ui.configbool('mq', 'plain', False)
451 self.checkapplied = True
451 self.checkapplied = True
452
452
453 @util.propertycache
453 @util.propertycache
454 def applied(self):
454 def applied(self):
455 def parselines(lines):
455 def parselines(lines):
456 for l in lines:
456 for l in lines:
457 entry = l.split(':', 1)
457 entry = l.split(':', 1)
458 if len(entry) > 1:
458 if len(entry) > 1:
459 n, name = entry
459 n, name = entry
460 yield statusentry(bin(n), name)
460 yield statusentry(bin(n), name)
461 elif l.strip():
461 elif l.strip():
462 self.ui.warn(_('malformated mq status line: %s\n') % entry)
462 self.ui.warn(_('malformated mq status line: %s\n') % entry)
463 # else we ignore empty lines
463 # else we ignore empty lines
464 try:
464 try:
465 lines = self.opener.read(self.statuspath).splitlines()
465 lines = self.opener.read(self.statuspath).splitlines()
466 return list(parselines(lines))
466 return list(parselines(lines))
467 except IOError as e:
467 except IOError as e:
468 if e.errno == errno.ENOENT:
468 if e.errno == errno.ENOENT:
469 return []
469 return []
470 raise
470 raise
471
471
472 @util.propertycache
472 @util.propertycache
473 def fullseries(self):
473 def fullseries(self):
474 try:
474 try:
475 return self.opener.read(self.seriespath).splitlines()
475 return self.opener.read(self.seriespath).splitlines()
476 except IOError as e:
476 except IOError as e:
477 if e.errno == errno.ENOENT:
477 if e.errno == errno.ENOENT:
478 return []
478 return []
479 raise
479 raise
480
480
481 @util.propertycache
481 @util.propertycache
482 def series(self):
482 def series(self):
483 self.parseseries()
483 self.parseseries()
484 return self.series
484 return self.series
485
485
486 @util.propertycache
486 @util.propertycache
487 def seriesguards(self):
487 def seriesguards(self):
488 self.parseseries()
488 self.parseseries()
489 return self.seriesguards
489 return self.seriesguards
490
490
491 def invalidate(self):
491 def invalidate(self):
492 for a in 'applied fullseries series seriesguards'.split():
492 for a in 'applied fullseries series seriesguards'.split():
493 if a in self.__dict__:
493 if a in self.__dict__:
494 delattr(self, a)
494 delattr(self, a)
495 self.applieddirty = False
495 self.applieddirty = False
496 self.seriesdirty = False
496 self.seriesdirty = False
497 self.guardsdirty = False
497 self.guardsdirty = False
498 self.activeguards = None
498 self.activeguards = None
499
499
500 def diffopts(self, opts=None, patchfn=None):
500 def diffopts(self, opts=None, patchfn=None):
501 diffopts = patchmod.diffopts(self.ui, opts)
501 diffopts = patchmod.diffopts(self.ui, opts)
502 if self.gitmode == 'auto':
502 if self.gitmode == 'auto':
503 diffopts.upgrade = True
503 diffopts.upgrade = True
504 elif self.gitmode == 'keep':
504 elif self.gitmode == 'keep':
505 pass
505 pass
506 elif self.gitmode in ('yes', 'no'):
506 elif self.gitmode in ('yes', 'no'):
507 diffopts.git = self.gitmode == 'yes'
507 diffopts.git = self.gitmode == 'yes'
508 else:
508 else:
509 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
509 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
510 ' got %s') % self.gitmode)
510 ' got %s') % self.gitmode)
511 if patchfn:
511 if patchfn:
512 diffopts = self.patchopts(diffopts, patchfn)
512 diffopts = self.patchopts(diffopts, patchfn)
513 return diffopts
513 return diffopts
514
514
515 def patchopts(self, diffopts, *patches):
515 def patchopts(self, diffopts, *patches):
516 """Return a copy of input diff options with git set to true if
516 """Return a copy of input diff options with git set to true if
517 referenced patch is a git patch and should be preserved as such.
517 referenced patch is a git patch and should be preserved as such.
518 """
518 """
519 diffopts = diffopts.copy()
519 diffopts = diffopts.copy()
520 if not diffopts.git and self.gitmode == 'keep':
520 if not diffopts.git and self.gitmode == 'keep':
521 for patchfn in patches:
521 for patchfn in patches:
522 patchf = self.opener(patchfn, 'r')
522 patchf = self.opener(patchfn, 'r')
523 # if the patch was a git patch, refresh it as a git patch
523 # if the patch was a git patch, refresh it as a git patch
524 for line in patchf:
524 for line in patchf:
525 if line.startswith('diff --git'):
525 if line.startswith('diff --git'):
526 diffopts.git = True
526 diffopts.git = True
527 break
527 break
528 patchf.close()
528 patchf.close()
529 return diffopts
529 return diffopts
530
530
531 def join(self, *p):
531 def join(self, *p):
532 return os.path.join(self.path, *p)
532 return os.path.join(self.path, *p)
533
533
534 def findseries(self, patch):
534 def findseries(self, patch):
535 def matchpatch(l):
535 def matchpatch(l):
536 l = l.split('#', 1)[0]
536 l = l.split('#', 1)[0]
537 return l.strip() == patch
537 return l.strip() == patch
538 for index, l in enumerate(self.fullseries):
538 for index, l in enumerate(self.fullseries):
539 if matchpatch(l):
539 if matchpatch(l):
540 return index
540 return index
541 return None
541 return None
542
542
543 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
543 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
544
544
545 def parseseries(self):
545 def parseseries(self):
546 self.series = []
546 self.series = []
547 self.seriesguards = []
547 self.seriesguards = []
548 for l in self.fullseries:
548 for l in self.fullseries:
549 h = l.find('#')
549 h = l.find('#')
550 if h == -1:
550 if h == -1:
551 patch = l
551 patch = l
552 comment = ''
552 comment = ''
553 elif h == 0:
553 elif h == 0:
554 continue
554 continue
555 else:
555 else:
556 patch = l[:h]
556 patch = l[:h]
557 comment = l[h:]
557 comment = l[h:]
558 patch = patch.strip()
558 patch = patch.strip()
559 if patch:
559 if patch:
560 if patch in self.series:
560 if patch in self.series:
561 raise error.Abort(_('%s appears more than once in %s') %
561 raise error.Abort(_('%s appears more than once in %s') %
562 (patch, self.join(self.seriespath)))
562 (patch, self.join(self.seriespath)))
563 self.series.append(patch)
563 self.series.append(patch)
564 self.seriesguards.append(self.guard_re.findall(comment))
564 self.seriesguards.append(self.guard_re.findall(comment))
565
565
566 def checkguard(self, guard):
566 def checkguard(self, guard):
567 if not guard:
567 if not guard:
568 return _('guard cannot be an empty string')
568 return _('guard cannot be an empty string')
569 bad_chars = '# \t\r\n\f'
569 bad_chars = '# \t\r\n\f'
570 first = guard[0]
570 first = guard[0]
571 if first in '-+':
571 if first in '-+':
572 return (_('guard %r starts with invalid character: %r') %
572 return (_('guard %r starts with invalid character: %r') %
573 (guard, first))
573 (guard, first))
574 for c in bad_chars:
574 for c in bad_chars:
575 if c in guard:
575 if c in guard:
576 return _('invalid character in guard %r: %r') % (guard, c)
576 return _('invalid character in guard %r: %r') % (guard, c)
577
577
578 def setactive(self, guards):
578 def setactive(self, guards):
579 for guard in guards:
579 for guard in guards:
580 bad = self.checkguard(guard)
580 bad = self.checkguard(guard)
581 if bad:
581 if bad:
582 raise error.Abort(bad)
582 raise error.Abort(bad)
583 guards = sorted(set(guards))
583 guards = sorted(set(guards))
584 self.ui.debug('active guards: %s\n' % ' '.join(guards))
584 self.ui.debug('active guards: %s\n' % ' '.join(guards))
585 self.activeguards = guards
585 self.activeguards = guards
586 self.guardsdirty = True
586 self.guardsdirty = True
587
587
588 def active(self):
588 def active(self):
589 if self.activeguards is None:
589 if self.activeguards is None:
590 self.activeguards = []
590 self.activeguards = []
591 try:
591 try:
592 guards = self.opener.read(self.guardspath).split()
592 guards = self.opener.read(self.guardspath).split()
593 except IOError as err:
593 except IOError as err:
594 if err.errno != errno.ENOENT:
594 if err.errno != errno.ENOENT:
595 raise
595 raise
596 guards = []
596 guards = []
597 for i, guard in enumerate(guards):
597 for i, guard in enumerate(guards):
598 bad = self.checkguard(guard)
598 bad = self.checkguard(guard)
599 if bad:
599 if bad:
600 self.ui.warn('%s:%d: %s\n' %
600 self.ui.warn('%s:%d: %s\n' %
601 (self.join(self.guardspath), i + 1, bad))
601 (self.join(self.guardspath), i + 1, bad))
602 else:
602 else:
603 self.activeguards.append(guard)
603 self.activeguards.append(guard)
604 return self.activeguards
604 return self.activeguards
605
605
606 def setguards(self, idx, guards):
606 def setguards(self, idx, guards):
607 for g in guards:
607 for g in guards:
608 if len(g) < 2:
608 if len(g) < 2:
609 raise error.Abort(_('guard %r too short') % g)
609 raise error.Abort(_('guard %r too short') % g)
610 if g[0] not in '-+':
610 if g[0] not in '-+':
611 raise error.Abort(_('guard %r starts with invalid char') % g)
611 raise error.Abort(_('guard %r starts with invalid char') % g)
612 bad = self.checkguard(g[1:])
612 bad = self.checkguard(g[1:])
613 if bad:
613 if bad:
614 raise error.Abort(bad)
614 raise error.Abort(bad)
615 drop = self.guard_re.sub('', self.fullseries[idx])
615 drop = self.guard_re.sub('', self.fullseries[idx])
616 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
616 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
617 self.parseseries()
617 self.parseseries()
618 self.seriesdirty = True
618 self.seriesdirty = True
619
619
620 def pushable(self, idx):
620 def pushable(self, idx):
621 if isinstance(idx, str):
621 if isinstance(idx, str):
622 idx = self.series.index(idx)
622 idx = self.series.index(idx)
623 patchguards = self.seriesguards[idx]
623 patchguards = self.seriesguards[idx]
624 if not patchguards:
624 if not patchguards:
625 return True, None
625 return True, None
626 guards = self.active()
626 guards = self.active()
627 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
627 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
628 if exactneg:
628 if exactneg:
629 return False, repr(exactneg[0])
629 return False, repr(exactneg[0])
630 pos = [g for g in patchguards if g[0] == '+']
630 pos = [g for g in patchguards if g[0] == '+']
631 exactpos = [g for g in pos if g[1:] in guards]
631 exactpos = [g for g in pos if g[1:] in guards]
632 if pos:
632 if pos:
633 if exactpos:
633 if exactpos:
634 return True, repr(exactpos[0])
634 return True, repr(exactpos[0])
635 return False, ' '.join(map(repr, pos))
635 return False, ' '.join(map(repr, pos))
636 return True, ''
636 return True, ''
637
637
638 def explainpushable(self, idx, all_patches=False):
638 def explainpushable(self, idx, all_patches=False):
639 if all_patches:
639 if all_patches:
640 write = self.ui.write
640 write = self.ui.write
641 else:
641 else:
642 write = self.ui.warn
642 write = self.ui.warn
643
643
644 if all_patches or self.ui.verbose:
644 if all_patches or self.ui.verbose:
645 if isinstance(idx, str):
645 if isinstance(idx, str):
646 idx = self.series.index(idx)
646 idx = self.series.index(idx)
647 pushable, why = self.pushable(idx)
647 pushable, why = self.pushable(idx)
648 if all_patches and pushable:
648 if all_patches and pushable:
649 if why is None:
649 if why is None:
650 write(_('allowing %s - no guards in effect\n') %
650 write(_('allowing %s - no guards in effect\n') %
651 self.series[idx])
651 self.series[idx])
652 else:
652 else:
653 if not why:
653 if not why:
654 write(_('allowing %s - no matching negative guards\n') %
654 write(_('allowing %s - no matching negative guards\n') %
655 self.series[idx])
655 self.series[idx])
656 else:
656 else:
657 write(_('allowing %s - guarded by %s\n') %
657 write(_('allowing %s - guarded by %s\n') %
658 (self.series[idx], why))
658 (self.series[idx], why))
659 if not pushable:
659 if not pushable:
660 if why:
660 if why:
661 write(_('skipping %s - guarded by %s\n') %
661 write(_('skipping %s - guarded by %s\n') %
662 (self.series[idx], why))
662 (self.series[idx], why))
663 else:
663 else:
664 write(_('skipping %s - no matching guards\n') %
664 write(_('skipping %s - no matching guards\n') %
665 self.series[idx])
665 self.series[idx])
666
666
667 def savedirty(self):
667 def savedirty(self):
668 def writelist(items, path):
668 def writelist(items, path):
669 fp = self.opener(path, 'w')
669 fp = self.opener(path, 'w')
670 for i in items:
670 for i in items:
671 fp.write("%s\n" % i)
671 fp.write("%s\n" % i)
672 fp.close()
672 fp.close()
673 if self.applieddirty:
673 if self.applieddirty:
674 writelist(map(str, self.applied), self.statuspath)
674 writelist(map(str, self.applied), self.statuspath)
675 self.applieddirty = False
675 self.applieddirty = False
676 if self.seriesdirty:
676 if self.seriesdirty:
677 writelist(self.fullseries, self.seriespath)
677 writelist(self.fullseries, self.seriespath)
678 self.seriesdirty = False
678 self.seriesdirty = False
679 if self.guardsdirty:
679 if self.guardsdirty:
680 writelist(self.activeguards, self.guardspath)
680 writelist(self.activeguards, self.guardspath)
681 self.guardsdirty = False
681 self.guardsdirty = False
682 if self.added:
682 if self.added:
683 qrepo = self.qrepo()
683 qrepo = self.qrepo()
684 if qrepo:
684 if qrepo:
685 qrepo[None].add(f for f in self.added if f not in qrepo[None])
685 qrepo[None].add(f for f in self.added if f not in qrepo[None])
686 self.added = []
686 self.added = []
687
687
688 def removeundo(self, repo):
688 def removeundo(self, repo):
689 undo = repo.sjoin('undo')
689 undo = repo.sjoin('undo')
690 if not os.path.exists(undo):
690 if not os.path.exists(undo):
691 return
691 return
692 try:
692 try:
693 os.unlink(undo)
693 os.unlink(undo)
694 except OSError as inst:
694 except OSError as inst:
695 self.ui.warn(_('error removing undo: %s\n') % str(inst))
695 self.ui.warn(_('error removing undo: %s\n') % str(inst))
696
696
697 def backup(self, repo, files, copy=False):
697 def backup(self, repo, files, copy=False):
698 # backup local changes in --force case
698 # backup local changes in --force case
699 for f in sorted(files):
699 for f in sorted(files):
700 absf = repo.wjoin(f)
700 absf = repo.wjoin(f)
701 if os.path.lexists(absf):
701 if os.path.lexists(absf):
702 self.ui.note(_('saving current version of %s as %s\n') %
702 self.ui.note(_('saving current version of %s as %s\n') %
703 (f, cmdutil.origpath(self.ui, repo, f)))
703 (f, cmdutil.origpath(self.ui, repo, f)))
704
704
705 absorig = cmdutil.origpath(self.ui, repo, absf)
705 absorig = cmdutil.origpath(self.ui, repo, absf)
706 if copy:
706 if copy:
707 util.copyfile(absf, absorig)
707 util.copyfile(absf, absorig)
708 else:
708 else:
709 util.rename(absf, absorig)
709 util.rename(absf, absorig)
710
710
711 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
711 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
712 fp=None, changes=None, opts={}):
712 fp=None, changes=None, opts={}):
713 stat = opts.get('stat')
713 stat = opts.get('stat')
714 m = scmutil.match(repo[node1], files, opts)
714 m = scmutil.match(repo[node1], files, opts)
715 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
715 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
716 changes, stat, fp)
716 changes, stat, fp)
717
717
718 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
718 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
719 # first try just applying the patch
719 # first try just applying the patch
720 (err, n) = self.apply(repo, [patch], update_status=False,
720 (err, n) = self.apply(repo, [patch], update_status=False,
721 strict=True, merge=rev)
721 strict=True, merge=rev)
722
722
723 if err == 0:
723 if err == 0:
724 return (err, n)
724 return (err, n)
725
725
726 if n is None:
726 if n is None:
727 raise error.Abort(_("apply failed for patch %s") % patch)
727 raise error.Abort(_("apply failed for patch %s") % patch)
728
728
729 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
729 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
730
730
731 # apply failed, strip away that rev and merge.
731 # apply failed, strip away that rev and merge.
732 hg.clean(repo, head)
732 hg.clean(repo, head)
733 strip(self.ui, repo, [n], update=False, backup=False)
733 strip(self.ui, repo, [n], update=False, backup=False)
734
734
735 ctx = repo[rev]
735 ctx = repo[rev]
736 ret = hg.merge(repo, rev)
736 ret = hg.merge(repo, rev)
737 if ret:
737 if ret:
738 raise error.Abort(_("update returned %d") % ret)
738 raise error.Abort(_("update returned %d") % ret)
739 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
739 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
740 if n is None:
740 if n is None:
741 raise error.Abort(_("repo commit failed"))
741 raise error.Abort(_("repo commit failed"))
742 try:
742 try:
743 ph = patchheader(mergeq.join(patch), self.plainmode)
743 ph = patchheader(mergeq.join(patch), self.plainmode)
744 except Exception:
744 except Exception:
745 raise error.Abort(_("unable to read %s") % patch)
745 raise error.Abort(_("unable to read %s") % patch)
746
746
747 diffopts = self.patchopts(diffopts, patch)
747 diffopts = self.patchopts(diffopts, patch)
748 patchf = self.opener(patch, "w")
748 patchf = self.opener(patch, "w")
749 comments = str(ph)
749 comments = str(ph)
750 if comments:
750 if comments:
751 patchf.write(comments)
751 patchf.write(comments)
752 self.printdiff(repo, diffopts, head, n, fp=patchf)
752 self.printdiff(repo, diffopts, head, n, fp=patchf)
753 patchf.close()
753 patchf.close()
754 self.removeundo(repo)
754 self.removeundo(repo)
755 return (0, n)
755 return (0, n)
756
756
757 def qparents(self, repo, rev=None):
757 def qparents(self, repo, rev=None):
758 """return the mq handled parent or p1
758 """return the mq handled parent or p1
759
759
760 In some case where mq get himself in being the parent of a merge the
760 In some case where mq get himself in being the parent of a merge the
761 appropriate parent may be p2.
761 appropriate parent may be p2.
762 (eg: an in progress merge started with mq disabled)
762 (eg: an in progress merge started with mq disabled)
763
763
764 If no parent are managed by mq, p1 is returned.
764 If no parent are managed by mq, p1 is returned.
765 """
765 """
766 if rev is None:
766 if rev is None:
767 (p1, p2) = repo.dirstate.parents()
767 (p1, p2) = repo.dirstate.parents()
768 if p2 == nullid:
768 if p2 == nullid:
769 return p1
769 return p1
770 if not self.applied:
770 if not self.applied:
771 return None
771 return None
772 return self.applied[-1].node
772 return self.applied[-1].node
773 p1, p2 = repo.changelog.parents(rev)
773 p1, p2 = repo.changelog.parents(rev)
774 if p2 != nullid and p2 in [x.node for x in self.applied]:
774 if p2 != nullid and p2 in [x.node for x in self.applied]:
775 return p2
775 return p2
776 return p1
776 return p1
777
777
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Pull patches named in *series* from another queue *mergeq*,
        committing each on top of this queue.

        Returns a tuple (err, head): err is 0 on success (non-zero on the
        first failure), head is the node the last patch was applied on
        (None when a patch could not be resolved).
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # guarded patches are skipped, not treated as errors
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
816
816
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file

        Returns (success, files, fuzz): success is a boolean, files is the
        list of files touched by the patch, fuzz is the fuzz level reported
        by patchmod.patch (False when the patch failed outright).
        '''
        files = set()
        try:
            # patchmod.patch fills *files* with every file the patch touches
            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                                  files=files, eolmode=None)
            return (True, list(files), fuzz)
        except Exception as inst:
            # broad catch is deliberate: any patching error is reported and
            # converted into a failure return, not propagated
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            self.ui.traceback()
            return (False, list(files), False)
831
831
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply the patches in *series* inside a wlock/lock/transaction,
        delegating the real work to _apply.

        On success (and on AbortNoCleanup) the transaction is closed and the
        dirty queue state is saved; on any other exception the transaction
        is aborted and cached state invalidated before re-raising.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # commit what was done so far; caller handles the abort
                tr.close()
                self.savedirty()
                raise
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    # in-memory queue state no longer matches disk
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
860
860
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None  # node of the last commit created
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                # guarded patch: explain and skip, not an error
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                # ph.message is a list of lines here
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # back up only files both touched by this patch and
                    # requested for backup; shrink tobackup accordingly
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("conflicting local changes found"),
                            hint=_("did you forget to qrefresh?"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                # self.patch returns True on success; invert so that
                # patcherr is truthy exactly when the patch failed
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.beginparentchange()
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)
                repo.dirstate.endparentchange()

            if all_files and '.hgsubstate' in all_files:
                # the patch touched subrepo state; merge .hgsubstate too
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                                                  overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise error.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise error.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            # the commit is recorded even when the patch had rejects, so
            # the user can qrefresh after fixing them up
            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working "
                               "directory\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
967
967
    def _cleanup(self, patches, numrevs, keep=False):
        """Remove *patches* from the series (and, unless *keep*, their
        on-disk patch files), popping the first *numrevs* applied entries.

        Returns the list of nodes for the entries that were removed from
        the applied list.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError as inst:
                    # a missing patch file is fine; anything else is not
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # delete from fullseries highest-index-first so earlier indexes
        # stay valid while we delete
        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                # patches came from revisions: warn, mapping node to name
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise error.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
1008
1008
    def _revpatches(self, repo, revs):
        """Map the sorted revision list *revs* to the names of the applied
        patches they correspond to, aborting if any revision is not an
        applied patch in order.

        NOTE(review): assumes self.applied is non-empty and revs aligns
        with the start of the applied stack — callers appear to guarantee
        this; confirm before reusing elsewhere.
        """
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise error.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise error.Abort(msg % rev)

            patch = self.applied[i].name
            # warn when the changeset still carries only the auto-generated
            # message, i.e. the patch was never given a real description
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches
1032
1032
    def finish(self, repo, revs):
        """Finalize the applied patches for revisions *revs*: remove them
        from the queue and, when mq.secret is set, advance their phase to
        the configured new-commit phase."""
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret', False):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = repo.ui.config('phases', 'new-commit', phases.draft)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                tr = repo.transaction('qfinish')
                try:
                    phases.advanceboundary(repo, tr, tphase, qfinished)
                    tr.close()
                finally:
                    tr.release()
1050
1050
1051 def delete(self, repo, patches, opts):
1051 def delete(self, repo, patches, opts):
1052 if not patches and not opts.get('rev'):
1052 if not patches and not opts.get('rev'):
1053 raise error.Abort(_('qdelete requires at least one revision or '
1053 raise error.Abort(_('qdelete requires at least one revision or '
1054 'patch name'))
1054 'patch name'))
1055
1055
1056 realpatches = []
1056 realpatches = []
1057 for patch in patches:
1057 for patch in patches:
1058 patch = self.lookup(patch, strict=True)
1058 patch = self.lookup(patch, strict=True)
1059 info = self.isapplied(patch)
1059 info = self.isapplied(patch)
1060 if info:
1060 if info:
1061 raise error.Abort(_("cannot delete applied patch %s") % patch)
1061 raise error.Abort(_("cannot delete applied patch %s") % patch)
1062 if patch not in self.series:
1062 if patch not in self.series:
1063 raise error.Abort(_("patch %s not in series file") % patch)
1063 raise error.Abort(_("patch %s not in series file") % patch)
1064 if patch not in realpatches:
1064 if patch not in realpatches:
1065 realpatches.append(patch)
1065 realpatches.append(patch)
1066
1066
1067 numrevs = 0
1067 numrevs = 0
1068 if opts.get('rev'):
1068 if opts.get('rev'):
1069 if not self.applied:
1069 if not self.applied:
1070 raise error.Abort(_('no patches applied'))
1070 raise error.Abort(_('no patches applied'))
1071 revs = scmutil.revrange(repo, opts.get('rev'))
1071 revs = scmutil.revrange(repo, opts.get('rev'))
1072 revs.sort()
1072 revs.sort()
1073 revpatches = self._revpatches(repo, revs)
1073 revpatches = self._revpatches(repo, revs)
1074 realpatches += revpatches
1074 realpatches += revpatches
1075 numrevs = len(revpatches)
1075 numrevs = len(revpatches)
1076
1076
1077 self._cleanup(realpatches, numrevs, opts.get('keep'))
1077 self._cleanup(realpatches, numrevs, opts.get('keep'))
1078
1078
1079 def checktoppatch(self, repo):
1079 def checktoppatch(self, repo):
1080 '''check that working directory is at qtip'''
1080 '''check that working directory is at qtip'''
1081 if self.applied:
1081 if self.applied:
1082 top = self.applied[-1].node
1082 top = self.applied[-1].node
1083 patch = self.applied[-1].name
1083 patch = self.applied[-1].name
1084 if repo.dirstate.p1() != top:
1084 if repo.dirstate.p1() != top:
1085 raise error.Abort(_("working directory revision is not qtip"))
1085 raise error.Abort(_("working directory revision is not qtip"))
1086 return top, patch
1086 return top, patch
1087 return None, None
1087 return None, None
1088
1088
1089 def putsubstate2changes(self, substatestate, changes):
1089 def putsubstate2changes(self, substatestate, changes):
1090 for files in changes[:3]:
1090 for files in changes[:3]:
1091 if '.hgsubstate' in files:
1091 if '.hgsubstate' in files:
1092 return # already listed up
1092 return # already listed up
1093 # not yet listed up
1093 # not yet listed up
1094 if substatestate in 'a?':
1094 if substatestate in 'a?':
1095 changes[1].append('.hgsubstate')
1095 changes[1].append('.hgsubstate')
1096 elif substatestate in 'r':
1096 elif substatestate in 'r':
1097 changes[2].append('.hgsubstate')
1097 changes[2].append('.hgsubstate')
1098 else: # modified
1098 else: # modified
1099 changes[0].append('.hgsubstate')
1099 changes[0].append('.hgsubstate')
1100
1100
1101 def checklocalchanges(self, repo, force=False, refresh=True):
1101 def checklocalchanges(self, repo, force=False, refresh=True):
1102 excsuffix = ''
1102 excsuffix = ''
1103 if refresh:
1103 if refresh:
1104 excsuffix = ', qrefresh first'
1104 excsuffix = ', qrefresh first'
1105 # plain versions for i18n tool to detect them
1105 # plain versions for i18n tool to detect them
1106 _("local changes found, qrefresh first")
1106 _("local changes found, qrefresh first")
1107 _("local changed subrepos found, qrefresh first")
1107 _("local changed subrepos found, qrefresh first")
1108 return checklocalchanges(repo, force, excsuffix)
1108 return checklocalchanges(repo, force, excsuffix)
1109
1109
    # patch names that clash with queue control files or path components
    _reserved = ('series', 'status', 'guards', '.', '..')
    def checkreservedname(self, name):
        """Abort if *name* is a reserved word, starts with a reserved
        prefix ('.hg'/'.mq'), or contains a forbidden character."""
        if name in self._reserved:
            raise error.Abort(_('"%s" cannot be used as the name of a patch')
                              % name)
        for prefix in ('.hg', '.mq'):
            if name.startswith(prefix):
                raise error.Abort(_('patch name cannot begin with "%s"')
                                  % prefix)
        # '#' and ':' have meaning in the series file; newlines would
        # corrupt it
        for c in ('#', ':', '\r', '\n'):
            if c in name:
                raise error.Abort(_('%r cannot be used in the name of a patch')
                                  % c)
1123
1123
1124 def checkpatchname(self, name, force=False):
1124 def checkpatchname(self, name, force=False):
1125 self.checkreservedname(name)
1125 self.checkreservedname(name)
1126 if not force and os.path.exists(self.join(name)):
1126 if not force and os.path.exists(self.join(name)):
1127 if os.path.isdir(self.join(name)):
1127 if os.path.isdir(self.join(name)):
1128 raise error.Abort(_('"%s" already exists as a directory')
1128 raise error.Abort(_('"%s" already exists as a directory')
1129 % name)
1129 % name)
1130 else:
1130 else:
1131 raise error.Abort(_('patch "%s" already exists') % name)
1131 raise error.Abort(_('patch "%s" already exists') % name)
1132
1132
1133 def checkkeepchanges(self, keepchanges, force):
1133 def checkkeepchanges(self, keepchanges, force):
1134 if force and keepchanges:
1134 if force and keepchanges:
1135 raise error.Abort(_('cannot use both --force and --keep-changes'))
1135 raise error.Abort(_('cannot use both --force and --keep-changes'))
1136
1136
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch *patchfn* from the current local changes
        (qnew): commit the changes, insert the patch into the series, and
        write the patch file.

        options:
        msg: a string or a no-argument function returning a string
        Other recognized opts: edit, editform, user, date, git, checkname,
        include, exclude.
        """
        msg = opts.get('msg')
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qnew')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = checksubstate(repo)
        if inclsubs:
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise error.Abort('%s: %s' % (f, msg))
            match = scmutil.match(repo[None], pats, opts, badfn=badfn)
            changes = repo.status(match=match)
        else:
            # no explicit file selection: take all local changes
            changes = self.checklocalchanges(repo, force=True)
        commitfiles = list(inclsubs)
        for files in changes[:3]:
            commitfiles.extend(files)
        match = scmutil.matchfiles(repo, commitfiles)
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot manage merge changesets'))
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        wlock = repo.wlock()
        try:
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError as e:
                raise error.Abort(_('cannot write patch "%s": %s')
                                  % (patchfn, e.strerror))
            try:
                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        # fall back to the default message when the user
                        # leaves the editor empty
                        if desc.rstrip():
                            return desc
                        else:
                            return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    commitmsg = msg
                else:
                    commitmsg = msg or defaultmsg

                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True, editor=editor)
                if n is None:
                    raise error.Abort(_("repo commit failed"))
                try:
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    nctx = repo[n]
                    ph = patchheader(self.join(patchfn), self.plainmode)
                    if user:
                        ph.setuser(user)
                    if date:
                        ph.setdate('%s %s' % date)
                    ph.setparent(hex(nctx.p1().node()))
                    # the default message is not stored in the patch header
                    msg = nctx.description().strip()
                    if msg == defaultmsg.strip():
                        msg = ''
                    ph.setmessage(msg)
                    p.write(str(ph))
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            self.putsubstate2changes(substatestate, changes)
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises
                    # undo the commit if writing the patch file failed
                    repo.rollback()
                    raise
            except Exception:
                # any failure after the file was opened: remove the
                # (possibly partial) patch file, then re-raise
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)
1244
1244
1245 def isapplied(self, patch):
1245 def isapplied(self, patch):
1246 """returns (index, rev, patch)"""
1246 """returns (index, rev, patch)"""
1247 for i, a in enumerate(self.applied):
1247 for i, a in enumerate(self.applied):
1248 if a.name == patch:
1248 if a.name == patch:
1249 return (i, a.node, a.name)
1249 return (i, a.node, a.name)
1250 return None
1250 return None
1251
1251
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch reference to an exact series
        name, per the rules above; raises error.Abort when no match."""
        def partialname(s):
            # exact name, then unique-substring, then qtip/qbase aliases
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True) - 1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        # numeric reference only when no patch file of that name exists
        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                # negative indexes count from the end, Python-style
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

        if not strict:
            res = partialname(patch)
            if res:
                return res
            # name-num: num patches before the named one
            minus = patch.rfind('-')
            if minus >= 0:
                res = partialname(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # name+num: num patches after the named one
            plus = patch.rfind('+')
            if plus >= 0:
                res = partialname(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise error.Abort(_("patch %s not in series") % patch)
1318
1318
    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
             all=False, move=False, exact=False, nobackup=False,
             keepchanges=False):
        """Apply (qpush) one or more unapplied patches from the series.

        Pushes forward up to ``patch`` (name, index, or +/- offset resolved
        via self.lookup), or all remaining patches when ``all`` is set, or
        just the next patch otherwise.  Returns 0 on success/no-op, 1 on
        error conditions reported to the user; may raise error.Abort.

        NOTE(review): ``list`` shadows the builtin but is part of the
        existing external interface and is only forwarded to self.apply.
        """
        self.checkkeepchanges(keepchanges, force)
        diffopts = self.diffopts()
        wlock = repo.wlock()
        try:
            # warn (but do not abort) when the working directory is not at
            # a branch head, unless --exact will relocate it anyway
            heads = []
            for hs in repo.branchmap().itervalues():
                heads.extend(hs)
            if not heads:
                heads = [nullid]
            if repo.dirstate.p1() not in heads and not exact:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if info and info[0] >= len(self.applied) - 1:
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0

                pushable, reason = self.pushable(patch)
                if pushable:
                    if self.series.index(patch) < self.seriesend():
                        raise error.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                else:
                    # patch is guarded; explain which guard (or lack of
                    # matching guards) blocks it
                    if reason:
                        reason = _('guarded by %s') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.seriesend()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force and not keepchanges:
                self.checklocalchanges(repo, refresh=self.applied)

            if exact:
                # --exact: update to the recorded parent of the first patch
                # before applying; incompatible with --keep-changes, --move,
                # and any already-applied patches
                if keepchanges:
                    raise error.Abort(
                        _("cannot use --exact and --keep-changes together"))
                if move:
                    raise error.Abort(_('cannot use --exact and --move '
                                        'together'))
                if self.applied:
                    raise error.Abort(_('cannot push --exact with applied '
                                        'patches'))
                root = self.series[start]
                target = patchheader(self.join(root), self.plainmode).parent
                if not target:
                    raise error.Abort(
                        _("%s does not have a parent recorded") % root)
                if not repo[target] == repo['.']:
                    hg.update(repo, target)

            if move:
                # --move: reorder fullseries so the requested patch becomes
                # the next one to push, then reparse the series
                if not patch:
                    raise error.Abort(_("please specify the patch to move"))
                for fullstart, rpn in enumerate(self.fullseries):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                        break
                for i, rpn in enumerate(self.fullseries[fullstart:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = fullstart + i
                assert index < len(self.fullseries)
                fullpatch = self.fullseries[index]
                del self.fullseries[index]
                self.fullseries.insert(fullstart, fullpatch)
                self.parseseries()
                self.seriesdirty = True

            self.applieddirty = True
            if start > 0:
                self.checktoppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            # files to back up before patching: with --keep-changes every
            # locally touched file, with --force (unless --no-backup) only
            # modified/added ones
            tobackup = set()
            if (not nobackup and force) or keepchanges:
                status = self.checklocalchanges(repo, force=True)
                if keepchanges:
                    tobackup.update(status.modified + status.added +
                                    status.removed + status.deleted)
                else:
                    tobackup.update(status.modified + status.added)

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files,
                                     tobackup=tobackup, keepchanges=keepchanges)
            except AbortNoCleanup:
                raise
            except: # re-raises
                self.ui.warn(_('cleaning up working directory...\n'))
                cmdutil.revert(self.ui, repo, repo['.'],
                               repo.dirstate.parents(), no_backup=True)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        util.unlinkpath(repo.wjoin(f), ignoremissing=True)
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                # ret[0] > 1 signals hunks failed to apply cleanly
                msg = _("errors during apply, please fix and qrefresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]

        finally:
            wlock.release()
1469
1469
    def pop(self, repo, patch=None, force=False, update=True, all=False,
            nobackup=False, keepchanges=False):
        """Unapply (qpop) applied patches down to (and excluding) ``patch``.

        Pops everything with ``all``, down past ``patch`` when given, or
        just the topmost patch otherwise.  Strips the corresponding
        changesets and, when ``update`` is needed, restores the working
        directory files from the new queue parent without a full
        hg.update.  May raise error.Abort.
        """
        self.checkkeepchanges(keepchanges, force)
        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                info = self.isapplied(patch)
                if not info:
                    raise error.Abort(_("patch %s is not applied") % patch)

            if not self.applied:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                # even with update=False we must update if a dirstate
                # parent is one of the patches being popped
                parents = repo.dirstate.parents()
                rr = [x.node for x in self.applied]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                # skip the working-copy update when no popped patch is a
                # parent of the working directory
                parents = [p.node() for p in repo[None].parents()]
                needupdate = False
                for entry in self.applied[start:]:
                    if entry.node in parents:
                        needupdate = True
                        break
                update = needupdate

            tobackup = set()
            if update:
                s = self.checklocalchanges(repo, force=force or keepchanges)
                if force:
                    if not nobackup:
                        tobackup.update(s.modified + s.added)
                elif keepchanges:
                    tobackup.update(s.modified + s.added +
                                    s.removed + s.deleted)

            self.applieddirty = True
            end = len(self.applied)
            rev = self.applied[start].node

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise error.Abort(_('trying to pop unknown node %s') % node)

            # refuse to strip history that mq does not own or that is
            # already public
            if heads != [self.applied[-1].node]:
                raise error.Abort(_("popping would remove a revision not "
                                    "managed by this patch queue"))
            if not repo[self.applied[-1].node].mutable():
                raise error.Abort(
                    _("popping would remove a public revision"),
                    hint=_('see "hg help phases" for details'))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                ctx = repo[qp]
                m, a, r, d = repo.status(qp, '.')[:4]
                if d:
                    raise error.Abort(_("deletions found between repo revs"))

                tobackup = set(a + m + r) & tobackup
                if keepchanges and tobackup:
                    raise error.Abort(_("local changes found, qrefresh first"))
                self.backup(repo, tobackup)
                repo.dirstate.beginparentchange()
                for f in a:
                    # files added by the popped patches disappear
                    util.unlinkpath(repo.wjoin(f), ignoremissing=True)
                    repo.dirstate.drop(f)
                for f in m + r:
                    # restore modified/removed files from the target context
                    fctx = ctx[f]
                    repo.wwrite(f, fctx.data(), fctx.flags())
                    repo.dirstate.normal(f)
                repo.setparents(qp, nullid)
                repo.dirstate.endparentchange()
            for patch in reversed(self.applied[start:end]):
                self.ui.status(_("popping %s\n") % patch.name)
            del self.applied[start:end]
            strip(self.ui, repo, [rev], update=False, backup=False)
            # bring subrepos back in sync with the new parent
            for s, state in repo['.'].substate.items():
                repo['.'].sub(s).get(state)
            if self.applied:
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))
        finally:
            wlock.release()
1580
1580
1581 def diff(self, repo, pats, opts):
1581 def diff(self, repo, pats, opts):
1582 top, patch = self.checktoppatch(repo)
1582 top, patch = self.checktoppatch(repo)
1583 if not top:
1583 if not top:
1584 self.ui.write(_("no patches applied\n"))
1584 self.ui.write(_("no patches applied\n"))
1585 return
1585 return
1586 qp = self.qparents(repo, top)
1586 qp = self.qparents(repo, top)
1587 if opts.get('reverse'):
1587 if opts.get('reverse'):
1588 node1, node2 = None, qp
1588 node1, node2 = None, qp
1589 else:
1589 else:
1590 node1, node2 = qp, None
1590 node1, node2 = qp, None
1591 diffopts = self.diffopts(opts, patch)
1591 diffopts = self.diffopts(opts, patch)
1592 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1592 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1593
1593
    def refresh(self, repo, pats=None, **opts):
        """Rewrite (qrefresh) the top applied patch from the working copy.

        Recomputes the patch content, pops-and-recommits the top changeset
        with the updated message/user/date, and rewrites the on-disk patch
        file atomically.  Returns 1 when no patch is applied; raises
        error.Abort on refusal conditions.  On unexpected failure while
        the patch is popped, rebuilds the dirstate and re-raises with a
        recovery hint.
        """
        if not self.applied:
            self.ui.write(_("no patches applied\n"))
            return 1
        msg = opts.get('msg', '').rstrip()
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qrefresh')
        newuser = opts.get('user')
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % util.parsedate(newdate)
        wlock = repo.wlock()

        try:
            self.checktoppatch(repo)
            (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
            # refuse to rewrite history that has children or is public
            if repo.changelog.heads(top) != [top]:
                raise error.Abort(_("cannot qrefresh a revision with children"))
            if not repo[top].mutable():
                raise error.Abort(_("cannot qrefresh public revision"),
                                  hint=_('see "hg help phases" for details'))

            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)

            inclsubs = checksubstate(repo, hex(patchparent))
            if inclsubs:
                substatestate = repo.dirstate['.hgsubstate']

            ph = patchheader(self.join(patchfn), self.plainmode)
            diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
            if newuser:
                ph.setuser(newuser)
            if newdate:
                ph.setdate(newdate)
            ph.setparent(hex(patchparent))

            # only commit new patch when write is complete
            patchf = self.opener(patchfn, 'w', atomictemp=True)

            # update the dirstate in place, strip off the qtip commit
            # and then commit.
            #
            # this should really read:
            #   mm, dd, aa = repo.status(top, patchparent)[:3]
            # but we do it backwards to take advantage of manifest/changelog
            # caching against the next repo.status call
            mm, aa, dd = repo.status(patchparent, top)[:3]
            changes = repo.changelog.read(top)
            man = repo.manifest.read(changes[0])
            aaa = aa[:]
            matchfn = scmutil.match(repo[None], pats, opts)
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
                # filter with include/exclude options
                matchfn = scmutil.match(repo[None], opts=opts)
            else:
                match = scmutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]
            mm = set(mm)
            aa = set(aa)
            dd = set(dd)

            # we might end up with files that were added between
            # qtip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.add(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    dd.remove(x)
                    mm.add(x)
                else:
                    aa.add(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    aa.remove(x)
                    forget.append(x)
                    continue
                else:
                    mm.discard(x)
                dd.add(x)

            m = list(mm)
            r = list(dd)
            a = list(aa)

            # create 'match' that includes the files to be recommitted.
            # apply matchfn via repo.status to ensure correct case handling.
            cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
            allmatches = set(cm + ca + cr + cd)
            refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]

            files = set(inclsubs)
            for x in refreshchanges:
                files.update(x)
            match = scmutil.matchfiles(repo, files)

            # bookmarks on qtip must be moved to the recommitted changeset
            bmlist = repo[top].bookmarks()

            dsguard = None
            try:
                dsguard = cmdutil.dirstateguard(repo, 'mq.refresh')
                if diffopts.git or diffopts.upgrade:
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and qtip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(
                                copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m) - 1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.drop(f)

                user = ph.user or changes[1]

                oldphase = repo[top].phase()

                # assumes strip can roll itself back if interrupted
                repo.setparents(*cparents)
                self.applied.pop()
                self.applieddirty = True
                strip(self.ui, repo, [top], update=False, backup=False)
                dsguard.close()
            finally:
                release(dsguard)

            try:
                # might be nice to attempt to roll back strip after this

                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        # persist a non-empty edited message into the
                        # patch header; fall back to the default otherwise
                        if desc.rstrip():
                            ph.setmessage(desc)
                            return desc
                        return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    message = msg or "\n".join(ph.message)
                elif not msg:
                    if not ph.message:
                        message = defaultmsg
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg
                    ph.setmessage(msg)

                # Ensure we create a new changeset in the same phase than
                # the old one.
                lock = tr = None
                try:
                    lock = repo.lock()
                    tr = repo.transaction('mq')
                    n = newcommit(repo, oldphase, message, user, ph.date,
                                  match=match, force=True, editor=editor)
                    # only write patch after a successful commit
                    c = [list(x) for x in refreshchanges]
                    if inclsubs:
                        self.putsubstate2changes(substatestate, c)
                    chunks = patchmod.diff(repo, patchparent,
                                           changes=c, opts=diffopts)
                    comments = str(ph)
                    if comments:
                        patchf.write(comments)
                    for chunk in chunks:
                        patchf.write(chunk)
                    patchf.close()

                    marks = repo._bookmarks
                    for bm in bmlist:
                        marks[bm] = n
                    marks.recordchange(tr)
                    tr.close()

                    self.applied.append(statusentry(n, patchfn))
                finally:
                    lockmod.release(lock, tr)
            except: # re-raises
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.savedirty()
                self.ui.warn(_('qrefresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        finally:
            wlock.release()
            self.removeundo(repo)
1834
1834
1835 def init(self, repo, create=False):
1835 def init(self, repo, create=False):
1836 if not create and os.path.isdir(self.path):
1836 if not create and os.path.isdir(self.path):
1837 raise error.Abort(_("patch queue directory already exists"))
1837 raise error.Abort(_("patch queue directory already exists"))
1838 try:
1838 try:
1839 os.mkdir(self.path)
1839 os.mkdir(self.path)
1840 except OSError as inst:
1840 except OSError as inst:
1841 if inst.errno != errno.EEXIST or not create:
1841 if inst.errno != errno.EEXIST or not create:
1842 raise
1842 raise
1843 if create:
1843 if create:
1844 return self.qrepo(create=True)
1844 return self.qrepo(create=True)
1845
1845
1846 def unapplied(self, repo, patch=None):
1846 def unapplied(self, repo, patch=None):
1847 if patch and patch not in self.series:
1847 if patch and patch not in self.series:
1848 raise error.Abort(_("patch %s is not in series file") % patch)
1848 raise error.Abort(_("patch %s is not in series file") % patch)
1849 if not patch:
1849 if not patch:
1850 start = self.seriesend()
1850 start = self.seriesend()
1851 else:
1851 else:
1852 start = self.series.index(patch) + 1
1852 start = self.series.index(patch) + 1
1853 unapplied = []
1853 unapplied = []
1854 for i in xrange(start, len(self.series)):
1854 for i in xrange(start, len(self.series)):
1855 pushable, reason = self.pushable(i)
1855 pushable, reason = self.pushable(i)
1856 if pushable:
1856 if pushable:
1857 unapplied.append((i, self.series[i]))
1857 unapplied.append((i, self.series[i]))
1858 self.explainpushable(i)
1858 self.explainpushable(i)
1859 return unapplied
1859 return unapplied
1860
1860
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print the patch series (or, with *missing*, files in the patch
        directory that are not in the series file).

        start/length select a slice of the series; status ('A'/'U'/'G')
        filters by state in non-verbose mode; summary appends the first
        line of each patch's message.
        """
        def displayname(pfx, patchname, state):
            # Write one line: optional prefix, patch name, and (with
            # summary) the first message line, colorized via ui labels.
            if pfx:
                self.ui.write(pfx)
            if summary:
                ph = patchheader(self.join(patchname), self.plainmode)
                if ph.message:
                    msg = ph.message[0]
                else:
                    msg = ''

                if self.ui.formatted():
                    # Truncate the summary to the remaining terminal width;
                    # drop it entirely if nothing would fit.
                    width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                    if width > 0:
                        msg = util.ellipsis(msg, width)
                    else:
                        msg = ''
                self.ui.write(patchname, label='qseries.' + state)
                self.ui.write(': ')
                self.ui.write(msg, label='qseries.message.' + state)
            else:
                self.ui.write(patchname, label='qseries.' + state)
            self.ui.write('\n')

        applied = set([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            if self.ui.verbose:
                # Width of the largest index we will print, for alignment.
                idxwidth = len(str(start + length - 1))
            for i in xrange(start, start + length):
                patch = self.series[i]
                if patch in applied:
                    char, state = 'A', 'applied'
                elif self.pushable(i)[0]:
                    char, state = 'U', 'unapplied'
                else:
                    char, state = 'G', 'guarded'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%*d %s ' % (idxwidth, i, char)
                elif status and status != char:
                    # Status filter only applies in non-verbose mode.
                    continue
                displayname(pfx, patch, state)
        else:
            # Walk the patch directory and report files that are neither
            # in the series nor mq bookkeeping files nor hidden files.
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.statuspath, self.seriespath,
                                   self.guardspath)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            for x in sorted(msng_list):
                pfx = self.ui.verbose and ('D ') or ''
                displayname(pfx, x, 'missing')
1920
1920
1921 def issaveline(self, l):
1921 def issaveline(self, l):
1922 if l.name == '.hg.patches.save.line':
1922 if l.name == '.hg.patches.save.line':
1923 return True
1923 return True
1924
1924
1925 def qrepo(self, create=False):
1925 def qrepo(self, create=False):
1926 ui = self.baseui.copy()
1926 ui = self.baseui.copy()
1927 if create or os.path.isdir(self.join(".hg")):
1927 if create or os.path.isdir(self.join(".hg")):
1928 return hg.repository(ui, path=self.path, create=create)
1928 return hg.repository(ui, path=self.path, create=create)
1929
1929
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Restore queue state from the save changeset *rev*.

        The save changeset's description (written by save()) embeds the
        applied/series data after a 'Patch Data:' marker and, optionally,
        the queue repository's dirstate parents after 'Dirstate:'.
        With *delete*, the save changeset itself is stripped; with
        *qupdate*, the queue repository is updated to the saved parent.
        Returns 1 on failure (message already printed), None otherwise.
        """
        desc = repo[rev].description().strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i, line in enumerate(lines):
            if line == 'Patch Data:':
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                # 'Dirstate: <hex> <hex>' -> queue repo parent nodes.
                l = line.rstrip()
                l = l[10:].split(' ')
                qpp = [bin(x) for x in l]
            elif datastart is not None:
                # Lines after 'Patch Data:' are either '<node>:<name>'
                # (applied patch) or ':<name>' (unapplied series entry).
                l = line.rstrip()
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("no saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.fullseries = series
        self.applied = applied
        self.parseseries()
        self.seriesdirty = True
        self.applieddirty = True
        heads = repo.changelog.heads()
        if delete:
            # Only strip the save changeset if nothing was committed on
            # top of it.
            if rev not in heads:
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                strip(self.ui, repo, [rev], update=update, backup=False)
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
        if qupdate:
            self.ui.status(_("updating queue directory\n"))
            r = self.qrepo()
            if not r:
                self.ui.warn(_("unable to load queue repository\n"))
                return 1
            hg.clean(r, qpp[0])
1983
1983
1984 def save(self, repo, msg=None):
1984 def save(self, repo, msg=None):
1985 if not self.applied:
1985 if not self.applied:
1986 self.ui.warn(_("save: no patches applied, exiting\n"))
1986 self.ui.warn(_("save: no patches applied, exiting\n"))
1987 return 1
1987 return 1
1988 if self.issaveline(self.applied[-1]):
1988 if self.issaveline(self.applied[-1]):
1989 self.ui.warn(_("status is already saved\n"))
1989 self.ui.warn(_("status is already saved\n"))
1990 return 1
1990 return 1
1991
1991
1992 if not msg:
1992 if not msg:
1993 msg = _("hg patches saved state")
1993 msg = _("hg patches saved state")
1994 else:
1994 else:
1995 msg = "hg patches: " + msg.rstrip('\r\n')
1995 msg = "hg patches: " + msg.rstrip('\r\n')
1996 r = self.qrepo()
1996 r = self.qrepo()
1997 if r:
1997 if r:
1998 pp = r.dirstate.parents()
1998 pp = r.dirstate.parents()
1999 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1999 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2000 msg += "\n\nPatch Data:\n"
2000 msg += "\n\nPatch Data:\n"
2001 msg += ''.join('%s\n' % x for x in self.applied)
2001 msg += ''.join('%s\n' % x for x in self.applied)
2002 msg += ''.join(':%s\n' % x for x in self.fullseries)
2002 msg += ''.join(':%s\n' % x for x in self.fullseries)
2003 n = repo.commit(msg, force=True)
2003 n = repo.commit(msg, force=True)
2004 if not n:
2004 if not n:
2005 self.ui.warn(_("repo commit failed\n"))
2005 self.ui.warn(_("repo commit failed\n"))
2006 return 1
2006 return 1
2007 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2007 self.applied.append(statusentry(n, '.hg.patches.save.line'))
2008 self.applieddirty = True
2008 self.applieddirty = True
2009 self.removeundo(repo)
2009 self.removeundo(repo)
2010
2010
2011 def fullseriesend(self):
2011 def fullseriesend(self):
2012 if self.applied:
2012 if self.applied:
2013 p = self.applied[-1].name
2013 p = self.applied[-1].name
2014 end = self.findseries(p)
2014 end = self.findseries(p)
2015 if end is None:
2015 if end is None:
2016 return len(self.fullseries)
2016 return len(self.fullseries)
2017 return end + 1
2017 return end + 1
2018 return 0
2018 return 0
2019
2019
    def seriesend(self, all_patches=False):
        """If all_patches is False, return the index of the next pushable patch
        in the series, or the series length. If all_patches is True, return the
        index of the first patch past the last applied one.
        """
        end = 0
        def nextpatch(start):
            # With all_patches (or past the end) the raw position is the
            # answer; otherwise scan forward for the first pushable patch,
            # explaining each skipped (guarded) one.
            if all_patches or start >= len(self.series):
                return start
            for i in xrange(start, len(self.series)):
                p, reason = self.pushable(i)
                if p:
                    return i
                self.explainpushable(i)
            return len(self.series)
        if self.applied:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                # Last applied patch is no longer in the series file.
                return 0
            return nextpatch(end + 1)
        return nextpatch(end)
2043
2043
2044 def appliedname(self, index):
2044 def appliedname(self, index):
2045 pname = self.applied[index].name
2045 pname = self.applied[index].name
2046 if not self.ui.verbose:
2046 if not self.ui.verbose:
2047 p = pname
2047 p = pname
2048 else:
2048 else:
2049 p = str(self.series.index(pname)) + " " + pname
2049 p = str(self.series.index(pname)) + " " + pname
2050 return p
2050 return p
2051
2051
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patch files and/or existing revisions into the queue.

        *files* are patch files (or '-' for stdin) appended to the series;
        *rev* is a revset of existing changesets placed under mq control
        (prepended to the series and marked applied).  Returns the list of
        imported patch names.  Raises error.Abort on any invalid input or
        non-linear revision selection.
        """
        def checkseries(patchname):
            if patchname in self.series:
                raise error.Abort(_('patch %s is already in the series file')
                                  % patchname)

        if rev:
            if files:
                raise error.Abort(_('option "-r" not valid when importing '
                                    'files'))
            rev = scmutil.revrange(repo, rev)
            # Process highest revision first so each one becomes the new
            # queue base as it is imported.
            rev.sort(reverse=True)
        elif not files:
            raise error.Abort(_('no files or revisions specified'))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise error.Abort(_('option "-n" not valid when importing multiple '
                                'patches'))
        imported = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev.first()))
            if len(heads) > 1:
                raise error.Abort(_('revision %d is the root of more than one '
                                    'branch') % rev.last())
            if self.applied:
                base = repo.changelog.node(rev.first())
                if base in [n.node for n in self.applied]:
                    raise error.Abort(_('revision %d is already managed')
                                      % rev.first())
                if heads != [self.applied[-1].node]:
                    raise error.Abort(_('revision %d is not the parent of '
                                        'the queue') % rev.first())
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev.first())]:
                    raise error.Abort(_('revision %d has unmanaged children')
                                      % rev.first())
                lastparent = None

            diffopts = self.diffopts({'git': git})
            tr = repo.transaction('qimport')
            try:
                for r in rev:
                    if not repo[r].mutable():
                        raise error.Abort(_('revision %d is not mutable') % r,
                                          hint=_('see "hg help phases" '
                                                 'for details'))
                    p1, p2 = repo.changelog.parentrevs(r)
                    n = repo.changelog.node(r)
                    if p2 != nullrev:
                        raise error.Abort(_('cannot import merge revision %d')
                                          % r)
                    # Enforce linearity: each revision must be the first
                    # parent of the previously imported (higher) one.
                    if lastparent and lastparent != r:
                        raise error.Abort(_('revision %d is not the parent of '
                                            '%d')
                                          % (r, lastparent))
                    lastparent = p1

                    if not patchname:
                        patchname = makepatchname(self.fullseries,
                            repo[r].description().split('\n', 1)[0],
                            '%d.diff' % r)
                    checkseries(patchname)
                    self.checkpatchname(patchname, force)
                    self.fullseries.insert(0, patchname)

                    # Export the changeset as a patch file in the queue.
                    patchf = self.opener(patchname, "w")
                    cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                    patchf.close()

                    se = statusentry(n, patchname)
                    self.applied.insert(0, se)

                    self.added.append(patchname)
                    imported.append(patchname)
                    # Reset so the next revision gets a generated name.
                    patchname = None
                if rev and repo.ui.configbool('mq', 'secret', False):
                    # if we added anything with --rev, move the secret root
                    phases.retractboundary(repo, tr, phases.secret, [n])
                self.parseseries()
                self.applieddirty = True
                self.seriesdirty = True
                tr.close()
            finally:
                tr.release()

        for i, filename in enumerate(files):
            if existing:
                if filename == '-':
                    raise error.Abort(_('-e is incompatible with import from -')
                                      )
                filename = normname(filename)
                self.checkreservedname(filename)
                if util.url(filename).islocal():
                    originpath = self.join(filename)
                    if not os.path.isfile(originpath):
                        raise error.Abort(
                            _("patch %s does not exist") % filename)

                if patchname:
                    self.checkpatchname(patchname, force)

                    self.ui.write(_('renaming %s to %s\n')
                                  % (filename, patchname))
                    util.rename(originpath, self.join(patchname))
                else:
                    patchname = filename

            else:
                if filename == '-' and not patchname:
                    raise error.Abort(_('need --name to import a patch from -'))
                elif not patchname:
                    patchname = normname(os.path.basename(filename.rstrip('/')))
                self.checkpatchname(patchname, force)
                try:
                    if filename == '-':
                        text = self.ui.fin.read()
                    else:
                        fp = hg.openpath(self.ui, filename)
                        text = fp.read()
                        fp.close()
                except (OSError, IOError):
                    raise error.Abort(_("unable to read file %s") % filename)
                # Copy the patch text into the queue directory.
                patchf = self.opener(patchname, "w")
                patchf.write(text)
                patchf.close()
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                # Insert after the last applied patch, preserving the order
                # of multiple imported files via the enumerate offset.
                index = self.fullseriesend() + i
                self.fullseries[index:index] = [patchname]
            self.parseseries()
            self.seriesdirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            imported.append(patchname)
            patchname = None

        self.removeundo(repo)
        return imported
2196
2196
def fixkeepchangesopts(ui, opts):
    """Return *opts* with 'keep_changes' enabled when mq.keepchanges is set.

    The option is not forced when --force or --exact was given, or when
    the config knob is off; in those cases *opts* is returned unchanged.
    Otherwise a shallow copy with 'keep_changes' set to True is returned.
    """
    keepchanges = ui.configbool('mq', 'keepchanges')
    if not keepchanges or opts.get('force') or opts.get('exact'):
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2204
2204
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'))
def delete(ui, repo, *patches, **opts):
    # NOTE: this docstring is the 'hg help qdelete' text; keep user-facing.
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # Thin wrapper: validation and removal live in queue.delete().
    q = repo.mq
    q.delete(repo, patches, opts)
    q.savedirty()
    return 0
2223
2223
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
          ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'))
def applied(ui, repo, patch=None, **opts):
    # NOTE: this docstring is the 'hg help qapplied' text; keep user-facing.
    """print the patches already applied

    Returns 0 on success."""

    q = repo.mq

    # 'end' is one past the last patch to show: either the named patch's
    # position, or the index just past the last applied patch.
    if patch:
        if patch not in q.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    else:
        end = q.seriesend(True)

    if opts.get('last') and not end:
        ui.write(_("no patches applied\n"))
        return 1
    elif opts.get('last') and end == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    elif opts.get('last'):
        # Show only the patch preceding the top one.
        start = end - 2
        end = 1
    else:
        start = 0

    q.qseries(repo, length=end, start=start, status='A',
              summary=opts.get('summary'))
2257
2257
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'))
def unapplied(ui, repo, patch=None, **opts):
    # NOTE: this docstring is the 'hg help qunapplied' text; keep user-facing.
    """print the patches not yet applied

    Returns 0 on success."""

    q = repo.mq
    # 'start' is the first series index to show: just after the named
    # patch, or just past the last applied patch.
    if patch:
        if patch not in q.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.seriesend(True)

    if start == len(q.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    if opts.get('first'):
        length = 1
    else:
        length = None
    q.qseries(repo, start=start, length=length, status='U',
              summary=opts.get('summary'))
2284
2284
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
def qimport(ui, repo, *filename, **opts):
    # NOTE: this docstring is the 'hg help qimport' text; keep user-facing.
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev . -n patch will place the current revision
    under mq control). With -g/--git, patches imported with --rev will
    use the git diff format. See the diffs help topic for information
    on why this is important for preserving rename/copy information
    and permission changes. Use :hg:`qfinish` to remove changesets
    from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    lock = repo.lock() # cause this may move phase
    try:
        q = repo.mq
        try:
            imported = q.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # Persist series/status changes even if the import aborted
            # partway through.
            q.savedirty()
    finally:
        lock.release()

    if imported and opts.get('push') and not opts.get('rev'):
        # --push applies the last imported patch (not valid with --rev,
        # since those revisions are already applied).
        return q.push(repo, imported[-1])
    return 0
2345
2345
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    r = q.init(repo, create)
    q.savedirty()
    if r:
        # Seed the queue repo with an .hgignore covering mq's transient
        # bookkeeping files, unless one already exists.
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wvfs('.hgignore', 'w')
            for line in ('^\\.hg\n', '^\\.mq\n', 'syntax: glob\n',
                         'status\n', 'guards\n'):
                fp.write(line)
            fp.close()
        # An empty series file so the queue repo is immediately usable.
        if not os.path.exists(r.wjoin('series')):
            r.wvfs('series', 'w').close()
        r[None].add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0
2371
2371
@command("^qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'))
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # Thin deprecated wrapper: all the work happens in qinit().
    create = opts.get('create_repo')
    return qinit(ui, repo, create=create)
2387
2387
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
         ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'),
         norepo=True)
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only); verify it really is a repository
    # before doing any cloning work.
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise error.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))

    # Work out the first applied patch (qbase) so it can be stripped
    # from the destination, and which revisions to pull for a remote
    # destination.
    qbase, destrev = None, None
    if sr.local():
        repo = sr.local()
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        repo = dr.local()
        if qbase:
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            strip(ui, repo, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2474
2474
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'),
         inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # Delegate to the regular commit command, run against the nested
    # queue repository (if one exists).
    r = repo.mq.qrepo()
    if not r:
        raise error.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)
2488
2488
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # qseries() does the printing; this wrapper only forwards options.
    missing = opts.get('missing')
    summary = opts.get('summary')
    repo.mq.qseries(repo, missing=missing, summary=summary)
    return 0
2500
2500
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # seriesend(True) is the index just past the topmost applied patch;
    # zero means nothing is applied.
    t = q.seriesend(True) if q.applied else 0
    if not t:
        ui.write(_("no patches applied\n"))
        return 1
    q.qseries(repo, start=t - 1, length=1, status='A',
              summary=opts.get('summary'))
2518
2518
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    pos = q.seriesend()
    # seriesend() == len(series) means the whole series is applied.
    if pos == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
2530
2530
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    napplied = len(q.applied)
    # Need at least two applied patches to have a "previous" one.
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    idx = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=idx, length=1, status='A',
              summary=opts.get('summary'))
2547
2547
def setupheaderopts(ui, opts):
    """Fill in 'user'/'date' in opts from --currentuser/--currentdate.

    An explicitly given -u/-d value always wins over the "current"
    flags; opts is mutated in place.
    """
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % util.makedate()
2553
2553
@command("^qnew",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
         inferrepo=True)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    # Resolve the commit message and header fields up front, then hand
    # everything to the queue object.
    opts['msg'] = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    q = repo.mq
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2599
2599
@command("^qrefresh",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
         ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
         inferrepo=True)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    # The refresh rewrites the working directory; hold the wlock for
    # the whole operation and persist queue state before releasing it.
    wlock = repo.wlock()
    try:
        ret = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return ret
    finally:
        wlock.release()
2647
2647
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'),
         inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    # All of the work is done by the queue object.
    repo.mq.diff(repo, pats, opts)
    return 0
2669
2669
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise error.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    # Resolve and validate every named patch before touching anything.
    for name in files:
        p = q.lookup(name)
        if p in patches or p == parent:
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
        patches.append(p)

    # Apply each patch in order, collecting headers for the combined
    # message unless an explicit -m/-l message was given.
    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_('error folding patch %s') % p)

    if not message:
        # Concatenate the current patch's header with each folded
        # patch's header, separated by '* * *' lines.
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    wlock = repo.wlock()
    try:
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
    finally:
        wlock.release()
2739
2739
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    nobackup = opts.get('no_backup')
    keepchanges = opts.get('keep_changes')
    # If the target is already applied we pop down to it; otherwise we
    # push up to it. Both entry points share the same keyword options.
    mover = q.pop if q.isapplied(patch) else q.push
    ret = mover(repo, patch, force=opts.get('force'), nobackup=nobackup,
                keepchanges=keepchanges)
    q.savedirty()
    return ret
2763
2763
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::

       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # Print one series entry with its guards, labelled (for color
        # output) by whether the patch is applied, unapplied or guarded.
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                # Separate guards with single spaces, no trailing space.
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        if args or opts.get('none'):
            raise error.Abort(_('cannot mix -l/--list with options or '
                                'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        # No patch name given (first argument is already a guard):
        # default to the topmost applied patch.
        if not q.applied:
            raise error.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise error.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        # Set (or with --none, drop) the guards on the named patch.
        idx = q.findseries(patch)
        if idx is None:
            raise error.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        # No guard arguments: just print the patch's current guards.
        status(q.series.index(q.lookup(patch)))
2838
2838
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    if not patch:
        # No patch named: default to the topmost applied patch.
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)
    ph = patchheader(q.join(patch), q.plainmode)

    headertext = '\n'.join(ph.message) + '\n'
    ui.write(headertext)
2856
2856
def lastsavename(path):
    """find the most recent saved queue next to *path*

    Saved queue directories are named "<base>.<N>" (see savename).
    Returns a tuple (fullpath, N) for the entry with the highest N in
    path's parent directory, or (None, None) when none exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Escape the base name so regex metacharacters (including the '.'
    # separator itself) are matched literally, and anchor with '$' so
    # only exact "<base>.<number>" entries are considered.  The
    # previous pattern "%s.([0-9]+)" let '.' match any character and
    # matched mere prefixes, so e.g. "patchesX3" counted for "patches".
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2873
2873
def savename(path):
    """Return the next unused save name, "<path>.<N+1>"."""
    last, index = lastsavename(path)
    if last is None:
        # No previous save exists: start numbering at 1.
        index = 0
    return "%s.%d" % (path, index + 1)
2880
2880
@command("^qpush",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    opts = fixkeepchangesopts(ui, opts)
    if opts.get('merge'):
        # Deprecated --merge mode: merge with a previously saved queue,
        # either named explicitly via -n or found as the most recent
        # save directory next to the current queue path.
        if opts.get('name'):
            newpath = repo.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.baseui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    # Delegate the actual push; nobackup/keepchanges come from the
    # (normalized) --no-backup/--keep-changes flags.
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                 keepchanges=opts.get('keep_changes'))
    return ret
2925
2925
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = fixkeepchangesopts(ui, opts)
    localupdate = True
    if opts.get('name'):
        # Deprecated -n mode: pop from a named (non-active) queue; in
        # that case the working directory is not updated.
        q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2962
2962
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    if not name:
        # Single-argument form: the argument is the new name and the
        # patch to rename is the topmost applied one.
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # Renaming into an existing directory keeps the original
        # basename, like "mv file dir/".
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    i = q.findseries(patch)
    # Preserve any guards ("#guard" suffixes) attached to the old
    # series entry when rewriting it under the new name.
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    info = q.isapplied(patch)
    if info:
        # Keep the status (applied) file in sync for applied patches.
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r and patch in r.dirstate:
        # The patch queue is itself a versioned repository: record the
        # rename there as well, under its working-directory lock.
        wctx = r[None]
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # Old name was only added, never committed: just re-add
                # under the new name instead of recording a copy.
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                wctx.copy(patch, name)
                wctx.forget([patch])
        finally:
            wlock.release()

    q.savedirty()
3020
3020
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    # Resolve the revision to a node before handing it to the queue.
    node = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, node, delete=opts.get('delete'),
               qupdate=opts.get('update'))
    mq.savedirty()
    return 0
3035
3035
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        path = q.path
        if opts.get('name'):
            # Explicit destination: refuse to clobber a non-directory,
            # and an existing directory only with --force.
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise error.Abort(_('destination %s exists and is not '
                                        'a directory') % newpath)
                if not opts.get('force'):
                    raise error.Abort(_('destination %s exists, '
                                        'use -f to force') % newpath)
        else:
            # No name given: pick the next free "<path>.<N>" save name.
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        # --empty: clear the list of applied patches.
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
3073
3073
3074
3074
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable (negative guard)
        qguard bar.patch +stable (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    # True if the i-th applied patch is pushable under the current
    # guard selection.
    pushable = lambda i: q.pushable(q.applied[i].name)[0]
    if args or opts.get('none'):
        # Change the active guard set, then report how pushability of
        # the series changed as a result (unless we are about to
        # pop/reapply, which makes the report redundant).
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: summarize how often each guard occurs in the
        # series file (with -v, also count unguarded patches as NONE).
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # Sort by guard name, ignoring the leading '+'/'-' sign.
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No arguments: just print the currently active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # Remember the current top patch so --reapply can push back to it.
    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop down to just before the first applied patch that is
        # guarded under the new selection.
        for i in xrange(len(q.applied)):
            if not pushable(i):
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, q.applied[i - 1].name)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.savedirty()
3183
3183
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'))
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    if not opts.get('applied') and not revrange:
        raise error.Abort(_('no revisions specified'))
    elif opts.get('applied'):
        # --applied: finish everything between qbase and qtip, plus any
        # explicitly given revisions.
        revrange = ('qbase::qtip',) + revrange

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may change phases but leaves the responsibility of
    # locking the repo to the caller, to avoid deadlock with wlock.
    # This command code is responsible for that locking.
    lock = repo.lock()
    try:
        q.finish(repo, revs)
        q.savedirty()
    finally:
        lock.release()
    return 0
3228
3228
@command("qqueue",
         [('l', 'list', False, _('list all available queues')),
          ('', 'active', False, _('print name of active queue')),
          ('c', 'create', False, _('create new queue')),
          ('', 'rename', False, _('rename active queue')),
          ('', 'delete', False, _('delete reference to queue')),
          ('', 'purge', False, _('delete queue, and remove patch dir')),
          ],
         _('[OPTION] [QUEUE]'))
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)". Specifying --active will print
    only the name of the active queue.

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''
    q = repo.mq
    # bookkeeping files under .hg/: the default queue name, the registry of
    # all queue names, and the name of the currently active queue
    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        # the active queue's directory is 'patches' or 'patches-<name>';
        # strip the 'patches-' prefix to recover the queue name
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        # True when the queue registry file does not exist yet
        try:
            fh = repo.vfs(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        # read the registry; always include the active queue, and fall back
        # to the default queue when no registry file exists
        current = _getcurrent()

        try:
            fh = repo.vfs(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        # refuse to switch away from a queue with applied patches
        if q.applied:
            raise error.Abort(_('new queue created, but cannot make active '
                                'as patches are applied'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        # an empty 'patches.queue' file means the default queue is active
        fh = repo.vfs(_activequeue, 'w')
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        # append a queue name to the registry file
        fh = repo.vfs(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        # map a queue name to its patch directory under .hg/
        if name == 'patches':
            return repo.join('patches')
        else:
            return repo.join('patches-' + name)

    def _validname(name):
        # reject characters that would break the directory naming scheme
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        # remove 'name' from the registry (written atomically via a
        # temporary file + rename); the patch directory itself is kept
        if name not in existing:
            raise error.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise error.Abort(_('cannot delete currently active queue'))

        fh = repo.vfs('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))

    # no queue name, or explicit --list/--active: report and return
    if not name or opts.get('list') or opts.get('active'):
        current = _getcurrent()
        if opts.get('active'):
            ui.write('%s\n' % (current,))
            return
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise error.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    existing = _getqueues()

    if opts.get('create'):
        if name in existing:
            raise error.Abort(_('queue "%s" already exists') % name)
        # seed the registry with the default queue the first time around
        if _noqueues():
            _addqueue(_defaultqueue)
        _addqueue(name)
        _setactive(name)
    elif opts.get('rename'):
        current = _getcurrent()
        if name == current:
            raise error.Abort(_('can\'t rename "%s" to its current name')
                              % name)
        if name in existing:
            raise error.Abort(_('queue "%s" already exists') % name)

        olddir = _queuedir(current)
        newdir = _queuedir(name)

        if os.path.exists(newdir):
            raise error.Abort(_('non-queue directory "%s" already exists') %
                              newdir)

        # rewrite the registry, substituting the new name for the current
        # one, and rename the patch directory on disk if it exists
        fh = repo.vfs('patches.queues.new', 'w')
        for queue in existing:
            if queue == current:
                fh.write('%s\n' % (name,))
                if os.path.exists(olddir):
                    util.rename(olddir, newdir)
            else:
                fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
        _setactivenocheck(name)
    elif opts.get('delete'):
        _delete(name)
    elif opts.get('purge'):
        # like --delete, but also remove the patch directory
        if name in existing:
            _delete(name)
        qdir = _queuedir(name)
        if os.path.exists(qdir):
            shutil.rmtree(qdir)
    else:
        # bare queue name: switch to that (existing) queue
        if name not in existing:
            raise error.Abort(_('use --create to create a new queue'))
        _setactive(name)
3403
3403
def mqphasedefaults(repo, roots):
    """callback used to set mq changeset as secret when no phase data exists"""
    applied = repo.mq.applied
    if applied:
        # applied patches default to secret only when mq.secret is set
        secret = repo.ui.configbool('mq', 'secret', False)
        mqphase = phases.secret if secret else phases.draft
        qbase = repo[applied[0].node]
        roots[mqphase].add(qbase.node())
    return roots
3414
3414
def reposetup(ui, repo):
    """Install mq behavior on the given repository by subclassing it."""
    class mqrepo(repo.__class__):
        @localrepo.unfilteredpropertycache
        def mq(self):
            # lazily build the patch queue object for this repository
            return queue(self.ui, self.baseui, self.path)

        def invalidateall(self):
            super(mqrepo, self).invalidateall()
            if localrepo.hasunfilteredcache(self, 'mq'):
                # recreate mq in case queue path was changed
                delattr(self.unfiltered(), 'mq')

        def abortifwdirpatched(self, errmsg, force=False):
            # abort with 'errmsg' when the working directory sits on top of
            # an applied patch (unless 'force' or checking is disabled)
            if self.mq.applied and self.mq.checkapplied and not force:
                parents = self.dirstate.parents()
                patches = [s.node for s in self.mq.applied]
                if parents[0] in patches or parents[1] in patches:
                    raise error.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra={}):
            # refuse to create a regular commit on top of an applied patch
            self.abortifwdirpatched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def checkpush(self, pushop):
            if self.mq.applied and self.mq.checkapplied and not pushop.force:
                outapplied = [e.node for e in self.mq.applied]
                if pushop.revs:
                    # Assume applied patches have no non-patch descendants and
                    # are not on remote already. Filtering any changeset not
                    # pushed.
                    heads = set(pushop.revs)
                    for node in reversed(outapplied):
                        if node in heads:
                            break
                        else:
                            outapplied.pop()
                # looking for pushed and shared changeset
                for node in outapplied:
                    if self[node].phase() < phases.secret:
                        raise error.Abort(_('source has mq patches applied'))
                # no non-secret patches pushed
            super(mqrepo, self).checkpush(pushop)

        def _findtags(self):
            '''augment tags from base class with patch tags'''
            result = super(mqrepo, self)._findtags()

            q = self.mq
            if not q.applied:
                return result

            mqtags = [(patch.node, patch.name) for patch in q.applied]

            try:
                # for now ignore filtering business
                self.unfiltered().changelog.rev(mqtags[-1][0])
            except error.LookupError:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return result

            # do not add fake tags for filtered revisions
            included = self.changelog.hasnode
            mqtags = [mqt for mqt in mqtags if included(mqt[0])]
            if not mqtags:
                return result

            # synthesize the qtip/qbase/qparent convenience tags
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            tags = result[0]
            for patch in mqtags:
                if patch[1] in tags:
                    # real tags win over patch-name pseudo tags
                    self.ui.warn(_('tag %s overrides mq patch of the same '
                                   'name\n') % patch[1])
                else:
                    tags[patch[1]] = patch[0]

            return result

    if repo.local():
        repo.__class__ = mqrepo

        repo._phasedefaults.append(mqphasedefaults)
3504
3504
def mqimport(orig, ui, repo, *args, **kwargs):
    """Wrapper for 'hg import': refuse to import over an applied mq patch."""
    willcommit = not kwargs.get('no_commit', False)
    if willcommit and util.safehasattr(repo, 'abortifwdirpatched'):
        repo.abortifwdirpatched(_('cannot import over an applied patch'),
                                kwargs.get('force'))
    return orig(ui, repo, *args, **kwargs)
3511
3511
def mqinit(orig, ui, *args, **kwargs):
    """Wrapper for 'hg init': with --mq, initialize the queue repository."""
    mq = kwargs.pop('mq', None)
    if not mq:
        # no --mq: behave exactly like the wrapped command
        return orig(ui, *args, **kwargs)

    if not args:
        # no path given: locate the enclosing repository from the cwd
        repopath = cmdutil.findrepo(os.getcwd())
        if not repopath:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
    else:
        repopath = args[0]
        if not hg.islocal(repopath):
            raise error.Abort(_('only a local queue repository '
                                'may be initialized'))
    repo = hg.repository(ui, repopath)
    return qinit(ui, repo, True)
3530
3530
def mqcommand(orig, ui, repo, *args, **kwargs):
    """Add --mq option to operate on patch repository instead of main"""
    # some commands do not like getting unknown options
    mq = kwargs.pop('mq', None)
    if not mq:
        return orig(ui, repo, *args, **kwargs)

    # redirect the wrapped command at the queue repository
    queuerepo = repo.mq.qrepo()
    if not queuerepo:
        raise error.Abort(_('no queue repository'))
    return orig(queuerepo.ui, queuerepo, *args, **kwargs)
3545
3545
def summaryhook(ui, repo):
    """Contribute an 'mq:' line to the output of 'hg summary'."""
    q = repo.mq
    napplied = len(q.applied)
    nunapplied = len(q.unapplied(repo))
    parts = []
    if napplied:
        parts.append(ui.label(_("%d applied"), 'qseries.applied') % napplied)
    if nunapplied:
        parts.append(
            ui.label(_("%d unapplied"), 'qseries.unapplied') % nunapplied)
    if parts:
        # i18n: column positioning for "hg summary"
        ui.write(_("mq: %s\n") % ', '.join(parts))
    else:
        # i18n: column positioning for "hg summary"
        ui.note(_("mq: (empty queue)\n"))
3560
3560
# deferred registrar: predicates are collected here and installed into the
# revset machinery later via revsetpredicate.setup() (called in extsetup)
revsetpredicate = revset.extpredicate()

@revsetpredicate('mq()')
def revsetmq(repo, subset, x):
    """Changesets managed by MQ.
    """
    # i18n: "mq" is a keyword; it accepts no arguments
    revset.getargs(x, 0, 0, _("mq takes no arguments"))
    # revisions of currently applied patches
    applied = set([repo[r.node].rev() for r in repo.mq.applied])
    return revset.baseset([r for r in subset if r in applied])

# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsetmq]
3571
3573
def extsetup(ui):
    """Second-stage setup: wrap core commands and register revset predicates."""
    # Ensure mq wrappers are called first, regardless of extension load order by
    # NOT wrapping in uisetup() and instead deferring to init stage two here.
    mqopt = [('', 'mq', None, _("operate on patch repository"))]

    extensions.wrapcommand(commands.table, 'import', mqimport)
    cmdutil.summaryhooks.add('mq', summaryhook)

    entry = extensions.wrapcommand(commands.table, 'init', mqinit)
    entry[1].extend(mqopt)

    # commands that run without a repository cannot take --mq
    nowrap = set(commands.norepo.split(" "))

    def dotable(cmdtable):
        # add the --mq option to every repo-requiring command in 'cmdtable'
        for cmd in cmdtable.keys():
            cmd = cmdutil.parsealiases(cmd)[0]
            if cmd in nowrap:
                continue
            entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
            entry[1].extend(mqopt)

    dotable(commands.table)

    # also wrap commands contributed by other loaded extensions
    for extname, extmodule in extensions.extensions():
        if extmodule.__file__ != __file__:
            dotable(getattr(extmodule, 'cmdtable', {}))

    revsetpredicate.setup()
3600
3602
# default effects for the ui labels emitted by mq output (qguard.*/qseries.*);
# presumably consumed by the color extension -- keys match the labels used
# e.g. in summaryhook above
colortable = {'qguard.negative': 'red',
              'qguard.positive': 'yellow',
              'qguard.unguarded': 'green',
              'qseries.applied': 'blue bold underline',
              'qseries.guarded': 'black bold',
              'qseries.missing': 'red bold',
              'qseries.unapplied': 'black bold'}
@@ -1,1241 +1,1244 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
18 from mercurial import extensions, patch, scmutil, phases, obsolete, error
18 from mercurial import extensions, patch, scmutil, phases, obsolete, error
19 from mercurial import copies, repoview, revset
19 from mercurial import copies, repoview, revset
20 from mercurial.commands import templateopts
20 from mercurial.commands import templateopts
21 from mercurial.node import nullrev, nullid, hex, short
21 from mercurial.node import nullrev, nullid, hex, short
22 from mercurial.lock import release
22 from mercurial.lock import release
23 from mercurial.i18n import _
23 from mercurial.i18n import _
24 import os, errno
24 import os, errno
25
25
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased
revtodo = -1
# NOTE(review): meanings of the sentinels below are inferred from their names;
# confirm against the rest of the module before relying on them
nullmerge = -2
revignored = -3
# successor in rebase destination
revprecursor = -4
# plain prune (no successor)
revpruned = -5
# sentinel states that mean "this revision is skipped, not rebased"
revskipped = (revignored, revprecursor, revpruned)

cmdtable = {}
command = cmdutil.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'
46
46
47 def _nothingtorebase():
47 def _nothingtorebase():
48 return 1
48 return 1
49
49
50 def _makeextrafn(copiers):
50 def _makeextrafn(copiers):
51 """make an extrafn out of the given copy-functions.
51 """make an extrafn out of the given copy-functions.
52
52
53 A copy function takes a context and an extra dict, and mutates the
53 A copy function takes a context and an extra dict, and mutates the
54 extra dict as needed based on the given context.
54 extra dict as needed based on the given context.
55 """
55 """
56 def extrafn(ctx, extra):
56 def extrafn(ctx, extra):
57 for c in copiers:
57 for c in copiers:
58 c(ctx, extra)
58 c(ctx, extra)
59 return extrafn
59 return extrafn
60
60
61 def _destrebase(repo):
61 def _destrebase(repo):
62 # Destination defaults to the latest revision in the
62 # Destination defaults to the latest revision in the
63 # current branch
63 # current branch
64 branch = repo[None].branch()
64 branch = repo[None].branch()
65 return repo[branch].rev()
65 return repo[branch].rev()
66
66
# deferred registrar: predicates are collected here and installed into the
# revset machinery later via revsetpredicate.setup()
revsetpredicate = revset.extpredicate()

@revsetpredicate('_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - taking rev as arguments,
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # # XXX: - probably merging with the merge destination.
    # i18n: "_rebasedefaultdest" is a keyword
    revset.getargs(x, 0, 0, _("_rebasedefaultdest takes no arguments"))
    return subset & revset.baseset([_destrebase(repo)])
78
81
79 @command('rebase',
82 @command('rebase',
80 [('s', 'source', '',
83 [('s', 'source', '',
81 _('rebase the specified changeset and descendants'), _('REV')),
84 _('rebase the specified changeset and descendants'), _('REV')),
82 ('b', 'base', '',
85 ('b', 'base', '',
83 _('rebase everything from branching point of specified changeset'),
86 _('rebase everything from branching point of specified changeset'),
84 _('REV')),
87 _('REV')),
85 ('r', 'rev', [],
88 ('r', 'rev', [],
86 _('rebase these revisions'),
89 _('rebase these revisions'),
87 _('REV')),
90 _('REV')),
88 ('d', 'dest', '',
91 ('d', 'dest', '',
89 _('rebase onto the specified changeset'), _('REV')),
92 _('rebase onto the specified changeset'), _('REV')),
90 ('', 'collapse', False, _('collapse the rebased changesets')),
93 ('', 'collapse', False, _('collapse the rebased changesets')),
91 ('m', 'message', '',
94 ('m', 'message', '',
92 _('use text as collapse commit message'), _('TEXT')),
95 _('use text as collapse commit message'), _('TEXT')),
93 ('e', 'edit', False, _('invoke editor on commit messages')),
96 ('e', 'edit', False, _('invoke editor on commit messages')),
94 ('l', 'logfile', '',
97 ('l', 'logfile', '',
95 _('read collapse commit message from file'), _('FILE')),
98 _('read collapse commit message from file'), _('FILE')),
96 ('k', 'keep', False, _('keep original changesets')),
99 ('k', 'keep', False, _('keep original changesets')),
97 ('', 'keepbranches', False, _('keep original branch names')),
100 ('', 'keepbranches', False, _('keep original branch names')),
98 ('D', 'detach', False, _('(DEPRECATED)')),
101 ('D', 'detach', False, _('(DEPRECATED)')),
99 ('i', 'interactive', False, _('(DEPRECATED)')),
102 ('i', 'interactive', False, _('(DEPRECATED)')),
100 ('t', 'tool', '', _('specify merge tool')),
103 ('t', 'tool', '', _('specify merge tool')),
101 ('c', 'continue', False, _('continue an interrupted rebase')),
104 ('c', 'continue', False, _('continue an interrupted rebase')),
102 ('a', 'abort', False, _('abort an interrupted rebase'))] +
105 ('a', 'abort', False, _('abort an interrupted rebase'))] +
103 templateopts,
106 templateopts,
104 _('[-s REV | -b REV] [-d REV] [OPTION]'))
107 _('[-s REV | -b REV] [-d REV] [OPTION]'))
105 def rebase(ui, repo, **opts):
108 def rebase(ui, repo, **opts):
106 """move changeset (and descendants) to a different branch
109 """move changeset (and descendants) to a different branch
107
110
108 Rebase uses repeated merging to graft changesets from one part of
111 Rebase uses repeated merging to graft changesets from one part of
109 history (the source) onto another (the destination). This can be
112 history (the source) onto another (the destination). This can be
110 useful for linearizing *local* changes relative to a master
113 useful for linearizing *local* changes relative to a master
111 development tree.
114 development tree.
112
115
113 Published commits cannot be rebased (see :hg:`help phases`).
116 Published commits cannot be rebased (see :hg:`help phases`).
114 To copy commits, see :hg:`help graft`.
117 To copy commits, see :hg:`help graft`.
115
118
116 If you don't specify a destination changeset (``-d/--dest``),
119 If you don't specify a destination changeset (``-d/--dest``),
117 rebase uses the current branch tip as the destination. (The
120 rebase uses the current branch tip as the destination. (The
118 destination changeset is not modified by rebasing, but new
121 destination changeset is not modified by rebasing, but new
119 changesets are added as its descendants.)
122 changesets are added as its descendants.)
120
123
121 There are three ways to select changesets::
124 There are three ways to select changesets::
122
125
123 1. Explicitly select them using ``--rev``.
126 1. Explicitly select them using ``--rev``.
124
127
125 2. Use ``--source`` to select a root changeset and include all of its
128 2. Use ``--source`` to select a root changeset and include all of its
126 descendants.
129 descendants.
127
130
128 3. Use ``--base`` to select a changeset; rebase will find ancestors
131 3. Use ``--base`` to select a changeset; rebase will find ancestors
129 and their descendants which are not also ancestors of the destination.
132 and their descendants which are not also ancestors of the destination.
130
133
131 Rebase will destroy original changesets unless you use ``--keep``.
134 Rebase will destroy original changesets unless you use ``--keep``.
132 It will also move your bookmarks (even if you do).
135 It will also move your bookmarks (even if you do).
133
136
134 Some changesets may be dropped if they do not contribute changes
137 Some changesets may be dropped if they do not contribute changes
135 (e.g. merges from the destination branch).
138 (e.g. merges from the destination branch).
136
139
137 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
140 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
138 a named branch with two heads. You will need to explicitly specify source
141 a named branch with two heads. You will need to explicitly specify source
139 and/or destination.
142 and/or destination.
140
143
141 If a rebase is interrupted to manually resolve a conflict, it can be
144 If a rebase is interrupted to manually resolve a conflict, it can be
142 continued with --continue/-c or aborted with --abort/-a.
145 continued with --continue/-c or aborted with --abort/-a.
143
146
144 .. container:: verbose
147 .. container:: verbose
145
148
146 Examples:
149 Examples:
147
150
148 - move "local changes" (current commit back to branching point)
151 - move "local changes" (current commit back to branching point)
149 to the current branch tip after a pull::
152 to the current branch tip after a pull::
150
153
151 hg rebase
154 hg rebase
152
155
153 - move a single changeset to the stable branch::
156 - move a single changeset to the stable branch::
154
157
155 hg rebase -r 5f493448 -d stable
158 hg rebase -r 5f493448 -d stable
156
159
157 - splice a commit and all its descendants onto another part of history::
160 - splice a commit and all its descendants onto another part of history::
158
161
159 hg rebase --source c0c3 --dest 4cf9
162 hg rebase --source c0c3 --dest 4cf9
160
163
161 - rebase everything on a branch marked by a bookmark onto the
164 - rebase everything on a branch marked by a bookmark onto the
162 default branch::
165 default branch::
163
166
164 hg rebase --base myfeature --dest default
167 hg rebase --base myfeature --dest default
165
168
166 - collapse a sequence of changes into a single commit::
169 - collapse a sequence of changes into a single commit::
167
170
168 hg rebase --collapse -r 1520:1525 -d .
171 hg rebase --collapse -r 1520:1525 -d .
169
172
170 - move a named branch while preserving its name::
173 - move a named branch while preserving its name::
171
174
172 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
175 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
173
176
174 Returns 0 on success, 1 if nothing to rebase or there are
177 Returns 0 on success, 1 if nothing to rebase or there are
175 unresolved conflicts.
178 unresolved conflicts.
176
179
177 """
180 """
178 originalwd = target = None
181 originalwd = target = None
179 activebookmark = None
182 activebookmark = None
180 external = nullrev
183 external = nullrev
181 # Mapping between the old revision id and either what is the new rebased
184 # Mapping between the old revision id and either what is the new rebased
182 # revision or what needs to be done with the old revision. The state dict
185 # revision or what needs to be done with the old revision. The state dict
183 # will be what contains most of the rebase progress state.
186 # will be what contains most of the rebase progress state.
184 state = {}
187 state = {}
185 skipped = set()
188 skipped = set()
186 targetancestors = set()
189 targetancestors = set()
187
190
188
191
189 lock = wlock = None
192 lock = wlock = None
190 try:
193 try:
191 wlock = repo.wlock()
194 wlock = repo.wlock()
192 lock = repo.lock()
195 lock = repo.lock()
193
196
194 # Validate input and define rebasing points
197 # Validate input and define rebasing points
195 destf = opts.get('dest', None)
198 destf = opts.get('dest', None)
196 srcf = opts.get('source', None)
199 srcf = opts.get('source', None)
197 basef = opts.get('base', None)
200 basef = opts.get('base', None)
198 revf = opts.get('rev', [])
201 revf = opts.get('rev', [])
199 contf = opts.get('continue')
202 contf = opts.get('continue')
200 abortf = opts.get('abort')
203 abortf = opts.get('abort')
201 collapsef = opts.get('collapse', False)
204 collapsef = opts.get('collapse', False)
202 collapsemsg = cmdutil.logmessage(ui, opts)
205 collapsemsg = cmdutil.logmessage(ui, opts)
203 date = opts.get('date', None)
206 date = opts.get('date', None)
204 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
207 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
205 extrafns = []
208 extrafns = []
206 if e:
209 if e:
207 extrafns = [e]
210 extrafns = [e]
208 keepf = opts.get('keep', False)
211 keepf = opts.get('keep', False)
209 keepbranchesf = opts.get('keepbranches', False)
212 keepbranchesf = opts.get('keepbranches', False)
210 # keepopen is not meant for use on the command line, but by
213 # keepopen is not meant for use on the command line, but by
211 # other extensions
214 # other extensions
212 keepopen = opts.get('keepopen', False)
215 keepopen = opts.get('keepopen', False)
213
216
214 if opts.get('interactive'):
217 if opts.get('interactive'):
215 try:
218 try:
216 if extensions.find('histedit'):
219 if extensions.find('histedit'):
217 enablehistedit = ''
220 enablehistedit = ''
218 except KeyError:
221 except KeyError:
219 enablehistedit = " --config extensions.histedit="
222 enablehistedit = " --config extensions.histedit="
220 help = "hg%s help -e histedit" % enablehistedit
223 help = "hg%s help -e histedit" % enablehistedit
221 msg = _("interactive history editing is supported by the "
224 msg = _("interactive history editing is supported by the "
222 "'histedit' extension (see \"%s\")") % help
225 "'histedit' extension (see \"%s\")") % help
223 raise error.Abort(msg)
226 raise error.Abort(msg)
224
227
225 if collapsemsg and not collapsef:
228 if collapsemsg and not collapsef:
226 raise error.Abort(
229 raise error.Abort(
227 _('message can only be specified with collapse'))
230 _('message can only be specified with collapse'))
228
231
229 if contf or abortf:
232 if contf or abortf:
230 if contf and abortf:
233 if contf and abortf:
231 raise error.Abort(_('cannot use both abort and continue'))
234 raise error.Abort(_('cannot use both abort and continue'))
232 if collapsef:
235 if collapsef:
233 raise error.Abort(
236 raise error.Abort(
234 _('cannot use collapse with continue or abort'))
237 _('cannot use collapse with continue or abort'))
235 if srcf or basef or destf:
238 if srcf or basef or destf:
236 raise error.Abort(
239 raise error.Abort(
237 _('abort and continue do not allow specifying revisions'))
240 _('abort and continue do not allow specifying revisions'))
238 if abortf and opts.get('tool', False):
241 if abortf and opts.get('tool', False):
239 ui.warn(_('tool option will be ignored\n'))
242 ui.warn(_('tool option will be ignored\n'))
240
243
241 try:
244 try:
242 (originalwd, target, state, skipped, collapsef, keepf,
245 (originalwd, target, state, skipped, collapsef, keepf,
243 keepbranchesf, external, activebookmark) = restorestatus(repo)
246 keepbranchesf, external, activebookmark) = restorestatus(repo)
244 except error.RepoLookupError:
247 except error.RepoLookupError:
245 if abortf:
248 if abortf:
246 clearstatus(repo)
249 clearstatus(repo)
247 repo.ui.warn(_('rebase aborted (no revision is removed,'
250 repo.ui.warn(_('rebase aborted (no revision is removed,'
248 ' only broken state is cleared)\n'))
251 ' only broken state is cleared)\n'))
249 return 0
252 return 0
250 else:
253 else:
251 msg = _('cannot continue inconsistent rebase')
254 msg = _('cannot continue inconsistent rebase')
252 hint = _('use "hg rebase --abort" to clear broken state')
255 hint = _('use "hg rebase --abort" to clear broken state')
253 raise error.Abort(msg, hint=hint)
256 raise error.Abort(msg, hint=hint)
254 if abortf:
257 if abortf:
255 return abort(repo, originalwd, target, state,
258 return abort(repo, originalwd, target, state,
256 activebookmark=activebookmark)
259 activebookmark=activebookmark)
257 else:
260 else:
258 if srcf and basef:
261 if srcf and basef:
259 raise error.Abort(_('cannot specify both a '
262 raise error.Abort(_('cannot specify both a '
260 'source and a base'))
263 'source and a base'))
261 if revf and basef:
264 if revf and basef:
262 raise error.Abort(_('cannot specify both a '
265 raise error.Abort(_('cannot specify both a '
263 'revision and a base'))
266 'revision and a base'))
264 if revf and srcf:
267 if revf and srcf:
265 raise error.Abort(_('cannot specify both a '
268 raise error.Abort(_('cannot specify both a '
266 'revision and a source'))
269 'revision and a source'))
267
270
268 cmdutil.checkunfinished(repo)
271 cmdutil.checkunfinished(repo)
269 cmdutil.bailifchanged(repo)
272 cmdutil.bailifchanged(repo)
270
273
271 if destf:
274 if destf:
272 dest = scmutil.revsingle(repo, destf)
275 dest = scmutil.revsingle(repo, destf)
273 else:
276 else:
274 dest = repo[_destrebase(repo)]
277 dest = repo[_destrebase(repo)]
275 destf = str(dest)
278 destf = str(dest)
276
279
277 if revf:
280 if revf:
278 rebaseset = scmutil.revrange(repo, revf)
281 rebaseset = scmutil.revrange(repo, revf)
279 if not rebaseset:
282 if not rebaseset:
280 ui.status(_('empty "rev" revision set - '
283 ui.status(_('empty "rev" revision set - '
281 'nothing to rebase\n'))
284 'nothing to rebase\n'))
282 return _nothingtorebase()
285 return _nothingtorebase()
283 elif srcf:
286 elif srcf:
284 src = scmutil.revrange(repo, [srcf])
287 src = scmutil.revrange(repo, [srcf])
285 if not src:
288 if not src:
286 ui.status(_('empty "source" revision set - '
289 ui.status(_('empty "source" revision set - '
287 'nothing to rebase\n'))
290 'nothing to rebase\n'))
288 return _nothingtorebase()
291 return _nothingtorebase()
289 rebaseset = repo.revs('(%ld)::', src)
292 rebaseset = repo.revs('(%ld)::', src)
290 assert rebaseset
293 assert rebaseset
291 else:
294 else:
292 base = scmutil.revrange(repo, [basef or '.'])
295 base = scmutil.revrange(repo, [basef or '.'])
293 if not base:
296 if not base:
294 ui.status(_('empty "base" revision set - '
297 ui.status(_('empty "base" revision set - '
295 "can't compute rebase set\n"))
298 "can't compute rebase set\n"))
296 return _nothingtorebase()
299 return _nothingtorebase()
297 commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first()
300 commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first()
298 if commonanc is not None:
301 if commonanc is not None:
299 rebaseset = repo.revs('(%d::(%ld) - %d)::',
302 rebaseset = repo.revs('(%d::(%ld) - %d)::',
300 commonanc, base, commonanc)
303 commonanc, base, commonanc)
301 else:
304 else:
302 rebaseset = []
305 rebaseset = []
303
306
304 if not rebaseset:
307 if not rebaseset:
305 # transform to list because smartsets are not comparable to
308 # transform to list because smartsets are not comparable to
306 # lists. This should be improved to honor laziness of
309 # lists. This should be improved to honor laziness of
307 # smartset.
310 # smartset.
308 if list(base) == [dest.rev()]:
311 if list(base) == [dest.rev()]:
309 if basef:
312 if basef:
310 ui.status(_('nothing to rebase - %s is both "base"'
313 ui.status(_('nothing to rebase - %s is both "base"'
311 ' and destination\n') % dest)
314 ' and destination\n') % dest)
312 else:
315 else:
313 ui.status(_('nothing to rebase - working directory '
316 ui.status(_('nothing to rebase - working directory '
314 'parent is also destination\n'))
317 'parent is also destination\n'))
315 elif not repo.revs('%ld - ::%d', base, dest):
318 elif not repo.revs('%ld - ::%d', base, dest):
316 if basef:
319 if basef:
317 ui.status(_('nothing to rebase - "base" %s is '
320 ui.status(_('nothing to rebase - "base" %s is '
318 'already an ancestor of destination '
321 'already an ancestor of destination '
319 '%s\n') %
322 '%s\n') %
320 ('+'.join(str(repo[r]) for r in base),
323 ('+'.join(str(repo[r]) for r in base),
321 dest))
324 dest))
322 else:
325 else:
323 ui.status(_('nothing to rebase - working '
326 ui.status(_('nothing to rebase - working '
324 'directory parent is already an '
327 'directory parent is already an '
325 'ancestor of destination %s\n') % dest)
328 'ancestor of destination %s\n') % dest)
326 else: # can it happen?
329 else: # can it happen?
327 ui.status(_('nothing to rebase from %s to %s\n') %
330 ui.status(_('nothing to rebase from %s to %s\n') %
328 ('+'.join(str(repo[r]) for r in base), dest))
331 ('+'.join(str(repo[r]) for r in base), dest))
329 return _nothingtorebase()
332 return _nothingtorebase()
330
333
331 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
334 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
332 if (not (keepf or allowunstable)
335 if (not (keepf or allowunstable)
333 and repo.revs('first(children(%ld) - %ld)',
336 and repo.revs('first(children(%ld) - %ld)',
334 rebaseset, rebaseset)):
337 rebaseset, rebaseset)):
335 raise error.Abort(
338 raise error.Abort(
336 _("can't remove original changesets with"
339 _("can't remove original changesets with"
337 " unrebased descendants"),
340 " unrebased descendants"),
338 hint=_('use --keep to keep original changesets'))
341 hint=_('use --keep to keep original changesets'))
339
342
340 obsoletenotrebased = {}
343 obsoletenotrebased = {}
341 if ui.configbool('experimental', 'rebaseskipobsolete'):
344 if ui.configbool('experimental', 'rebaseskipobsolete'):
342 rebasesetrevs = set(rebaseset)
345 rebasesetrevs = set(rebaseset)
343 obsoletenotrebased = _computeobsoletenotrebased(repo,
346 obsoletenotrebased = _computeobsoletenotrebased(repo,
344 rebasesetrevs,
347 rebasesetrevs,
345 dest)
348 dest)
346
349
347 # - plain prune (no successor) changesets are rebased
350 # - plain prune (no successor) changesets are rebased
348 # - split changesets are not rebased if at least one of the
351 # - split changesets are not rebased if at least one of the
349 # changeset resulting from the split is an ancestor of dest
352 # changeset resulting from the split is an ancestor of dest
350 rebaseset = rebasesetrevs - set(obsoletenotrebased)
353 rebaseset = rebasesetrevs - set(obsoletenotrebased)
351 if rebasesetrevs and not rebaseset:
354 if rebasesetrevs and not rebaseset:
352 msg = _('all requested changesets have equivalents '
355 msg = _('all requested changesets have equivalents '
353 'or were marked as obsolete')
356 'or were marked as obsolete')
354 hint = _('to force the rebase, set the config '
357 hint = _('to force the rebase, set the config '
355 'experimental.rebaseskipobsolete to False')
358 'experimental.rebaseskipobsolete to False')
356 raise error.Abort(msg, hint=hint)
359 raise error.Abort(msg, hint=hint)
357
360
358 result = buildstate(repo, dest, rebaseset, collapsef,
361 result = buildstate(repo, dest, rebaseset, collapsef,
359 obsoletenotrebased)
362 obsoletenotrebased)
360
363
361 if not result:
364 if not result:
362 # Empty state built, nothing to rebase
365 # Empty state built, nothing to rebase
363 ui.status(_('nothing to rebase\n'))
366 ui.status(_('nothing to rebase\n'))
364 return _nothingtorebase()
367 return _nothingtorebase()
365
368
366 root = min(rebaseset)
369 root = min(rebaseset)
367 if not keepf and not repo[root].mutable():
370 if not keepf and not repo[root].mutable():
368 raise error.Abort(_("can't rebase public changeset %s")
371 raise error.Abort(_("can't rebase public changeset %s")
369 % repo[root],
372 % repo[root],
370 hint=_('see "hg help phases" for details'))
373 hint=_('see "hg help phases" for details'))
371
374
372 originalwd, target, state = result
375 originalwd, target, state = result
373 if collapsef:
376 if collapsef:
374 targetancestors = repo.changelog.ancestors([target],
377 targetancestors = repo.changelog.ancestors([target],
375 inclusive=True)
378 inclusive=True)
376 external = externalparent(repo, state, targetancestors)
379 external = externalparent(repo, state, targetancestors)
377
380
378 if dest.closesbranch() and not keepbranchesf:
381 if dest.closesbranch() and not keepbranchesf:
379 ui.status(_('reopening closed branch head %s\n') % dest)
382 ui.status(_('reopening closed branch head %s\n') % dest)
380
383
381 if keepbranchesf and collapsef:
384 if keepbranchesf and collapsef:
382 branches = set()
385 branches = set()
383 for rev in state:
386 for rev in state:
384 branches.add(repo[rev].branch())
387 branches.add(repo[rev].branch())
385 if len(branches) > 1:
388 if len(branches) > 1:
386 raise error.Abort(_('cannot collapse multiple named '
389 raise error.Abort(_('cannot collapse multiple named '
387 'branches'))
390 'branches'))
388
391
389 # Rebase
392 # Rebase
390 if not targetancestors:
393 if not targetancestors:
391 targetancestors = repo.changelog.ancestors([target], inclusive=True)
394 targetancestors = repo.changelog.ancestors([target], inclusive=True)
392
395
393 # Keep track of the current bookmarks in order to reset them later
396 # Keep track of the current bookmarks in order to reset them later
394 currentbookmarks = repo._bookmarks.copy()
397 currentbookmarks = repo._bookmarks.copy()
395 activebookmark = activebookmark or repo._activebookmark
398 activebookmark = activebookmark or repo._activebookmark
396 if activebookmark:
399 if activebookmark:
397 bookmarks.deactivate(repo)
400 bookmarks.deactivate(repo)
398
401
399 extrafn = _makeextrafn(extrafns)
402 extrafn = _makeextrafn(extrafns)
400
403
401 sortedstate = sorted(state)
404 sortedstate = sorted(state)
402 total = len(sortedstate)
405 total = len(sortedstate)
403 pos = 0
406 pos = 0
404 for rev in sortedstate:
407 for rev in sortedstate:
405 ctx = repo[rev]
408 ctx = repo[rev]
406 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
409 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
407 ctx.description().split('\n', 1)[0])
410 ctx.description().split('\n', 1)[0])
408 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
411 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
409 if names:
412 if names:
410 desc += ' (%s)' % ' '.join(names)
413 desc += ' (%s)' % ' '.join(names)
411 pos += 1
414 pos += 1
412 if state[rev] == revtodo:
415 if state[rev] == revtodo:
413 ui.status(_('rebasing %s\n') % desc)
416 ui.status(_('rebasing %s\n') % desc)
414 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
417 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
415 _('changesets'), total)
418 _('changesets'), total)
416 p1, p2, base = defineparents(repo, rev, target, state,
419 p1, p2, base = defineparents(repo, rev, target, state,
417 targetancestors)
420 targetancestors)
418 storestatus(repo, originalwd, target, state, collapsef, keepf,
421 storestatus(repo, originalwd, target, state, collapsef, keepf,
419 keepbranchesf, external, activebookmark)
422 keepbranchesf, external, activebookmark)
420 if len(repo[None].parents()) == 2:
423 if len(repo[None].parents()) == 2:
421 repo.ui.debug('resuming interrupted rebase\n')
424 repo.ui.debug('resuming interrupted rebase\n')
422 else:
425 else:
423 try:
426 try:
424 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
427 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
425 'rebase')
428 'rebase')
426 stats = rebasenode(repo, rev, p1, base, state,
429 stats = rebasenode(repo, rev, p1, base, state,
427 collapsef, target)
430 collapsef, target)
428 if stats and stats[3] > 0:
431 if stats and stats[3] > 0:
429 raise error.InterventionRequired(
432 raise error.InterventionRequired(
430 _('unresolved conflicts (see hg '
433 _('unresolved conflicts (see hg '
431 'resolve, then hg rebase --continue)'))
434 'resolve, then hg rebase --continue)'))
432 finally:
435 finally:
433 ui.setconfig('ui', 'forcemerge', '', 'rebase')
436 ui.setconfig('ui', 'forcemerge', '', 'rebase')
434 if not collapsef:
437 if not collapsef:
435 merging = p2 != nullrev
438 merging = p2 != nullrev
436 editform = cmdutil.mergeeditform(merging, 'rebase')
439 editform = cmdutil.mergeeditform(merging, 'rebase')
437 editor = cmdutil.getcommiteditor(editform=editform, **opts)
440 editor = cmdutil.getcommiteditor(editform=editform, **opts)
438 newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
441 newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
439 editor=editor,
442 editor=editor,
440 keepbranches=keepbranchesf,
443 keepbranches=keepbranchesf,
441 date=date)
444 date=date)
442 else:
445 else:
443 # Skip commit if we are collapsing
446 # Skip commit if we are collapsing
444 repo.dirstate.beginparentchange()
447 repo.dirstate.beginparentchange()
445 repo.setparents(repo[p1].node())
448 repo.setparents(repo[p1].node())
446 repo.dirstate.endparentchange()
449 repo.dirstate.endparentchange()
447 newnode = None
450 newnode = None
448 # Update the state
451 # Update the state
449 if newnode is not None:
452 if newnode is not None:
450 state[rev] = repo[newnode].rev()
453 state[rev] = repo[newnode].rev()
451 ui.debug('rebased as %s\n' % short(newnode))
454 ui.debug('rebased as %s\n' % short(newnode))
452 else:
455 else:
453 if not collapsef:
456 if not collapsef:
454 ui.warn(_('note: rebase of %d:%s created no changes '
457 ui.warn(_('note: rebase of %d:%s created no changes '
455 'to commit\n') % (rev, ctx))
458 'to commit\n') % (rev, ctx))
456 skipped.add(rev)
459 skipped.add(rev)
457 state[rev] = p1
460 state[rev] = p1
458 ui.debug('next revision set to %s\n' % p1)
461 ui.debug('next revision set to %s\n' % p1)
459 elif state[rev] == nullmerge:
462 elif state[rev] == nullmerge:
460 ui.debug('ignoring null merge rebase of %s\n' % rev)
463 ui.debug('ignoring null merge rebase of %s\n' % rev)
461 elif state[rev] == revignored:
464 elif state[rev] == revignored:
462 ui.status(_('not rebasing ignored %s\n') % desc)
465 ui.status(_('not rebasing ignored %s\n') % desc)
463 elif state[rev] == revprecursor:
466 elif state[rev] == revprecursor:
464 targetctx = repo[obsoletenotrebased[rev]]
467 targetctx = repo[obsoletenotrebased[rev]]
465 desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
468 desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
466 targetctx.description().split('\n', 1)[0])
469 targetctx.description().split('\n', 1)[0])
467 msg = _('note: not rebasing %s, already in destination as %s\n')
470 msg = _('note: not rebasing %s, already in destination as %s\n')
468 ui.status(msg % (desc, desctarget))
471 ui.status(msg % (desc, desctarget))
469 elif state[rev] == revpruned:
472 elif state[rev] == revpruned:
470 msg = _('note: not rebasing %s, it has no successor\n')
473 msg = _('note: not rebasing %s, it has no successor\n')
471 ui.status(msg % desc)
474 ui.status(msg % desc)
472 else:
475 else:
473 ui.status(_('already rebased %s as %s\n') %
476 ui.status(_('already rebased %s as %s\n') %
474 (desc, repo[state[rev]]))
477 (desc, repo[state[rev]]))
475
478
476 ui.progress(_('rebasing'), None)
479 ui.progress(_('rebasing'), None)
477 ui.note(_('rebase merging completed\n'))
480 ui.note(_('rebase merging completed\n'))
478
481
479 if collapsef and not keepopen:
482 if collapsef and not keepopen:
480 p1, p2, _base = defineparents(repo, min(state), target,
483 p1, p2, _base = defineparents(repo, min(state), target,
481 state, targetancestors)
484 state, targetancestors)
482 editopt = opts.get('edit')
485 editopt = opts.get('edit')
483 editform = 'rebase.collapse'
486 editform = 'rebase.collapse'
484 if collapsemsg:
487 if collapsemsg:
485 commitmsg = collapsemsg
488 commitmsg = collapsemsg
486 else:
489 else:
487 commitmsg = 'Collapsed revision'
490 commitmsg = 'Collapsed revision'
488 for rebased in state:
491 for rebased in state:
489 if rebased not in skipped and state[rebased] > nullmerge:
492 if rebased not in skipped and state[rebased] > nullmerge:
490 commitmsg += '\n* %s' % repo[rebased].description()
493 commitmsg += '\n* %s' % repo[rebased].description()
491 editopt = True
494 editopt = True
492 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
495 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
493 newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
496 newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
494 extrafn=extrafn, editor=editor,
497 extrafn=extrafn, editor=editor,
495 keepbranches=keepbranchesf,
498 keepbranches=keepbranchesf,
496 date=date)
499 date=date)
497 if newnode is None:
500 if newnode is None:
498 newrev = target
501 newrev = target
499 else:
502 else:
500 newrev = repo[newnode].rev()
503 newrev = repo[newnode].rev()
501 for oldrev in state.iterkeys():
504 for oldrev in state.iterkeys():
502 if state[oldrev] > nullmerge:
505 if state[oldrev] > nullmerge:
503 state[oldrev] = newrev
506 state[oldrev] = newrev
504
507
505 if 'qtip' in repo.tags():
508 if 'qtip' in repo.tags():
506 updatemq(repo, state, skipped, **opts)
509 updatemq(repo, state, skipped, **opts)
507
510
508 if currentbookmarks:
511 if currentbookmarks:
509 # Nodeids are needed to reset bookmarks
512 # Nodeids are needed to reset bookmarks
510 nstate = {}
513 nstate = {}
511 for k, v in state.iteritems():
514 for k, v in state.iteritems():
512 if v > nullmerge:
515 if v > nullmerge:
513 nstate[repo[k].node()] = repo[v].node()
516 nstate[repo[k].node()] = repo[v].node()
514 # XXX this is the same as dest.node() for the non-continue path --
517 # XXX this is the same as dest.node() for the non-continue path --
515 # this should probably be cleaned up
518 # this should probably be cleaned up
516 targetnode = repo[target].node()
519 targetnode = repo[target].node()
517
520
518 # restore original working directory
521 # restore original working directory
519 # (we do this before stripping)
522 # (we do this before stripping)
520 newwd = state.get(originalwd, originalwd)
523 newwd = state.get(originalwd, originalwd)
521 if newwd < 0:
524 if newwd < 0:
522 # original directory is a parent of rebase set root or ignored
525 # original directory is a parent of rebase set root or ignored
523 newwd = originalwd
526 newwd = originalwd
524 if newwd not in [c.rev() for c in repo[None].parents()]:
527 if newwd not in [c.rev() for c in repo[None].parents()]:
525 ui.note(_("update back to initial working directory parent\n"))
528 ui.note(_("update back to initial working directory parent\n"))
526 hg.updaterepo(repo, newwd, False)
529 hg.updaterepo(repo, newwd, False)
527
530
528 if not keepf:
531 if not keepf:
529 collapsedas = None
532 collapsedas = None
530 if collapsef:
533 if collapsef:
531 collapsedas = newnode
534 collapsedas = newnode
532 clearrebased(ui, repo, state, skipped, collapsedas)
535 clearrebased(ui, repo, state, skipped, collapsedas)
533
536
534 tr = None
537 tr = None
535 try:
538 try:
536 tr = repo.transaction('bookmark')
539 tr = repo.transaction('bookmark')
537 if currentbookmarks:
540 if currentbookmarks:
538 updatebookmarks(repo, targetnode, nstate, currentbookmarks, tr)
541 updatebookmarks(repo, targetnode, nstate, currentbookmarks, tr)
539 if activebookmark not in repo._bookmarks:
542 if activebookmark not in repo._bookmarks:
540 # active bookmark was divergent one and has been deleted
543 # active bookmark was divergent one and has been deleted
541 activebookmark = None
544 activebookmark = None
542 tr.close()
545 tr.close()
543 finally:
546 finally:
544 release(tr)
547 release(tr)
545 clearstatus(repo)
548 clearstatus(repo)
546
549
547 ui.note(_("rebase completed\n"))
550 ui.note(_("rebase completed\n"))
548 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
551 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
549 if skipped:
552 if skipped:
550 ui.note(_("%d revisions have been skipped\n") % len(skipped))
553 ui.note(_("%d revisions have been skipped\n") % len(skipped))
551
554
552 if (activebookmark and
555 if (activebookmark and
553 repo['.'].node() == repo._bookmarks[activebookmark]):
556 repo['.'].node() == repo._bookmarks[activebookmark]):
554 bookmarks.activate(repo, activebookmark)
557 bookmarks.activate(repo, activebookmark)
555
558
556 finally:
559 finally:
557 release(lock, wlock)
560 release(lock, wlock)
558
561
def externalparent(repo, state, targetancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state is collapsed on top of targetancestors.
    Abort if there is more than one parent.
    """
    # Gather every parent of the collapsed revisions that lives outside
    # both the rebase set itself and the destination's ancestors.
    lowest = min(state)
    outside = set()
    for rev in state:
        if rev == lowest:
            # The root of the collapsed set keeps its own parent; only
            # the other revisions may contribute an "external" parent.
            continue
        for parent in repo[rev].parents():
            prev = parent.rev()
            if prev not in state and prev not in targetancestors:
                outside.add(prev)
    if len(outside) == 1:
        return outside.pop()
    if not outside:
        return nullrev
    # More than one external parent: the collapse result cannot be a
    # well-formed changeset, so refuse.
    raise error.Abort(_('unable to collapse on top of %s, there is more '
                        'than one external parent: %s') %
                      (max(targetancestors),
                       ', '.join(str(p) for p in sorted(outside))))
581
584
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
                 keepbranches=False, date=None):
    '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
    but also store useful information in extra.
    Return node of committed revision.

    rev        - revision whose metadata (description, user, date, extra)
                 seeds the new commit
    p1, p2     - revisions to install as the working directory parents
                 before committing
    commitmsg  - override message; defaults to rev's description
    editor     - commit editor callback passed through to repo.commit()
    extrafn    - optional callback (ctx, extra) that may mutate the extra
                 dict before committing (used e.g. by hgsubversion)
    keepbranches - when True, preserve rev's named branch
    date       - override date; defaults to rev's date
    '''
    # NOTE(review): dirstateguard presumably rolls the dirstate back if we
    # never reach dsguard.close() below — confirm against cmdutil.
    dsguard = cmdutil.dirstateguard(repo, 'rebase')
    try:
        # Reparent the working directory so the commit gets p1/p2.
        repo.setparents(repo[p1].node(), repo[p2].node())
        ctx = repo[rev]
        if commitmsg is None:
            commitmsg = ctx.description()
        # Only force an (possibly empty) commit when the branch actually
        # differs from the new first parent's branch.
        keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
        extra = ctx.extra().copy()
        if not keepbranches:
            # Drop the recorded branch so the commit does not keep it.
            del extra['branch']
        # Record where this changeset was rebased from.
        extra['rebase_source'] = ctx.hex()
        if extrafn:
            extrafn(ctx, extra)

        # Temporarily adjust the phase of new commits; restored below.
        backup = repo.ui.backupconfig('phases', 'new-commit')
        try:
            # Never commit below draft, but keep a secret source secret.
            targetphase = max(ctx.phase(), phases.draft)
            repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
            if keepbranch:
                # Allow an empty commit so the branch change is recorded.
                repo.ui.setconfig('ui', 'allowemptycommit', True)
            # Commit might fail if unresolved files exist
            if date is None:
                date = ctx.date()
            newnode = repo.commit(text=commitmsg, user=ctx.user(),
                                  date=date, extra=extra, editor=editor)
        finally:
            repo.ui.restoreconfig(backup)

        # Sync the dirstate branch with the committed node's branch.
        repo.dirstate.setbranch(repo[newnode].branch())
        dsguard.close()
        return newnode
    finally:
        release(dsguard)
620
623
def rebasenode(repo, rev, p1, base, state, collapse, target):
    'Rebase a single revision rev on top of p1 using base as merge ancestor'
    # Merge phase: first make the working directory match p1, then merge
    # the revision being rebased into it.
    if repo['.'].rev() != p1:
        repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
        merge.update(repo, p1, False, True)
    else:
        repo.ui.debug(" already in target\n")
    repo.dirstate.write(repo.currenttransaction())
    repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
    if base is not None:
        repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
    # When collapsing in-place, the parent is the common ancestor, we
    # have to allow merging with it.
    stats = merge.update(repo, rev, True, True, base, collapse,
                         labels=['dest', 'source'])
    if collapse:
        copies.duplicatecopies(repo, rev, target)
    else:
        # Without --collapse we duplicate the copies between the revision
        # being rebased and its first parent, but *not* any copies that
        # have already been performed in the destination.
        copies.duplicatecopies(repo, rev, repo[rev].p1().rev(),
                               skiprev=target)
    return stats
649
652
def nearestrebased(repo, rev, state):
    """return the nearest ancestors of rev in the rebase result"""
    # Only consider revisions that were (or will be) actually rebased:
    # state values above nullmerge are real or pending destinations.
    rebased = [r for r in state if state[r] > nullmerge]
    candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
    if not candidates:
        return None
    return state[candidates.first()]
658
661
def defineparents(repo, rev, target, state, targetancestors):
    'Return the new parent relationship of the revision that will be rebased'
    parents = repo[rev].parents()
    p1 = p2 = nullrev

    # Map the first parent into the rebased world.
    p1n = parents[0].rev()
    if p1n in targetancestors:
        p1 = target
    elif p1n in state:
        if state[p1n] == nullmerge:
            p1 = target
        elif state[p1n] in revskipped:
            p1 = nearestrebased(repo, p1n, state)
            if p1 is None:
                p1 = target
        else:
            p1 = state[p1n]
    else: # p1n external
        p1 = target
        p2 = p1n

    # Map an interesting second parent, if any.
    if len(parents) == 2 and parents[1].rev() not in targetancestors:
        p2n = parents[1].rev()
        # interesting second parent
        if p2n in state:
            if p1 == target: # p1n in targetancestors or external
                p1 = state[p2n]
            elif state[p2n] in revskipped:
                p2 = nearestrebased(repo, p2n, state)
                if p2 is None:
                    # no ancestors rebased yet, detach
                    p2 = target
            else:
                p2 = state[p2n]
        else: # p2n external
            if p2 != nullrev: # p1n external too => rev is a merged revision
                raise error.Abort(_('cannot use revision %d as base, result '
                        'would have 3 parents') % rev)
            p2 = p2n
    repo.ui.debug(" future parents are %d and %d\n" %
                  (repo[p1].rev(), repo[p2].rev()))

    if rev == min(state):
        # Case (1) initial changeset of a non-detaching rebase.
        # Let the merge mechanism find the base itself.
        base = None
    elif not repo[rev].p2():
        # Case (2) detaching the node with a single parent, use this parent
        base = repo[rev].p1().rev()
    else:
        # Assuming there is a p1, this is the case where there also is a p2.
        # We are thus rebasing a merge and need to pick the right merge base.
        #
        # Imagine we have:
        # - M: current rebase revision in this step
        # - A: one parent of M
        # - B: other parent of M
        # - D: destination of this merge step (p1 var)
        #
        # Consider the case where D is a descendant of A or B and the other is
        # 'outside'. In this case, the right merge base is the D ancestor.
        #
        # An informal proof, assuming A is 'outside' and B is the D ancestor:
        #
        # If we pick B as the base, the merge involves:
        # - changes from B to M (actual changeset payload)
        # - changes from B to D (induced by rebase) as D is a rebased
        #   version of B)
        # Which exactly represent the rebase operation.
        #
        # If we pick A as the base, the merge involves:
        # - changes from A to M (actual changeset payload)
        # - changes from A to D (with include changes between unrelated A and B
        #   plus changes induced by rebase)
        # Which does not represent anything sensible and creates a lot of
        # conflicts. A is thus not the right choice - B is.
        #
        # Note: The base found in this 'proof' is only correct in the specified
        # case. This base does not make sense if is not D a descendant of A or B
        # or if the other is not parent 'outside' (especially not if the other
        # parent has been rebased). The current implementation does not
        # make it feasible to consider different cases separately. In these
        # other cases we currently just leave it to the user to correctly
        # resolve an impossible merge using a wrong ancestor.
        for p in repo[rev].parents():
            if state.get(p.rev()) == p1:
                base = p.rev()
                break
        else: # fallback when base not found
            base = None

            # Raise because this function is called wrong (see issue 4106)
            raise AssertionError('no base found to rebase on '
                                 '(defineparents called wrong)')
    return p1, p2, base
754
757
def isagitpatch(repo, patchname):
    '''Return true if the given patch is in git format.

    The mq patch file is scanned line by line for a "diff --git" header.
    '''
    mqpatch = os.path.join(repo.mq.path, patchname)
    # Use open() rather than the Python 2-only file() builtin, and make
    # sure the handle is closed even if reading raises (the original
    # leaked the descriptor until garbage collection).
    fp = open(mqpatch, 'rb')
    try:
        for line in patch.linereader(fp):
            if line.startswith('diff --git'):
                return True
        return False
    finally:
        fp.close()
762
765
def updatemq(repo, state, skipped, **opts):
    'Update rebased mq patches - finalize and then import them'
    mqrebase = {}
    mq = repo.mq
    original_series = mq.fullseries[:]
    skippedpatches = set()

    # Collect applied patches that took part in the rebase.
    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
                          (rev, p.name))
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev in skipped:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])
            else:
                name, isgit = mqrebase[rev]
                repo.ui.note(_('updating mq patch %s to %s:%s\n') %
                             (name, state[rev], repo[state[rev]]))
                mq.qimport(repo, (), patchname=name, git=isgit,
                           rev=[str(state[rev])])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [s for s in original_series
                     if mq.guard_re.split(s, 1)[0] not in skippedpatches]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
803
806
def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr):
    'Move bookmarks to their correct changesets, and delete divergent ones'
    marks = repo._bookmarks
    for name, oldnode in originalbookmarks.iteritems():
        if oldnode in nstate:
            # update the bookmarks for revs that have moved
            marks[name] = nstate[oldnode]
            bookmarks.deletedivergent(repo, [targetnode], name)
    marks.recordchange(tr)
813
816
def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
                external, activebookmark):
    '''Store the current status to allow recovery.

    Writes .hg/rebasestate: one value per line (original working
    directory, target, external revision, three boolean flags, the
    active bookmark or an empty string), then one oldrev:newrev hash
    pair per entry in the state mapping.
    '''
    f = repo.vfs("rebasestate", "w")
    # Close the file even if a write fails, so the descriptor is not
    # leaked (the original only closed on the success path).
    try:
        f.write(repo[originalwd].hex() + '\n')
        f.write(repo[target].hex() + '\n')
        f.write(repo[external].hex() + '\n')
        f.write('%d\n' % int(collapse))
        f.write('%d\n' % int(keep))
        f.write('%d\n' % int(keepbranches))
        f.write('%s\n' % (activebookmark or ''))
        for d, v in state.iteritems():
            oldrev = repo[d].hex()
            if v >= 0:
                newrev = repo[v].hex()
            elif v == revtodo:
                # To maintain format compatibility, we have to use nullid.
                # Please do remove this special case when upgrading the format.
                newrev = hex(nullid)
            else:
                newrev = v
            f.write("%s:%s\n" % (oldrev, newrev))
    finally:
        f.close()
    repo.ui.debug('rebase status stored\n')
838
841
def clearstatus(repo):
    '''Remove the rebase status files.'''
    # Drop the visibility blocker for the rebase set first, then delete
    # the state file itself; a missing file is not an error.
    _clearrebasesetvisibiliy(repo)
    util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
843
846
def restorestatus(repo):
    '''Restore a previously stored status.

    Parses .hg/rebasestate (see storestatus for the format) and returns
    the tuple (originalwd, target, state, skipped, collapse, keep,
    keepbranches, external, activebookmark). Aborts if no rebase is in
    progress or the state file is incomplete.
    '''
    keepbranches = None
    target = None
    collapse = False
    external = nullrev
    activebookmark = None
    state = {}

    try:
        f = repo.vfs("rebasestate")
        # Read the whole file and close it immediately; the original
        # left the descriptor open until garbage collection.
        try:
            data = f.read()
        finally:
            f.close()
        for i, l in enumerate(data.splitlines()):
            if i == 0:
                originalwd = repo[l].rev()
            elif i == 1:
                target = repo[l].rev()
            elif i == 2:
                external = repo[l].rev()
            elif i == 3:
                collapse = bool(int(l))
            elif i == 4:
                keep = bool(int(l))
            elif i == 5:
                keepbranches = bool(int(l))
            elif i == 6 and not (len(l) == 81 and ':' in l):
                # line 6 is a recent addition, so for backwards compatibility
                # check that the line doesn't look like the oldrev:newrev lines
                activebookmark = l
            else:
                oldrev, newrev = l.split(':')
                if newrev in (str(nullmerge), str(revignored),
                              str(revprecursor), str(revpruned)):
                    state[repo[oldrev].rev()] = int(newrev)
                elif newrev == nullid:
                    state[repo[oldrev].rev()] = revtodo
                    # Legacy compat special case
                else:
                    state[repo[oldrev].rev()] = repo[newrev].rev()

    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        raise error.Abort(_('no rebase in progress'))

    if keepbranches is None:
        raise error.Abort(_('.hg/rebasestate is incomplete'))

    skipped = set()
    # recompute the set of skipped revs
    if not collapse:
        seen = set([target])
        for old, new in sorted(state.items()):
            if new != revtodo and new in seen:
                skipped.add(old)
            seen.add(new)
    repo.ui.debug('computed skipped revs: %s\n' %
                  (' '.join(str(r) for r in sorted(skipped)) or None))
    repo.ui.debug('rebase status resumed\n')
    _setrebasesetvisibility(repo, state.keys())
    return (originalwd, target, state, skipped,
            collapse, keep, keepbranches, external, activebookmark)
905
908
def needupdate(repo, state):
    '''check whether we should `update --clean` away from a merge, or if
    somehow the working dir got forcibly updated, e.g. by older hg'''
    parents = [p.rev() for p in repo[None].parents()]

    # Not in a merge state at all?
    if len(parents) < 2:
        return False

    # We should be standing on the first as-of-yet unrebased commit.
    firstunrebased = min(old for old, new in state.iteritems()
                         if new == nullrev)
    return firstunrebased in parents
922
925
def abort(repo, originalwd, target, state, activebookmark=None):
    '''Restore the repository to its original state.  Additional args:

    activebookmark: the name of the bookmark that should be active after the
    restore'''

    try:
        # If the first commits in the rebased set get skipped during the
        # rebase, their values within the state mapping will be the target
        # rev id. The dstates list must not contain the target rev
        # (issue4896).
        dstates = [s for s in state.values() if s >= 0 and s != target]
        immutable = [d for d in dstates if not repo[d].mutable()]
        cleanup = True
        if immutable:
            repo.ui.warn(_("warning: can't clean up public changesets %s\n")
                         % ', '.join(str(repo[r]) for r in immutable),
                         hint=_('see "hg help phases" for details'))
            cleanup = False

        descendants = set()
        if dstates:
            descendants = set(repo.changelog.descendants(dstates))
        if descendants - set(dstates):
            repo.ui.warn(_("warning: new changesets detected on target branch, "
                           "can't strip\n"))
            cleanup = False

        if cleanup:
            # Update away from the rebase if necessary
            if needupdate(repo, state):
                merge.update(repo, originalwd, False, True)

            # Strip from the first rebased revision
            rebased = [x for x in state.values() if x >= 0 and x != target]
            if rebased:
                strippoints = [c.node()
                               for c in repo.set('roots(%ld)', rebased)]
                # no backup of rebased cset versions needed
                repair.strip(repo.ui, repo, strippoints)

        if activebookmark and activebookmark in repo._bookmarks:
            bookmarks.activate(repo, activebookmark)

    finally:
        clearstatus(repo)
        repo.ui.warn(_('rebase aborted\n'))
    return 0
970
973
def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
    '''Define which revisions are going to be rebased and where

    repo: repo
    dest: context
    rebaseset: set of rev
    '''
    _setrebasesetvisibility(repo, rebaseset)

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if 'qtip' in repo.tags() and (dest.node() in
                            [s.node for s in repo.mq.applied]):
        raise error.Abort(_('cannot rebase onto an applied mq patch'))

    roots = list(repo.set('roots(%ld)', rebaseset))
    if not roots:
        raise error.Abort(_('no matching revisions'))
    roots.sort()
    state = {}
    detachset = set()
    for root in roots:
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise error.Abort(_('source is ancestor of destination'))
        if commonbase == dest:
            samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and root in dest.children():
                repo.ui.debug('source is a child of destination\n')
                return None

        repo.ui.debug('rebase onto %d starting from %s\n' % (dest, root))
        state.update(dict.fromkeys(rebaseset, revtodo))
        # Rebase tries to turn <dest> into a parent of <root> while
        # preserving the number of parents of rebased changesets:
        #
        # - A changeset with a single parent will always be rebased as a
        #   changeset with a single parent.
        #
        # - A merge will be rebased as merge unless its parents are both
        #   ancestors of <dest> or are themselves in the rebased set and
        #   pruned while rebased.
        #
        # If one parent of <root> is an ancestor of <dest>, the rebased
        # version of this parent will be <dest>. This is always true with
        # --base option.
        #
        # Otherwise, we need to *replace* the original parents with
        # <dest>. This "detaches" the rebased set from its former location
        # and rebases it onto <dest>. Changes introduced by ancestors of
        # <root> not common with <dest> (the detachset, marked as
        # nullmerge) are "removed" from the rebased changesets.
        #
        # - If <root> has a single parent, set it to <dest>.
        #
        # - If <root> is a merge, we cannot decide which parent to
        #   replace, the rebase operation is not clearly defined.
        #
        # Summary:
        #   parent of root in ::<dest>: single parent -> new parent is
        #     <dest>; merge -> parents in ::<dest> remapped to <dest>.
        #   unrelated source: single parent -> new parent is <dest>;
        #     merge -> ambiguous, abort.
        #
        # The actual abort is handled by `defineparents`
        if len(root.parents()) <= 1:
            # ancestors of <root> not ancestors of <dest>
            detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
                                                            [root.rev()]))
    for r in detachset:
        if r not in state:
            state[r] = nullmerge
    if len(roots) > 1:
        # If we have multiple roots, we may have "hole" in the rebase set.
        # Rebase roots that descend from those "hole" should not be detached
        # as other root are. We use the special `revignored` to inform rebase
        # that the revision should be ignored but that `defineparents` should
        # search a rebase destination that make sense regarding rebased
        # topology.
        rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
        for ignored in rebasedomain - set(rebaseset):
            state[ignored] = revignored
    for r in obsoletenotrebased:
        if obsoletenotrebased[r] is None:
            # pruned: no successor to redirect to
            state[r] = revpruned
        else:
            state[r] = revprecursor
    return repo['.'].rev(), dest.rev(), state
1064
1067
1065 def clearrebased(ui, repo, state, skipped, collapsedas=None):
1068 def clearrebased(ui, repo, state, skipped, collapsedas=None):
1066 """dispose of rebased revision at the end of the rebase
1069 """dispose of rebased revision at the end of the rebase
1067
1070
1068 If `collapsedas` is not None, the rebase was a collapse whose result if the
1071 If `collapsedas` is not None, the rebase was a collapse whose result if the
1069 `collapsedas` node."""
1072 `collapsedas` node."""
1070 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1073 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1071 markers = []
1074 markers = []
1072 for rev, newrev in sorted(state.items()):
1075 for rev, newrev in sorted(state.items()):
1073 if newrev >= 0:
1076 if newrev >= 0:
1074 if rev in skipped:
1077 if rev in skipped:
1075 succs = ()
1078 succs = ()
1076 elif collapsedas is not None:
1079 elif collapsedas is not None:
1077 succs = (repo[collapsedas],)
1080 succs = (repo[collapsedas],)
1078 else:
1081 else:
1079 succs = (repo[newrev],)
1082 succs = (repo[newrev],)
1080 markers.append((repo[rev], succs))
1083 markers.append((repo[rev], succs))
1081 if markers:
1084 if markers:
1082 obsolete.createmarkers(repo, markers)
1085 obsolete.createmarkers(repo, markers)
1083 else:
1086 else:
1084 rebased = [rev for rev in state if state[rev] > nullmerge]
1087 rebased = [rev for rev in state if state[rev] > nullmerge]
1085 if rebased:
1088 if rebased:
1086 stripped = []
1089 stripped = []
1087 for root in repo.set('roots(%ld)', rebased):
1090 for root in repo.set('roots(%ld)', rebased):
1088 if set(repo.changelog.descendants([root.rev()])) - set(state):
1091 if set(repo.changelog.descendants([root.rev()])) - set(state):
1089 ui.warn(_("warning: new changesets detected "
1092 ui.warn(_("warning: new changesets detected "
1090 "on source branch, not stripping\n"))
1093 "on source branch, not stripping\n"))
1091 else:
1094 else:
1092 stripped.append(root.node())
1095 stripped.append(root.node())
1093 if stripped:
1096 if stripped:
1094 # backup the old csets by default
1097 # backup the old csets by default
1095 repair.strip(ui, repo, stripped, "all")
1098 repair.strip(ui, repo, stripped, "all")
1096
1099
1097
1100
1098 def pullrebase(orig, ui, repo, *args, **opts):
1101 def pullrebase(orig, ui, repo, *args, **opts):
1099 'Call rebase after pull if the latter has been invoked with --rebase'
1102 'Call rebase after pull if the latter has been invoked with --rebase'
1100 ret = None
1103 ret = None
1101 if opts.get('rebase'):
1104 if opts.get('rebase'):
1102 wlock = lock = None
1105 wlock = lock = None
1103 try:
1106 try:
1104 wlock = repo.wlock()
1107 wlock = repo.wlock()
1105 lock = repo.lock()
1108 lock = repo.lock()
1106 if opts.get('update'):
1109 if opts.get('update'):
1107 del opts['update']
1110 del opts['update']
1108 ui.debug('--update and --rebase are not compatible, ignoring '
1111 ui.debug('--update and --rebase are not compatible, ignoring '
1109 'the update flag\n')
1112 'the update flag\n')
1110
1113
1111 movemarkfrom = repo['.'].node()
1114 movemarkfrom = repo['.'].node()
1112 revsprepull = len(repo)
1115 revsprepull = len(repo)
1113 origpostincoming = commands.postincoming
1116 origpostincoming = commands.postincoming
1114 def _dummy(*args, **kwargs):
1117 def _dummy(*args, **kwargs):
1115 pass
1118 pass
1116 commands.postincoming = _dummy
1119 commands.postincoming = _dummy
1117 try:
1120 try:
1118 ret = orig(ui, repo, *args, **opts)
1121 ret = orig(ui, repo, *args, **opts)
1119 finally:
1122 finally:
1120 commands.postincoming = origpostincoming
1123 commands.postincoming = origpostincoming
1121 revspostpull = len(repo)
1124 revspostpull = len(repo)
1122 if revspostpull > revsprepull:
1125 if revspostpull > revsprepull:
1123 # --rev option from pull conflict with rebase own --rev
1126 # --rev option from pull conflict with rebase own --rev
1124 # dropping it
1127 # dropping it
1125 if 'rev' in opts:
1128 if 'rev' in opts:
1126 del opts['rev']
1129 del opts['rev']
1127 # positional argument from pull conflicts with rebase's own
1130 # positional argument from pull conflicts with rebase's own
1128 # --source.
1131 # --source.
1129 if 'source' in opts:
1132 if 'source' in opts:
1130 del opts['source']
1133 del opts['source']
1131 rebase(ui, repo, **opts)
1134 rebase(ui, repo, **opts)
1132 branch = repo[None].branch()
1135 branch = repo[None].branch()
1133 dest = repo[branch].rev()
1136 dest = repo[branch].rev()
1134 if dest != repo['.'].rev():
1137 if dest != repo['.'].rev():
1135 # there was nothing to rebase we force an update
1138 # there was nothing to rebase we force an update
1136 hg.update(repo, dest)
1139 hg.update(repo, dest)
1137 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
1140 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
1138 ui.status(_("updating bookmark %s\n")
1141 ui.status(_("updating bookmark %s\n")
1139 % repo._activebookmark)
1142 % repo._activebookmark)
1140 finally:
1143 finally:
1141 release(lock, wlock)
1144 release(lock, wlock)
1142 else:
1145 else:
1143 if opts.get('tool'):
1146 if opts.get('tool'):
1144 raise error.Abort(_('--tool can only be used with --rebase'))
1147 raise error.Abort(_('--tool can only be used with --rebase'))
1145 ret = orig(ui, repo, *args, **opts)
1148 ret = orig(ui, repo, *args, **opts)
1146
1149
1147 return ret
1150 return ret
1148
1151
1149 def _setrebasesetvisibility(repo, revs):
1152 def _setrebasesetvisibility(repo, revs):
1150 """store the currently rebased set on the repo object
1153 """store the currently rebased set on the repo object
1151
1154
1152 This is used by another function to prevent rebased revision to because
1155 This is used by another function to prevent rebased revision to because
1153 hidden (see issue4505)"""
1156 hidden (see issue4505)"""
1154 repo = repo.unfiltered()
1157 repo = repo.unfiltered()
1155 revs = set(revs)
1158 revs = set(revs)
1156 repo._rebaseset = revs
1159 repo._rebaseset = revs
1157 # invalidate cache if visibility changes
1160 # invalidate cache if visibility changes
1158 hiddens = repo.filteredrevcache.get('visible', set())
1161 hiddens = repo.filteredrevcache.get('visible', set())
1159 if revs & hiddens:
1162 if revs & hiddens:
1160 repo.invalidatevolatilesets()
1163 repo.invalidatevolatilesets()
1161
1164
1162 def _clearrebasesetvisibiliy(repo):
1165 def _clearrebasesetvisibiliy(repo):
1163 """remove rebaseset data from the repo"""
1166 """remove rebaseset data from the repo"""
1164 repo = repo.unfiltered()
1167 repo = repo.unfiltered()
1165 if '_rebaseset' in vars(repo):
1168 if '_rebaseset' in vars(repo):
1166 del repo._rebaseset
1169 del repo._rebaseset
1167
1170
1168 def _rebasedvisible(orig, repo):
1171 def _rebasedvisible(orig, repo):
1169 """ensure rebased revs stay visible (see issue4505)"""
1172 """ensure rebased revs stay visible (see issue4505)"""
1170 blockers = orig(repo)
1173 blockers = orig(repo)
1171 blockers.update(getattr(repo, '_rebaseset', ()))
1174 blockers.update(getattr(repo, '_rebaseset', ()))
1172 return blockers
1175 return blockers
1173
1176
1174 def _computeobsoletenotrebased(repo, rebasesetrevs, dest):
1177 def _computeobsoletenotrebased(repo, rebasesetrevs, dest):
1175 """return a mapping obsolete => successor for all obsolete nodes to be
1178 """return a mapping obsolete => successor for all obsolete nodes to be
1176 rebased that have a successors in the destination
1179 rebased that have a successors in the destination
1177
1180
1178 obsolete => None entries in the mapping indicate nodes with no succesor"""
1181 obsolete => None entries in the mapping indicate nodes with no succesor"""
1179 obsoletenotrebased = {}
1182 obsoletenotrebased = {}
1180
1183
1181 # Build a mapping successor => obsolete nodes for the obsolete
1184 # Build a mapping successor => obsolete nodes for the obsolete
1182 # nodes to be rebased
1185 # nodes to be rebased
1183 allsuccessors = {}
1186 allsuccessors = {}
1184 cl = repo.changelog
1187 cl = repo.changelog
1185 for r in rebasesetrevs:
1188 for r in rebasesetrevs:
1186 n = repo[r]
1189 n = repo[r]
1187 if n.obsolete():
1190 if n.obsolete():
1188 node = cl.node(r)
1191 node = cl.node(r)
1189 for s in obsolete.allsuccessors(repo.obsstore, [node]):
1192 for s in obsolete.allsuccessors(repo.obsstore, [node]):
1190 try:
1193 try:
1191 allsuccessors[cl.rev(s)] = cl.rev(node)
1194 allsuccessors[cl.rev(s)] = cl.rev(node)
1192 except LookupError:
1195 except LookupError:
1193 pass
1196 pass
1194
1197
1195 if allsuccessors:
1198 if allsuccessors:
1196 # Look for successors of obsolete nodes to be rebased among
1199 # Look for successors of obsolete nodes to be rebased among
1197 # the ancestors of dest
1200 # the ancestors of dest
1198 ancs = cl.ancestors([repo[dest].rev()],
1201 ancs = cl.ancestors([repo[dest].rev()],
1199 stoprev=min(allsuccessors),
1202 stoprev=min(allsuccessors),
1200 inclusive=True)
1203 inclusive=True)
1201 for s in allsuccessors:
1204 for s in allsuccessors:
1202 if s in ancs:
1205 if s in ancs:
1203 obsoletenotrebased[allsuccessors[s]] = s
1206 obsoletenotrebased[allsuccessors[s]] = s
1204 elif (s == allsuccessors[s] and
1207 elif (s == allsuccessors[s] and
1205 allsuccessors.values().count(s) == 1):
1208 allsuccessors.values().count(s) == 1):
1206 # plain prune
1209 # plain prune
1207 obsoletenotrebased[s] = None
1210 obsoletenotrebased[s] = None
1208
1211
1209 return obsoletenotrebased
1212 return obsoletenotrebased
1210
1213
1211 def summaryhook(ui, repo):
1214 def summaryhook(ui, repo):
1212 if not os.path.exists(repo.join('rebasestate')):
1215 if not os.path.exists(repo.join('rebasestate')):
1213 return
1216 return
1214 try:
1217 try:
1215 state = restorestatus(repo)[2]
1218 state = restorestatus(repo)[2]
1216 except error.RepoLookupError:
1219 except error.RepoLookupError:
1217 # i18n: column positioning for "hg summary"
1220 # i18n: column positioning for "hg summary"
1218 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1221 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1219 ui.write(msg)
1222 ui.write(msg)
1220 return
1223 return
1221 numrebased = len([i for i in state.itervalues() if i >= 0])
1224 numrebased = len([i for i in state.itervalues() if i >= 0])
1222 # i18n: column positioning for "hg summary"
1225 # i18n: column positioning for "hg summary"
1223 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1226 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1224 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1227 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1225 ui.label(_('%d remaining'), 'rebase.remaining') %
1228 ui.label(_('%d remaining'), 'rebase.remaining') %
1226 (len(state) - numrebased)))
1229 (len(state) - numrebased)))
1227
1230
1228 def uisetup(ui):
1231 def uisetup(ui):
1229 #Replace pull with a decorator to provide --rebase option
1232 #Replace pull with a decorator to provide --rebase option
1230 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1233 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1231 entry[1].append(('', 'rebase', None,
1234 entry[1].append(('', 'rebase', None,
1232 _("rebase working directory to branch head")))
1235 _("rebase working directory to branch head")))
1233 entry[1].append(('t', 'tool', '',
1236 entry[1].append(('t', 'tool', '',
1234 _("specify merge tool for rebase")))
1237 _("specify merge tool for rebase")))
1235 cmdutil.summaryhooks.add('rebase', summaryhook)
1238 cmdutil.summaryhooks.add('rebase', summaryhook)
1236 cmdutil.unfinishedstates.append(
1239 cmdutil.unfinishedstates.append(
1237 ['rebasestate', False, False, _('rebase in progress'),
1240 ['rebasestate', False, False, _('rebase in progress'),
1238 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1241 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1239 # ensure rebased rev are not hidden
1242 # ensure rebased rev are not hidden
1240 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
1243 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
1241 revset.symbols['_destrebase'] = _revsetdestrebase
1244 revsetpredicate.setup()
@@ -1,721 +1,723 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant changes to another parent revision,
10 This extension allows you to transplant changes to another parent revision,
11 possibly in another repository. The transplant is done using 'diff' patches.
11 possibly in another repository. The transplant is done using 'diff' patches.
12
12
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 map from a changeset hash to its hash in the source repository.
14 map from a changeset hash to its hash in the source repository.
15 '''
15 '''
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 import os, tempfile
18 import os, tempfile
19 from mercurial.node import short
19 from mercurial.node import short
20 from mercurial import bundlerepo, hg, merge, match
20 from mercurial import bundlerepo, hg, merge, match
21 from mercurial import patch, revlog, scmutil, util, error, cmdutil
21 from mercurial import patch, revlog, scmutil, util, error, cmdutil
22 from mercurial import revset, templatekw, exchange
22 from mercurial import revset, templatekw, exchange
23 from mercurial import lock as lockmod
23 from mercurial import lock as lockmod
24
24
25 class TransplantError(error.Abort):
25 class TransplantError(error.Abort):
26 pass
26 pass
27
27
28 cmdtable = {}
28 cmdtable = {}
29 command = cmdutil.command(cmdtable)
29 command = cmdutil.command(cmdtable)
30 # Note for extension authors: ONLY specify testedwith = 'internal' for
30 # Note for extension authors: ONLY specify testedwith = 'internal' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
33 # leave the attribute unspecified.
34 testedwith = 'internal'
34 testedwith = 'internal'
35
35
36 class transplantentry(object):
36 class transplantentry(object):
37 def __init__(self, lnode, rnode):
37 def __init__(self, lnode, rnode):
38 self.lnode = lnode
38 self.lnode = lnode
39 self.rnode = rnode
39 self.rnode = rnode
40
40
41 class transplants(object):
41 class transplants(object):
42 def __init__(self, path=None, transplantfile=None, opener=None):
42 def __init__(self, path=None, transplantfile=None, opener=None):
43 self.path = path
43 self.path = path
44 self.transplantfile = transplantfile
44 self.transplantfile = transplantfile
45 self.opener = opener
45 self.opener = opener
46
46
47 if not opener:
47 if not opener:
48 self.opener = scmutil.opener(self.path)
48 self.opener = scmutil.opener(self.path)
49 self.transplants = {}
49 self.transplants = {}
50 self.dirty = False
50 self.dirty = False
51 self.read()
51 self.read()
52
52
53 def read(self):
53 def read(self):
54 abspath = os.path.join(self.path, self.transplantfile)
54 abspath = os.path.join(self.path, self.transplantfile)
55 if self.transplantfile and os.path.exists(abspath):
55 if self.transplantfile and os.path.exists(abspath):
56 for line in self.opener.read(self.transplantfile).splitlines():
56 for line in self.opener.read(self.transplantfile).splitlines():
57 lnode, rnode = map(revlog.bin, line.split(':'))
57 lnode, rnode = map(revlog.bin, line.split(':'))
58 list = self.transplants.setdefault(rnode, [])
58 list = self.transplants.setdefault(rnode, [])
59 list.append(transplantentry(lnode, rnode))
59 list.append(transplantentry(lnode, rnode))
60
60
61 def write(self):
61 def write(self):
62 if self.dirty and self.transplantfile:
62 if self.dirty and self.transplantfile:
63 if not os.path.isdir(self.path):
63 if not os.path.isdir(self.path):
64 os.mkdir(self.path)
64 os.mkdir(self.path)
65 fp = self.opener(self.transplantfile, 'w')
65 fp = self.opener(self.transplantfile, 'w')
66 for list in self.transplants.itervalues():
66 for list in self.transplants.itervalues():
67 for t in list:
67 for t in list:
68 l, r = map(revlog.hex, (t.lnode, t.rnode))
68 l, r = map(revlog.hex, (t.lnode, t.rnode))
69 fp.write(l + ':' + r + '\n')
69 fp.write(l + ':' + r + '\n')
70 fp.close()
70 fp.close()
71 self.dirty = False
71 self.dirty = False
72
72
73 def get(self, rnode):
73 def get(self, rnode):
74 return self.transplants.get(rnode) or []
74 return self.transplants.get(rnode) or []
75
75
76 def set(self, lnode, rnode):
76 def set(self, lnode, rnode):
77 list = self.transplants.setdefault(rnode, [])
77 list = self.transplants.setdefault(rnode, [])
78 list.append(transplantentry(lnode, rnode))
78 list.append(transplantentry(lnode, rnode))
79 self.dirty = True
79 self.dirty = True
80
80
81 def remove(self, transplant):
81 def remove(self, transplant):
82 list = self.transplants.get(transplant.rnode)
82 list = self.transplants.get(transplant.rnode)
83 if list:
83 if list:
84 del list[list.index(transplant)]
84 del list[list.index(transplant)]
85 self.dirty = True
85 self.dirty = True
86
86
87 class transplanter(object):
87 class transplanter(object):
88 def __init__(self, ui, repo, opts):
88 def __init__(self, ui, repo, opts):
89 self.ui = ui
89 self.ui = ui
90 self.path = repo.join('transplant')
90 self.path = repo.join('transplant')
91 self.opener = scmutil.opener(self.path)
91 self.opener = scmutil.opener(self.path)
92 self.transplants = transplants(self.path, 'transplants',
92 self.transplants = transplants(self.path, 'transplants',
93 opener=self.opener)
93 opener=self.opener)
94 def getcommiteditor():
94 def getcommiteditor():
95 editform = cmdutil.mergeeditform(repo[None], 'transplant')
95 editform = cmdutil.mergeeditform(repo[None], 'transplant')
96 return cmdutil.getcommiteditor(editform=editform, **opts)
96 return cmdutil.getcommiteditor(editform=editform, **opts)
97 self.getcommiteditor = getcommiteditor
97 self.getcommiteditor = getcommiteditor
98
98
99 def applied(self, repo, node, parent):
99 def applied(self, repo, node, parent):
100 '''returns True if a node is already an ancestor of parent
100 '''returns True if a node is already an ancestor of parent
101 or is parent or has already been transplanted'''
101 or is parent or has already been transplanted'''
102 if hasnode(repo, parent):
102 if hasnode(repo, parent):
103 parentrev = repo.changelog.rev(parent)
103 parentrev = repo.changelog.rev(parent)
104 if hasnode(repo, node):
104 if hasnode(repo, node):
105 rev = repo.changelog.rev(node)
105 rev = repo.changelog.rev(node)
106 reachable = repo.changelog.ancestors([parentrev], rev,
106 reachable = repo.changelog.ancestors([parentrev], rev,
107 inclusive=True)
107 inclusive=True)
108 if rev in reachable:
108 if rev in reachable:
109 return True
109 return True
110 for t in self.transplants.get(node):
110 for t in self.transplants.get(node):
111 # it might have been stripped
111 # it might have been stripped
112 if not hasnode(repo, t.lnode):
112 if not hasnode(repo, t.lnode):
113 self.transplants.remove(t)
113 self.transplants.remove(t)
114 return False
114 return False
115 lnoderev = repo.changelog.rev(t.lnode)
115 lnoderev = repo.changelog.rev(t.lnode)
116 if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
116 if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
117 inclusive=True):
117 inclusive=True):
118 return True
118 return True
119 return False
119 return False
120
120
121 def apply(self, repo, source, revmap, merges, opts=None):
121 def apply(self, repo, source, revmap, merges, opts=None):
122 '''apply the revisions in revmap one by one in revision order'''
122 '''apply the revisions in revmap one by one in revision order'''
123 if opts is None:
123 if opts is None:
124 opts = {}
124 opts = {}
125 revs = sorted(revmap)
125 revs = sorted(revmap)
126 p1, p2 = repo.dirstate.parents()
126 p1, p2 = repo.dirstate.parents()
127 pulls = []
127 pulls = []
128 diffopts = patch.difffeatureopts(self.ui, opts)
128 diffopts = patch.difffeatureopts(self.ui, opts)
129 diffopts.git = True
129 diffopts.git = True
130
130
131 lock = tr = None
131 lock = tr = None
132 try:
132 try:
133 lock = repo.lock()
133 lock = repo.lock()
134 tr = repo.transaction('transplant')
134 tr = repo.transaction('transplant')
135 for rev in revs:
135 for rev in revs:
136 node = revmap[rev]
136 node = revmap[rev]
137 revstr = '%s:%s' % (rev, short(node))
137 revstr = '%s:%s' % (rev, short(node))
138
138
139 if self.applied(repo, node, p1):
139 if self.applied(repo, node, p1):
140 self.ui.warn(_('skipping already applied revision %s\n') %
140 self.ui.warn(_('skipping already applied revision %s\n') %
141 revstr)
141 revstr)
142 continue
142 continue
143
143
144 parents = source.changelog.parents(node)
144 parents = source.changelog.parents(node)
145 if not (opts.get('filter') or opts.get('log')):
145 if not (opts.get('filter') or opts.get('log')):
146 # If the changeset parent is the same as the
146 # If the changeset parent is the same as the
147 # wdir's parent, just pull it.
147 # wdir's parent, just pull it.
148 if parents[0] == p1:
148 if parents[0] == p1:
149 pulls.append(node)
149 pulls.append(node)
150 p1 = node
150 p1 = node
151 continue
151 continue
152 if pulls:
152 if pulls:
153 if source != repo:
153 if source != repo:
154 exchange.pull(repo, source.peer(), heads=pulls)
154 exchange.pull(repo, source.peer(), heads=pulls)
155 merge.update(repo, pulls[-1], False, False)
155 merge.update(repo, pulls[-1], False, False)
156 p1, p2 = repo.dirstate.parents()
156 p1, p2 = repo.dirstate.parents()
157 pulls = []
157 pulls = []
158
158
159 domerge = False
159 domerge = False
160 if node in merges:
160 if node in merges:
161 # pulling all the merge revs at once would mean we
161 # pulling all the merge revs at once would mean we
162 # couldn't transplant after the latest even if
162 # couldn't transplant after the latest even if
163 # transplants before them fail.
163 # transplants before them fail.
164 domerge = True
164 domerge = True
165 if not hasnode(repo, node):
165 if not hasnode(repo, node):
166 exchange.pull(repo, source.peer(), heads=[node])
166 exchange.pull(repo, source.peer(), heads=[node])
167
167
168 skipmerge = False
168 skipmerge = False
169 if parents[1] != revlog.nullid:
169 if parents[1] != revlog.nullid:
170 if not opts.get('parent'):
170 if not opts.get('parent'):
171 self.ui.note(_('skipping merge changeset %s:%s\n')
171 self.ui.note(_('skipping merge changeset %s:%s\n')
172 % (rev, short(node)))
172 % (rev, short(node)))
173 skipmerge = True
173 skipmerge = True
174 else:
174 else:
175 parent = source.lookup(opts['parent'])
175 parent = source.lookup(opts['parent'])
176 if parent not in parents:
176 if parent not in parents:
177 raise error.Abort(_('%s is not a parent of %s') %
177 raise error.Abort(_('%s is not a parent of %s') %
178 (short(parent), short(node)))
178 (short(parent), short(node)))
179 else:
179 else:
180 parent = parents[0]
180 parent = parents[0]
181
181
182 if skipmerge:
182 if skipmerge:
183 patchfile = None
183 patchfile = None
184 else:
184 else:
185 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
185 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
186 fp = os.fdopen(fd, 'w')
186 fp = os.fdopen(fd, 'w')
187 gen = patch.diff(source, parent, node, opts=diffopts)
187 gen = patch.diff(source, parent, node, opts=diffopts)
188 for chunk in gen:
188 for chunk in gen:
189 fp.write(chunk)
189 fp.write(chunk)
190 fp.close()
190 fp.close()
191
191
192 del revmap[rev]
192 del revmap[rev]
193 if patchfile or domerge:
193 if patchfile or domerge:
194 try:
194 try:
195 try:
195 try:
196 n = self.applyone(repo, node,
196 n = self.applyone(repo, node,
197 source.changelog.read(node),
197 source.changelog.read(node),
198 patchfile, merge=domerge,
198 patchfile, merge=domerge,
199 log=opts.get('log'),
199 log=opts.get('log'),
200 filter=opts.get('filter'))
200 filter=opts.get('filter'))
201 except TransplantError:
201 except TransplantError:
202 # Do not rollback, it is up to the user to
202 # Do not rollback, it is up to the user to
203 # fix the merge or cancel everything
203 # fix the merge or cancel everything
204 tr.close()
204 tr.close()
205 raise
205 raise
206 if n and domerge:
206 if n and domerge:
207 self.ui.status(_('%s merged at %s\n') % (revstr,
207 self.ui.status(_('%s merged at %s\n') % (revstr,
208 short(n)))
208 short(n)))
209 elif n:
209 elif n:
210 self.ui.status(_('%s transplanted to %s\n')
210 self.ui.status(_('%s transplanted to %s\n')
211 % (short(node),
211 % (short(node),
212 short(n)))
212 short(n)))
213 finally:
213 finally:
214 if patchfile:
214 if patchfile:
215 os.unlink(patchfile)
215 os.unlink(patchfile)
216 tr.close()
216 tr.close()
217 if pulls:
217 if pulls:
218 exchange.pull(repo, source.peer(), heads=pulls)
218 exchange.pull(repo, source.peer(), heads=pulls)
219 merge.update(repo, pulls[-1], False, False)
219 merge.update(repo, pulls[-1], False, False)
220 finally:
220 finally:
221 self.saveseries(revmap, merges)
221 self.saveseries(revmap, merges)
222 self.transplants.write()
222 self.transplants.write()
223 if tr:
223 if tr:
224 tr.release()
224 tr.release()
225 if lock:
225 if lock:
226 lock.release()
226 lock.release()
227
227
228 def filter(self, filter, node, changelog, patchfile):
228 def filter(self, filter, node, changelog, patchfile):
229 '''arbitrarily rewrite changeset before applying it'''
229 '''arbitrarily rewrite changeset before applying it'''
230
230
231 self.ui.status(_('filtering %s\n') % patchfile)
231 self.ui.status(_('filtering %s\n') % patchfile)
232 user, date, msg = (changelog[1], changelog[2], changelog[4])
232 user, date, msg = (changelog[1], changelog[2], changelog[4])
233 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
233 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
234 fp = os.fdopen(fd, 'w')
234 fp = os.fdopen(fd, 'w')
235 fp.write("# HG changeset patch\n")
235 fp.write("# HG changeset patch\n")
236 fp.write("# User %s\n" % user)
236 fp.write("# User %s\n" % user)
237 fp.write("# Date %d %d\n" % date)
237 fp.write("# Date %d %d\n" % date)
238 fp.write(msg + '\n')
238 fp.write(msg + '\n')
239 fp.close()
239 fp.close()
240
240
241 try:
241 try:
242 self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
242 self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
243 util.shellquote(patchfile)),
243 util.shellquote(patchfile)),
244 environ={'HGUSER': changelog[1],
244 environ={'HGUSER': changelog[1],
245 'HGREVISION': revlog.hex(node),
245 'HGREVISION': revlog.hex(node),
246 },
246 },
247 onerr=error.Abort, errprefix=_('filter failed'))
247 onerr=error.Abort, errprefix=_('filter failed'))
248 user, date, msg = self.parselog(file(headerfile))[1:4]
248 user, date, msg = self.parselog(file(headerfile))[1:4]
249 finally:
249 finally:
250 os.unlink(headerfile)
250 os.unlink(headerfile)
251
251
252 return (user, date, msg)
252 return (user, date, msg)
253
253
254 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
254 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
255 filter=None):
255 filter=None):
256 '''apply the patch in patchfile to the repository as a transplant'''
256 '''apply the patch in patchfile to the repository as a transplant'''
257 (manifest, user, (time, timezone), files, message) = cl[:5]
257 (manifest, user, (time, timezone), files, message) = cl[:5]
258 date = "%d %d" % (time, timezone)
258 date = "%d %d" % (time, timezone)
259 extra = {'transplant_source': node}
259 extra = {'transplant_source': node}
260 if filter:
260 if filter:
261 (user, date, message) = self.filter(filter, node, cl, patchfile)
261 (user, date, message) = self.filter(filter, node, cl, patchfile)
262
262
263 if log:
263 if log:
264 # we don't translate messages inserted into commits
264 # we don't translate messages inserted into commits
265 message += '\n(transplanted from %s)' % revlog.hex(node)
265 message += '\n(transplanted from %s)' % revlog.hex(node)
266
266
267 self.ui.status(_('applying %s\n') % short(node))
267 self.ui.status(_('applying %s\n') % short(node))
268 self.ui.note('%s %s\n%s\n' % (user, date, message))
268 self.ui.note('%s %s\n%s\n' % (user, date, message))
269
269
270 if not patchfile and not merge:
270 if not patchfile and not merge:
271 raise error.Abort(_('can only omit patchfile if merging'))
271 raise error.Abort(_('can only omit patchfile if merging'))
272 if patchfile:
272 if patchfile:
273 try:
273 try:
274 files = set()
274 files = set()
275 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
275 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
276 files = list(files)
276 files = list(files)
277 except Exception as inst:
277 except Exception as inst:
278 seriespath = os.path.join(self.path, 'series')
278 seriespath = os.path.join(self.path, 'series')
279 if os.path.exists(seriespath):
279 if os.path.exists(seriespath):
280 os.unlink(seriespath)
280 os.unlink(seriespath)
281 p1 = repo.dirstate.p1()
281 p1 = repo.dirstate.p1()
282 p2 = node
282 p2 = node
283 self.log(user, date, message, p1, p2, merge=merge)
283 self.log(user, date, message, p1, p2, merge=merge)
284 self.ui.write(str(inst) + '\n')
284 self.ui.write(str(inst) + '\n')
285 raise TransplantError(_('fix up the merge and run '
285 raise TransplantError(_('fix up the merge and run '
286 'hg transplant --continue'))
286 'hg transplant --continue'))
287 else:
287 else:
288 files = None
288 files = None
289 if merge:
289 if merge:
290 p1, p2 = repo.dirstate.parents()
290 p1, p2 = repo.dirstate.parents()
291 repo.setparents(p1, node)
291 repo.setparents(p1, node)
292 m = match.always(repo.root, '')
292 m = match.always(repo.root, '')
293 else:
293 else:
294 m = match.exact(repo.root, '', files)
294 m = match.exact(repo.root, '', files)
295
295
296 n = repo.commit(message, user, date, extra=extra, match=m,
296 n = repo.commit(message, user, date, extra=extra, match=m,
297 editor=self.getcommiteditor())
297 editor=self.getcommiteditor())
298 if not n:
298 if not n:
299 self.ui.warn(_('skipping emptied changeset %s\n') % short(node))
299 self.ui.warn(_('skipping emptied changeset %s\n') % short(node))
300 return None
300 return None
301 if not merge:
301 if not merge:
302 self.transplants.set(n, node)
302 self.transplants.set(n, node)
303
303
304 return n
304 return n
305
305
306 def resume(self, repo, source, opts):
306 def resume(self, repo, source, opts):
307 '''recover last transaction and apply remaining changesets'''
307 '''recover last transaction and apply remaining changesets'''
308 if os.path.exists(os.path.join(self.path, 'journal')):
308 if os.path.exists(os.path.join(self.path, 'journal')):
309 n, node = self.recover(repo, source, opts)
309 n, node = self.recover(repo, source, opts)
310 if n:
310 if n:
311 self.ui.status(_('%s transplanted as %s\n') % (short(node),
311 self.ui.status(_('%s transplanted as %s\n') % (short(node),
312 short(n)))
312 short(n)))
313 else:
313 else:
314 self.ui.status(_('%s skipped due to empty diff\n')
314 self.ui.status(_('%s skipped due to empty diff\n')
315 % (short(node),))
315 % (short(node),))
316 seriespath = os.path.join(self.path, 'series')
316 seriespath = os.path.join(self.path, 'series')
317 if not os.path.exists(seriespath):
317 if not os.path.exists(seriespath):
318 self.transplants.write()
318 self.transplants.write()
319 return
319 return
320 nodes, merges = self.readseries()
320 nodes, merges = self.readseries()
321 revmap = {}
321 revmap = {}
322 for n in nodes:
322 for n in nodes:
323 revmap[source.changelog.rev(n)] = n
323 revmap[source.changelog.rev(n)] = n
324 os.unlink(seriespath)
324 os.unlink(seriespath)
325
325
326 self.apply(repo, source, revmap, merges, opts)
326 self.apply(repo, source, revmap, merges, opts)
327
327
328 def recover(self, repo, source, opts):
328 def recover(self, repo, source, opts):
329 '''commit working directory using journal metadata'''
329 '''commit working directory using journal metadata'''
330 node, user, date, message, parents = self.readlog()
330 node, user, date, message, parents = self.readlog()
331 merge = False
331 merge = False
332
332
333 if not user or not date or not message or not parents[0]:
333 if not user or not date or not message or not parents[0]:
334 raise error.Abort(_('transplant log file is corrupt'))
334 raise error.Abort(_('transplant log file is corrupt'))
335
335
336 parent = parents[0]
336 parent = parents[0]
337 if len(parents) > 1:
337 if len(parents) > 1:
338 if opts.get('parent'):
338 if opts.get('parent'):
339 parent = source.lookup(opts['parent'])
339 parent = source.lookup(opts['parent'])
340 if parent not in parents:
340 if parent not in parents:
341 raise error.Abort(_('%s is not a parent of %s') %
341 raise error.Abort(_('%s is not a parent of %s') %
342 (short(parent), short(node)))
342 (short(parent), short(node)))
343 else:
343 else:
344 merge = True
344 merge = True
345
345
346 extra = {'transplant_source': node}
346 extra = {'transplant_source': node}
347 try:
347 try:
348 p1, p2 = repo.dirstate.parents()
348 p1, p2 = repo.dirstate.parents()
349 if p1 != parent:
349 if p1 != parent:
350 raise error.Abort(_('working directory not at transplant '
350 raise error.Abort(_('working directory not at transplant '
351 'parent %s') % revlog.hex(parent))
351 'parent %s') % revlog.hex(parent))
352 if merge:
352 if merge:
353 repo.setparents(p1, parents[1])
353 repo.setparents(p1, parents[1])
354 modified, added, removed, deleted = repo.status()[:4]
354 modified, added, removed, deleted = repo.status()[:4]
355 if merge or modified or added or removed or deleted:
355 if merge or modified or added or removed or deleted:
356 n = repo.commit(message, user, date, extra=extra,
356 n = repo.commit(message, user, date, extra=extra,
357 editor=self.getcommiteditor())
357 editor=self.getcommiteditor())
358 if not n:
358 if not n:
359 raise error.Abort(_('commit failed'))
359 raise error.Abort(_('commit failed'))
360 if not merge:
360 if not merge:
361 self.transplants.set(n, node)
361 self.transplants.set(n, node)
362 else:
362 else:
363 n = None
363 n = None
364 self.unlog()
364 self.unlog()
365
365
366 return n, node
366 return n, node
367 finally:
367 finally:
368 # TODO: get rid of this meaningless try/finally enclosing.
368 # TODO: get rid of this meaningless try/finally enclosing.
369 # this is kept only to reduce changes in a patch.
369 # this is kept only to reduce changes in a patch.
370 pass
370 pass
371
371
372 def readseries(self):
372 def readseries(self):
373 nodes = []
373 nodes = []
374 merges = []
374 merges = []
375 cur = nodes
375 cur = nodes
376 for line in self.opener.read('series').splitlines():
376 for line in self.opener.read('series').splitlines():
377 if line.startswith('# Merges'):
377 if line.startswith('# Merges'):
378 cur = merges
378 cur = merges
379 continue
379 continue
380 cur.append(revlog.bin(line))
380 cur.append(revlog.bin(line))
381
381
382 return (nodes, merges)
382 return (nodes, merges)
383
383
384 def saveseries(self, revmap, merges):
384 def saveseries(self, revmap, merges):
385 if not revmap:
385 if not revmap:
386 return
386 return
387
387
388 if not os.path.isdir(self.path):
388 if not os.path.isdir(self.path):
389 os.mkdir(self.path)
389 os.mkdir(self.path)
390 series = self.opener('series', 'w')
390 series = self.opener('series', 'w')
391 for rev in sorted(revmap):
391 for rev in sorted(revmap):
392 series.write(revlog.hex(revmap[rev]) + '\n')
392 series.write(revlog.hex(revmap[rev]) + '\n')
393 if merges:
393 if merges:
394 series.write('# Merges\n')
394 series.write('# Merges\n')
395 for m in merges:
395 for m in merges:
396 series.write(revlog.hex(m) + '\n')
396 series.write(revlog.hex(m) + '\n')
397 series.close()
397 series.close()
398
398
    def parselog(self, fp):
        """Parse a journal file written by log().

        Returns (node, user, date, message, parents).  Header lines of
        the form '# Key value' carry the metadata; the first line that
        is not a '# ' header starts the commit message, which runs to
        the end of the file.

        Raises error.Abort when the user or date header is missing.
        """
        parents = []
        message = []
        node = revlog.nullid
        inmsg = False
        user = None
        date = None
        for line in fp.read().splitlines():
            if inmsg:
                message.append(line)
            elif line.startswith('# User '):
                user = line[7:]
            elif line.startswith('# Date '):
                date = line[7:]
            elif line.startswith('# Node ID '):
                node = revlog.bin(line[10:])
            elif line.startswith('# Parent '):
                # may appear twice when the journal records a merge
                parents.append(revlog.bin(line[9:]))
            elif not line.startswith('# '):
                # first body line: everything from here on is the message
                inmsg = True
                message.append(line)
        if None in (user, date):
            raise error.Abort(_("filter corrupted changeset (no user or date)"))
        return (node, user, date, '\n'.join(message), parents)
423
423
424 def log(self, user, date, message, p1, p2, merge=False):
424 def log(self, user, date, message, p1, p2, merge=False):
425 '''journal changelog metadata for later recover'''
425 '''journal changelog metadata for later recover'''
426
426
427 if not os.path.isdir(self.path):
427 if not os.path.isdir(self.path):
428 os.mkdir(self.path)
428 os.mkdir(self.path)
429 fp = self.opener('journal', 'w')
429 fp = self.opener('journal', 'w')
430 fp.write('# User %s\n' % user)
430 fp.write('# User %s\n' % user)
431 fp.write('# Date %s\n' % date)
431 fp.write('# Date %s\n' % date)
432 fp.write('# Node ID %s\n' % revlog.hex(p2))
432 fp.write('# Node ID %s\n' % revlog.hex(p2))
433 fp.write('# Parent ' + revlog.hex(p1) + '\n')
433 fp.write('# Parent ' + revlog.hex(p1) + '\n')
434 if merge:
434 if merge:
435 fp.write('# Parent ' + revlog.hex(p2) + '\n')
435 fp.write('# Parent ' + revlog.hex(p2) + '\n')
436 fp.write(message.rstrip() + '\n')
436 fp.write(message.rstrip() + '\n')
437 fp.close()
437 fp.close()
438
438
    def readlog(self):
        # parse the journal written by log(); returns
        # (node, user, date, message, parents)
        return self.parselog(self.opener('journal'))
441
441
442 def unlog(self):
442 def unlog(self):
443 '''remove changelog journal'''
443 '''remove changelog journal'''
444 absdst = os.path.join(self.path, 'journal')
444 absdst = os.path.join(self.path, 'journal')
445 if os.path.exists(absdst):
445 if os.path.exists(absdst):
446 os.unlink(absdst)
446 os.unlink(absdst)
447
447
448 def transplantfilter(self, repo, source, root):
448 def transplantfilter(self, repo, source, root):
449 def matchfn(node):
449 def matchfn(node):
450 if self.applied(repo, node, root):
450 if self.applied(repo, node, root):
451 return False
451 return False
452 if source.changelog.parents(node)[1] != revlog.nullid:
452 if source.changelog.parents(node)[1] != revlog.nullid:
453 return False
453 return False
454 extra = source.changelog.read(node)[5]
454 extra = source.changelog.read(node)[5]
455 cnode = extra.get('transplant_source')
455 cnode = extra.get('transplant_source')
456 if cnode and self.applied(repo, cnode, root):
456 if cnode and self.applied(repo, cnode, root):
457 return False
457 return False
458 return True
458 return True
459
459
460 return matchfn
460 return matchfn
461
461
def hasnode(repo, node):
    """Tell whether *node* is known to repo's changelog.

    Lookup failures are reported as False rather than raised.
    """
    try:
        rev = repo.changelog.rev(node)
    except error.RevlogError:
        return False
    return rev is not None
467
467
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets

    Shows each candidate changeset in turn and prompts the user for an
    action.  Returns a (transplants, merges) pair of node lists;
    quitting returns two empty tuples so nothing is applied.
    '''
    displayer = cmdutil.show_changeset(ui, repo, opts)
    transplants = []
    merges = []
    # '$$'-separated prompt: each segment declares one choice, with '&'
    # marking the response key (see ui.promptchoice)
    prompt = _('apply changeset? [ynmpcq?]:'
               '$$ &yes, transplant this changeset'
               '$$ &no, skip this changeset'
               '$$ &merge at this changeset'
               '$$ show &patch'
               '$$ &commit selected changesets'
               '$$ &quit and cancel transplant'
               '$$ &? (show this help)')
    for node in nodes:
        displayer.show(repo[node])
        action = None
        # '?' and 'p' are informational: they display and then re-prompt
        # by resetting action to None
        while not action:
            action = 'ynmpcq?'[ui.promptchoice(prompt)]
            if action == '?':
                for c, t in ui.extractchoices(prompt)[1]:
                    ui.write('%s: %s\n' % (c, t))
                action = None
            elif action == 'p':
                # show the diff against the first parent
                parent = repo.changelog.parents(node)[0]
                for chunk in patch.diff(repo, parent, node):
                    ui.write(chunk)
                action = None
        if action == 'y':
            transplants.append(node)
        elif action == 'm':
            merges.append(node)
        elif action == 'c':
            # commit what was selected so far, skip the rest
            break
        elif action == 'q':
            # cancel: discard all selections
            transplants = ()
            merges = ()
            break
    displayer.close()
    return (transplants, merges)
507
507
@command('transplant',
    [('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
    ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
    ('a', 'all', None, _('pull all changesets up to the --branch revisions')),
    ('p', 'prune', [], _('skip over REV'), _('REV')),
    ('m', 'merge', [], _('merge at REV'), _('REV')),
    ('', 'parent', '',
     _('parent to choose when transplanting merge'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('', 'log', None, _('append transplant info to log message')),
    ('c', 'continue', None, _('continue last transplant session '
                              'after fixing conflicts')),
    ('', 'filter', '',
     _('filter changesets through command'), _('CMD'))],
    _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
      '[-m REV] [REV]...'))
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. The changesets
    are copied and will thus appear twice in the history with different
    identities.

    Consider using the graft command if everything is inside the same
    repository - it will use merges and will usually give a better result.
    Use the rebase extension if the changesets are unpublished and you want
    to move them instead of copying them.

    If --log is specified, log messages will have a comment appended
    of the form::

      (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message as
    $1 and the patch as $2.

    --source/-s specifies another repository to use for selecting changesets,
    just as if it temporarily had been pulled.
    If --branch/-b is specified, these revisions will be used as
    heads when deciding which changesets to transplant, just as if only
    these revisions had been pulled.
    If --all/-a is specified, all the revisions up to the heads specified
    with --branch will be transplanted.

    Example:

    - transplant all changes up to REV on top of your current revision::

        hg transplant --branch REV --all

    You can optionally mark selected transplanted changesets as merge
    changesets. You will not be prompted to transplant any ancestors
    of a merged transplant, and you can merge descendants of them
    normally instead of transplanting them.

    Merge changesets may be transplanted directly by specifying the
    proper parent changeset by calling :hg:`transplant --parent`.

    If no merges or revisions are provided, :hg:`transplant` will
    start an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand
    and then resume where you left off by calling :hg:`transplant
    --continue/-c`.
    '''
    # Thin locking wrapper: the real work happens in _dotransplant().
    wlock = None
    try:
        # hold the working-directory lock for the whole operation
        wlock = repo.wlock()
        return _dotransplant(ui, repo, *revs, **opts)
    finally:
        # release() tolerates wlock still being None if wlock() raised
        lockmod.release(wlock)
581
581
def _dotransplant(ui, repo, *revs, **opts):
    # Implementation of the transplant command; the caller holds the
    # working-directory lock.

    def incwalk(repo, csets, match=util.always):
        # yield incoming changesets (from a remote/bundle repo) that pass
        # the match predicate
        for node in csets:
            if match(node):
                yield node

    def transplantwalk(repo, dest, heads, match=util.always):
        '''Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used.'''
        if not heads:
            heads = repo.heads()
        ancestors = []
        ctx = repo[dest]
        for head in heads:
            ancestors.append(ctx.ancestor(repo[head]).node())
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # validate mutually exclusive / required option combinations
        if opts.get('continue'):
            if opts.get('branch') or opts.get('all') or opts.get('merge'):
                raise error.Abort(_('--continue is incompatible with '
                                   '--branch, --all and --merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise error.Abort(_('no source URL, branch revision, or revision '
                               'list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise error.Abort(_('--all requires a branch revision'))
            if revs:
                raise error.Abort(_('--all is incompatible with a '
                                   'revision list'))

    checkopts(opts, revs)

    # fall back to config values for options not given on the command line
    if not opts.get('log'):
        # deprecated config: transplant.log
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        # deprecated config: transplant.filter
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo, opts)

    cmdutil.checkunfinished(repo)
    p1, p2 = repo.dirstate.parents()
    if len(repo) > 0 and p1 == revlog.nullid:
        raise error.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        # a fresh run requires a clean, non-merge working directory
        if p2 != revlog.nullid:
            raise error.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise error.Abort(_('outstanding local changes'))

    sourcerepo = opts.get('source')
    if sourcerepo:
        # pull the candidate changesets from the remote into a temporary
        # bundle repository; cleanupfn tears it down afterwards
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = map(peer.lookup, opts.get('branch', ()))
        target = set(heads)
        for r in revs:
            try:
                target.add(peer.lookup(r))
            except error.RepoError:
                # unknown revision on the remote; resolved later locally
                pass
        source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
                                    onlyheads=sorted(target), force=True)
    else:
        # transplant within the local repository
        source = repo
        heads = map(source.lookup, opts.get('branch', ()))
        cleanupfn = None

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = set(source.lookup(r)
                        for r in scmutil.revrange(source, opts.get('prune')))
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = map(source.lookup, opts.get('merge', ()))
        revmap = {}
        if revs:
            # explicit revisions given on the command line
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            # walk candidates; either take them all or browse interactively
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, heads,
                                                match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
        for r in revs:
            revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if cleanupfn:
            cleanupfn()
695
695
# delayed registrar: predicates decorated below are registered with the
# revset machinery when extsetup() calls revsetpredicate.setup()
revsetpredicate = revset.extpredicate()

@revsetpredicate('transplanted([set])')
def revsettransplanted(repo, subset, x):
    """Transplanted changesets in set, or all transplanted changesets.
    """
    s = revset.getset(repo, subset, x) if x else subset
    return revset.baseset([r for r in s
                           if repo[r].extra().get('transplant_source')])
706
708
def kwtransplanted(repo, ctx, **args):
    """:transplanted: String. The node identifier of the transplanted
    changeset if any."""
    n = ctx.extra().get('transplant_source')
    # explicit conditional instead of the fragile 'x and y or z' idiom:
    # hex of the source node, or '' when the changeset was not transplanted
    return revlog.hex(n) if n else ''
712
714
def extsetup(ui):
    # register the revset predicates declared via the delayed registrar
    revsetpredicate.setup()
    # expose {transplanted} as a template keyword
    templatekw.keywords['transplanted'] = kwtransplanted
    # teach 'hg status'/checkunfinished about an interrupted transplant
    # (detected by the presence of the 'series' file)
    cmdutil.unfinishedstates.append(
        ['series', True, False, _('transplant in progress'),
         _("use 'hg transplant --continue' or 'hg update' to abort")])
719
721
# tell hggettext to extract docstrings from these functions:
# (their docstrings appear in user-visible help/keyword output)
i18nfunctions = [revsettransplanted, kwtransplanted]
@@ -1,3705 +1,3730 b''
1 # revset.py - revision set queries for mercurial
1 # revset.py - revision set queries for mercurial
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11 import re
11 import re
12
12
13 from .i18n import _
13 from .i18n import _
14 from . import (
14 from . import (
15 destutil,
15 destutil,
16 encoding,
16 encoding,
17 error,
17 error,
18 hbisect,
18 hbisect,
19 match as matchmod,
19 match as matchmod,
20 node,
20 node,
21 obsolete as obsmod,
21 obsolete as obsmod,
22 parser,
22 parser,
23 pathutil,
23 pathutil,
24 phases,
24 phases,
25 registrar,
25 registrar,
26 repoview,
26 repoview,
27 util,
27 util,
28 )
28 )
29
29
30 def _revancestors(repo, revs, followfirst):
30 def _revancestors(repo, revs, followfirst):
31 """Like revlog.ancestors(), but supports followfirst."""
31 """Like revlog.ancestors(), but supports followfirst."""
32 if followfirst:
32 if followfirst:
33 cut = 1
33 cut = 1
34 else:
34 else:
35 cut = None
35 cut = None
36 cl = repo.changelog
36 cl = repo.changelog
37
37
38 def iterate():
38 def iterate():
39 revs.sort(reverse=True)
39 revs.sort(reverse=True)
40 irevs = iter(revs)
40 irevs = iter(revs)
41 h = []
41 h = []
42
42
43 inputrev = next(irevs, None)
43 inputrev = next(irevs, None)
44 if inputrev is not None:
44 if inputrev is not None:
45 heapq.heappush(h, -inputrev)
45 heapq.heappush(h, -inputrev)
46
46
47 seen = set()
47 seen = set()
48 while h:
48 while h:
49 current = -heapq.heappop(h)
49 current = -heapq.heappop(h)
50 if current == inputrev:
50 if current == inputrev:
51 inputrev = next(irevs, None)
51 inputrev = next(irevs, None)
52 if inputrev is not None:
52 if inputrev is not None:
53 heapq.heappush(h, -inputrev)
53 heapq.heappush(h, -inputrev)
54 if current not in seen:
54 if current not in seen:
55 seen.add(current)
55 seen.add(current)
56 yield current
56 yield current
57 for parent in cl.parentrevs(current)[:cut]:
57 for parent in cl.parentrevs(current)[:cut]:
58 if parent != node.nullrev:
58 if parent != node.nullrev:
59 heapq.heappush(h, -parent)
59 heapq.heappush(h, -parent)
60
60
61 return generatorset(iterate(), iterasc=False)
61 return generatorset(iterate(), iterasc=False)
62
62
63 def _revdescendants(repo, revs, followfirst):
63 def _revdescendants(repo, revs, followfirst):
64 """Like revlog.descendants() but supports followfirst."""
64 """Like revlog.descendants() but supports followfirst."""
65 if followfirst:
65 if followfirst:
66 cut = 1
66 cut = 1
67 else:
67 else:
68 cut = None
68 cut = None
69
69
70 def iterate():
70 def iterate():
71 cl = repo.changelog
71 cl = repo.changelog
72 # XXX this should be 'parentset.min()' assuming 'parentset' is a
72 # XXX this should be 'parentset.min()' assuming 'parentset' is a
73 # smartset (and if it is not, it should.)
73 # smartset (and if it is not, it should.)
74 first = min(revs)
74 first = min(revs)
75 nullrev = node.nullrev
75 nullrev = node.nullrev
76 if first == nullrev:
76 if first == nullrev:
77 # Are there nodes with a null first parent and a non-null
77 # Are there nodes with a null first parent and a non-null
78 # second one? Maybe. Do we care? Probably not.
78 # second one? Maybe. Do we care? Probably not.
79 for i in cl:
79 for i in cl:
80 yield i
80 yield i
81 else:
81 else:
82 seen = set(revs)
82 seen = set(revs)
83 for i in cl.revs(first + 1):
83 for i in cl.revs(first + 1):
84 for x in cl.parentrevs(i)[:cut]:
84 for x in cl.parentrevs(i)[:cut]:
85 if x != nullrev and x in seen:
85 if x != nullrev and x in seen:
86 seen.add(i)
86 seen.add(i)
87 yield i
87 yield i
88 break
88 break
89
89
90 return generatorset(iterate(), iterasc=True)
90 return generatorset(iterate(), iterasc=True)
91
91
92 def _reachablerootspure(repo, minroot, roots, heads, includepath):
92 def _reachablerootspure(repo, minroot, roots, heads, includepath):
93 """return (heads(::<roots> and ::<heads>))
93 """return (heads(::<roots> and ::<heads>))
94
94
95 If includepath is True, return (<roots>::<heads>)."""
95 If includepath is True, return (<roots>::<heads>)."""
96 if not roots:
96 if not roots:
97 return []
97 return []
98 parentrevs = repo.changelog.parentrevs
98 parentrevs = repo.changelog.parentrevs
99 roots = set(roots)
99 roots = set(roots)
100 visit = list(heads)
100 visit = list(heads)
101 reachable = set()
101 reachable = set()
102 seen = {}
102 seen = {}
103 # prefetch all the things! (because python is slow)
103 # prefetch all the things! (because python is slow)
104 reached = reachable.add
104 reached = reachable.add
105 dovisit = visit.append
105 dovisit = visit.append
106 nextvisit = visit.pop
106 nextvisit = visit.pop
107 # open-code the post-order traversal due to the tiny size of
107 # open-code the post-order traversal due to the tiny size of
108 # sys.getrecursionlimit()
108 # sys.getrecursionlimit()
109 while visit:
109 while visit:
110 rev = nextvisit()
110 rev = nextvisit()
111 if rev in roots:
111 if rev in roots:
112 reached(rev)
112 reached(rev)
113 if not includepath:
113 if not includepath:
114 continue
114 continue
115 parents = parentrevs(rev)
115 parents = parentrevs(rev)
116 seen[rev] = parents
116 seen[rev] = parents
117 for parent in parents:
117 for parent in parents:
118 if parent >= minroot and parent not in seen:
118 if parent >= minroot and parent not in seen:
119 dovisit(parent)
119 dovisit(parent)
120 if not reachable:
120 if not reachable:
121 return baseset()
121 return baseset()
122 if not includepath:
122 if not includepath:
123 return reachable
123 return reachable
124 for rev in sorted(seen):
124 for rev in sorted(seen):
125 for parent in seen[rev]:
125 for parent in seen[rev]:
126 if parent in reachable:
126 if parent in reachable:
127 reached(rev)
127 reached(rev)
128 return reachable
128 return reachable
129
129
130 def reachableroots(repo, roots, heads, includepath=False):
130 def reachableroots(repo, roots, heads, includepath=False):
131 """return (heads(::<roots> and ::<heads>))
131 """return (heads(::<roots> and ::<heads>))
132
132
133 If includepath is True, return (<roots>::<heads>)."""
133 If includepath is True, return (<roots>::<heads>)."""
134 if not roots:
134 if not roots:
135 return baseset()
135 return baseset()
136 minroot = roots.min()
136 minroot = roots.min()
137 roots = list(roots)
137 roots = list(roots)
138 heads = list(heads)
138 heads = list(heads)
139 try:
139 try:
140 revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
140 revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
141 except AttributeError:
141 except AttributeError:
142 revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
142 revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
143 revs = baseset(revs)
143 revs = baseset(revs)
144 revs.sort()
144 revs.sort()
145 return revs
145 return revs
146
146
147 elements = {
147 elements = {
148 # token-type: binding-strength, primary, prefix, infix, suffix
148 # token-type: binding-strength, primary, prefix, infix, suffix
149 "(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None),
149 "(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None),
150 "##": (20, None, None, ("_concat", 20), None),
150 "##": (20, None, None, ("_concat", 20), None),
151 "~": (18, None, None, ("ancestor", 18), None),
151 "~": (18, None, None, ("ancestor", 18), None),
152 "^": (18, None, None, ("parent", 18), ("parentpost", 18)),
152 "^": (18, None, None, ("parent", 18), ("parentpost", 18)),
153 "-": (5, None, ("negate", 19), ("minus", 5), None),
153 "-": (5, None, ("negate", 19), ("minus", 5), None),
154 "::": (17, None, ("dagrangepre", 17), ("dagrange", 17),
154 "::": (17, None, ("dagrangepre", 17), ("dagrange", 17),
155 ("dagrangepost", 17)),
155 ("dagrangepost", 17)),
156 "..": (17, None, ("dagrangepre", 17), ("dagrange", 17),
156 "..": (17, None, ("dagrangepre", 17), ("dagrange", 17),
157 ("dagrangepost", 17)),
157 ("dagrangepost", 17)),
158 ":": (15, "rangeall", ("rangepre", 15), ("range", 15), ("rangepost", 15)),
158 ":": (15, "rangeall", ("rangepre", 15), ("range", 15), ("rangepost", 15)),
159 "not": (10, None, ("not", 10), None, None),
159 "not": (10, None, ("not", 10), None, None),
160 "!": (10, None, ("not", 10), None, None),
160 "!": (10, None, ("not", 10), None, None),
161 "and": (5, None, None, ("and", 5), None),
161 "and": (5, None, None, ("and", 5), None),
162 "&": (5, None, None, ("and", 5), None),
162 "&": (5, None, None, ("and", 5), None),
163 "%": (5, None, None, ("only", 5), ("onlypost", 5)),
163 "%": (5, None, None, ("only", 5), ("onlypost", 5)),
164 "or": (4, None, None, ("or", 4), None),
164 "or": (4, None, None, ("or", 4), None),
165 "|": (4, None, None, ("or", 4), None),
165 "|": (4, None, None, ("or", 4), None),
166 "+": (4, None, None, ("or", 4), None),
166 "+": (4, None, None, ("or", 4), None),
167 "=": (3, None, None, ("keyvalue", 3), None),
167 "=": (3, None, None, ("keyvalue", 3), None),
168 ",": (2, None, None, ("list", 2), None),
168 ",": (2, None, None, ("list", 2), None),
169 ")": (0, None, None, None, None),
169 ")": (0, None, None, None, None),
170 "symbol": (0, "symbol", None, None, None),
170 "symbol": (0, "symbol", None, None, None),
171 "string": (0, "string", None, None, None),
171 "string": (0, "string", None, None, None),
172 "end": (0, None, None, None, None),
172 "end": (0, None, None, None, None),
173 }
173 }
174
174
175 keywords = set(['and', 'or', 'not'])
175 keywords = set(['and', 'or', 'not'])
176
176
177 # default set of valid characters for the initial letter of symbols
177 # default set of valid characters for the initial letter of symbols
178 _syminitletters = set(c for c in [chr(i) for i in xrange(256)]
178 _syminitletters = set(c for c in [chr(i) for i in xrange(256)]
179 if c.isalnum() or c in '._@' or ord(c) > 127)
179 if c.isalnum() or c in '._@' or ord(c) > 127)
180
180
181 # default set of valid characters for non-initial letters of symbols
181 # default set of valid characters for non-initial letters of symbols
182 _symletters = set(c for c in [chr(i) for i in xrange(256)]
182 _symletters = set(c for c in [chr(i) for i in xrange(256)]
183 if c.isalnum() or c in '-._/@' or ord(c) > 127)
183 if c.isalnum() or c in '-._/@' or ord(c) > 127)
184
184
185 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
185 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
186 '''
186 '''
187 Parse a revset statement into a stream of tokens
187 Parse a revset statement into a stream of tokens
188
188
189 ``syminitletters`` is the set of valid characters for the initial
189 ``syminitletters`` is the set of valid characters for the initial
190 letter of symbols.
190 letter of symbols.
191
191
192 By default, character ``c`` is recognized as valid for initial
192 By default, character ``c`` is recognized as valid for initial
193 letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``.
193 letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``.
194
194
195 ``symletters`` is the set of valid characters for non-initial
195 ``symletters`` is the set of valid characters for non-initial
196 letters of symbols.
196 letters of symbols.
197
197
198 By default, character ``c`` is recognized as valid for non-initial
198 By default, character ``c`` is recognized as valid for non-initial
199 letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.
199 letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.
200
200
201 Check that @ is a valid unquoted token character (issue3686):
201 Check that @ is a valid unquoted token character (issue3686):
202 >>> list(tokenize("@::"))
202 >>> list(tokenize("@::"))
203 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
203 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
204
204
205 '''
205 '''
206 if syminitletters is None:
206 if syminitletters is None:
207 syminitletters = _syminitletters
207 syminitletters = _syminitletters
208 if symletters is None:
208 if symletters is None:
209 symletters = _symletters
209 symletters = _symletters
210
210
211 if program and lookup:
211 if program and lookup:
212 # attempt to parse old-style ranges first to deal with
212 # attempt to parse old-style ranges first to deal with
213 # things like old-tag which contain query metacharacters
213 # things like old-tag which contain query metacharacters
214 parts = program.split(':', 1)
214 parts = program.split(':', 1)
215 if all(lookup(sym) for sym in parts if sym):
215 if all(lookup(sym) for sym in parts if sym):
216 if parts[0]:
216 if parts[0]:
217 yield ('symbol', parts[0], 0)
217 yield ('symbol', parts[0], 0)
218 if len(parts) > 1:
218 if len(parts) > 1:
219 s = len(parts[0])
219 s = len(parts[0])
220 yield (':', None, s)
220 yield (':', None, s)
221 if parts[1]:
221 if parts[1]:
222 yield ('symbol', parts[1], s + 1)
222 yield ('symbol', parts[1], s + 1)
223 yield ('end', None, len(program))
223 yield ('end', None, len(program))
224 return
224 return
225
225
226 pos, l = 0, len(program)
226 pos, l = 0, len(program)
227 while pos < l:
227 while pos < l:
228 c = program[pos]
228 c = program[pos]
229 if c.isspace(): # skip inter-token whitespace
229 if c.isspace(): # skip inter-token whitespace
230 pass
230 pass
231 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
231 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
232 yield ('::', None, pos)
232 yield ('::', None, pos)
233 pos += 1 # skip ahead
233 pos += 1 # skip ahead
234 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
234 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
235 yield ('..', None, pos)
235 yield ('..', None, pos)
236 pos += 1 # skip ahead
236 pos += 1 # skip ahead
237 elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
237 elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
238 yield ('##', None, pos)
238 yield ('##', None, pos)
239 pos += 1 # skip ahead
239 pos += 1 # skip ahead
240 elif c in "():=,-|&+!~^%": # handle simple operators
240 elif c in "():=,-|&+!~^%": # handle simple operators
241 yield (c, None, pos)
241 yield (c, None, pos)
242 elif (c in '"\'' or c == 'r' and
242 elif (c in '"\'' or c == 'r' and
243 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
243 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
244 if c == 'r':
244 if c == 'r':
245 pos += 1
245 pos += 1
246 c = program[pos]
246 c = program[pos]
247 decode = lambda x: x
247 decode = lambda x: x
248 else:
248 else:
249 decode = parser.unescapestr
249 decode = parser.unescapestr
250 pos += 1
250 pos += 1
251 s = pos
251 s = pos
252 while pos < l: # find closing quote
252 while pos < l: # find closing quote
253 d = program[pos]
253 d = program[pos]
254 if d == '\\': # skip over escaped characters
254 if d == '\\': # skip over escaped characters
255 pos += 2
255 pos += 2
256 continue
256 continue
257 if d == c:
257 if d == c:
258 yield ('string', decode(program[s:pos]), s)
258 yield ('string', decode(program[s:pos]), s)
259 break
259 break
260 pos += 1
260 pos += 1
261 else:
261 else:
262 raise error.ParseError(_("unterminated string"), s)
262 raise error.ParseError(_("unterminated string"), s)
263 # gather up a symbol/keyword
263 # gather up a symbol/keyword
264 elif c in syminitletters:
264 elif c in syminitletters:
265 s = pos
265 s = pos
266 pos += 1
266 pos += 1
267 while pos < l: # find end of symbol
267 while pos < l: # find end of symbol
268 d = program[pos]
268 d = program[pos]
269 if d not in symletters:
269 if d not in symletters:
270 break
270 break
271 if d == '.' and program[pos - 1] == '.': # special case for ..
271 if d == '.' and program[pos - 1] == '.': # special case for ..
272 pos -= 1
272 pos -= 1
273 break
273 break
274 pos += 1
274 pos += 1
275 sym = program[s:pos]
275 sym = program[s:pos]
276 if sym in keywords: # operator keywords
276 if sym in keywords: # operator keywords
277 yield (sym, None, s)
277 yield (sym, None, s)
278 elif '-' in sym:
278 elif '-' in sym:
279 # some jerk gave us foo-bar-baz, try to check if it's a symbol
279 # some jerk gave us foo-bar-baz, try to check if it's a symbol
280 if lookup and lookup(sym):
280 if lookup and lookup(sym):
281 # looks like a real symbol
281 # looks like a real symbol
282 yield ('symbol', sym, s)
282 yield ('symbol', sym, s)
283 else:
283 else:
284 # looks like an expression
284 # looks like an expression
285 parts = sym.split('-')
285 parts = sym.split('-')
286 for p in parts[:-1]:
286 for p in parts[:-1]:
287 if p: # possible consecutive -
287 if p: # possible consecutive -
288 yield ('symbol', p, s)
288 yield ('symbol', p, s)
289 s += len(p)
289 s += len(p)
290 yield ('-', None, pos)
290 yield ('-', None, pos)
291 s += 1
291 s += 1
292 if parts[-1]: # possible trailing -
292 if parts[-1]: # possible trailing -
293 yield ('symbol', parts[-1], s)
293 yield ('symbol', parts[-1], s)
294 else:
294 else:
295 yield ('symbol', sym, s)
295 yield ('symbol', sym, s)
296 pos -= 1
296 pos -= 1
297 else:
297 else:
298 raise error.ParseError(_("syntax error in revset '%s'") %
298 raise error.ParseError(_("syntax error in revset '%s'") %
299 program, pos)
299 program, pos)
300 pos += 1
300 pos += 1
301 yield ('end', None, pos)
301 yield ('end', None, pos)
302
302
303 def parseerrordetail(inst):
303 def parseerrordetail(inst):
304 """Compose error message from specified ParseError object
304 """Compose error message from specified ParseError object
305 """
305 """
306 if len(inst.args) > 1:
306 if len(inst.args) > 1:
307 return _('at %s: %s') % (inst.args[1], inst.args[0])
307 return _('at %s: %s') % (inst.args[1], inst.args[0])
308 else:
308 else:
309 return inst.args[0]
309 return inst.args[0]
310
310
311 # helpers
311 # helpers
312
312
313 def getstring(x, err):
313 def getstring(x, err):
314 if x and (x[0] == 'string' or x[0] == 'symbol'):
314 if x and (x[0] == 'string' or x[0] == 'symbol'):
315 return x[1]
315 return x[1]
316 raise error.ParseError(err)
316 raise error.ParseError(err)
317
317
318 def getlist(x):
318 def getlist(x):
319 if not x:
319 if not x:
320 return []
320 return []
321 if x[0] == 'list':
321 if x[0] == 'list':
322 return getlist(x[1]) + [x[2]]
322 return getlist(x[1]) + [x[2]]
323 return [x]
323 return [x]
324
324
325 def getargs(x, min, max, err):
325 def getargs(x, min, max, err):
326 l = getlist(x)
326 l = getlist(x)
327 if len(l) < min or (max >= 0 and len(l) > max):
327 if len(l) < min or (max >= 0 and len(l) > max):
328 raise error.ParseError(err)
328 raise error.ParseError(err)
329 return l
329 return l
330
330
331 def getargsdict(x, funcname, keys):
331 def getargsdict(x, funcname, keys):
332 return parser.buildargsdict(getlist(x), funcname, keys.split(),
332 return parser.buildargsdict(getlist(x), funcname, keys.split(),
333 keyvaluenode='keyvalue', keynode='symbol')
333 keyvaluenode='keyvalue', keynode='symbol')
334
334
335 def isvalidsymbol(tree):
335 def isvalidsymbol(tree):
336 """Examine whether specified ``tree`` is valid ``symbol`` or not
336 """Examine whether specified ``tree`` is valid ``symbol`` or not
337 """
337 """
338 return tree[0] == 'symbol' and len(tree) > 1
338 return tree[0] == 'symbol' and len(tree) > 1
339
339
340 def getsymbol(tree):
340 def getsymbol(tree):
341 """Get symbol name from valid ``symbol`` in ``tree``
341 """Get symbol name from valid ``symbol`` in ``tree``
342
342
343 This assumes that ``tree`` is already examined by ``isvalidsymbol``.
343 This assumes that ``tree`` is already examined by ``isvalidsymbol``.
344 """
344 """
345 return tree[1]
345 return tree[1]
346
346
347 def isvalidfunc(tree):
347 def isvalidfunc(tree):
348 """Examine whether specified ``tree`` is valid ``func`` or not
348 """Examine whether specified ``tree`` is valid ``func`` or not
349 """
349 """
350 return tree[0] == 'func' and len(tree) > 1 and isvalidsymbol(tree[1])
350 return tree[0] == 'func' and len(tree) > 1 and isvalidsymbol(tree[1])
351
351
352 def getfuncname(tree):
352 def getfuncname(tree):
353 """Get function name from valid ``func`` in ``tree``
353 """Get function name from valid ``func`` in ``tree``
354
354
355 This assumes that ``tree`` is already examined by ``isvalidfunc``.
355 This assumes that ``tree`` is already examined by ``isvalidfunc``.
356 """
356 """
357 return getsymbol(tree[1])
357 return getsymbol(tree[1])
358
358
359 def getfuncargs(tree):
359 def getfuncargs(tree):
360 """Get list of function arguments from valid ``func`` in ``tree``
360 """Get list of function arguments from valid ``func`` in ``tree``
361
361
362 This assumes that ``tree`` is already examined by ``isvalidfunc``.
362 This assumes that ``tree`` is already examined by ``isvalidfunc``.
363 """
363 """
364 if len(tree) > 2:
364 if len(tree) > 2:
365 return getlist(tree[2])
365 return getlist(tree[2])
366 else:
366 else:
367 return []
367 return []
368
368
369 def getset(repo, subset, x):
369 def getset(repo, subset, x):
370 if not x:
370 if not x:
371 raise error.ParseError(_("missing argument"))
371 raise error.ParseError(_("missing argument"))
372 s = methods[x[0]](repo, subset, *x[1:])
372 s = methods[x[0]](repo, subset, *x[1:])
373 if util.safehasattr(s, 'isascending'):
373 if util.safehasattr(s, 'isascending'):
374 return s
374 return s
375 if (repo.ui.configbool('devel', 'all-warnings')
375 if (repo.ui.configbool('devel', 'all-warnings')
376 or repo.ui.configbool('devel', 'old-revset')):
376 or repo.ui.configbool('devel', 'old-revset')):
377 # else case should not happen, because all non-func are internal,
377 # else case should not happen, because all non-func are internal,
378 # ignoring for now.
378 # ignoring for now.
379 if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
379 if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
380 repo.ui.develwarn('revset "%s" use list instead of smartset, '
380 repo.ui.develwarn('revset "%s" use list instead of smartset, '
381 '(upgrade your code)' % x[1][1])
381 '(upgrade your code)' % x[1][1])
382 return baseset(s)
382 return baseset(s)
383
383
384 def _getrevsource(repo, r):
384 def _getrevsource(repo, r):
385 extra = repo[r].extra()
385 extra = repo[r].extra()
386 for label in ('source', 'transplant_source', 'rebase_source'):
386 for label in ('source', 'transplant_source', 'rebase_source'):
387 if label in extra:
387 if label in extra:
388 try:
388 try:
389 return repo[extra[label]].rev()
389 return repo[extra[label]].rev()
390 except error.RepoLookupError:
390 except error.RepoLookupError:
391 pass
391 pass
392 return None
392 return None
393
393
394 # operator methods
394 # operator methods
395
395
396 def stringset(repo, subset, x):
396 def stringset(repo, subset, x):
397 x = repo[x].rev()
397 x = repo[x].rev()
398 if (x in subset
398 if (x in subset
399 or x == node.nullrev and isinstance(subset, fullreposet)):
399 or x == node.nullrev and isinstance(subset, fullreposet)):
400 return baseset([x])
400 return baseset([x])
401 return baseset()
401 return baseset()
402
402
403 def rangeset(repo, subset, x, y):
403 def rangeset(repo, subset, x, y):
404 m = getset(repo, fullreposet(repo), x)
404 m = getset(repo, fullreposet(repo), x)
405 n = getset(repo, fullreposet(repo), y)
405 n = getset(repo, fullreposet(repo), y)
406
406
407 if not m or not n:
407 if not m or not n:
408 return baseset()
408 return baseset()
409 m, n = m.first(), n.last()
409 m, n = m.first(), n.last()
410
410
411 if m == n:
411 if m == n:
412 r = baseset([m])
412 r = baseset([m])
413 elif n == node.wdirrev:
413 elif n == node.wdirrev:
414 r = spanset(repo, m, len(repo)) + baseset([n])
414 r = spanset(repo, m, len(repo)) + baseset([n])
415 elif m == node.wdirrev:
415 elif m == node.wdirrev:
416 r = baseset([m]) + spanset(repo, len(repo) - 1, n - 1)
416 r = baseset([m]) + spanset(repo, len(repo) - 1, n - 1)
417 elif m < n:
417 elif m < n:
418 r = spanset(repo, m, n + 1)
418 r = spanset(repo, m, n + 1)
419 else:
419 else:
420 r = spanset(repo, m, n - 1)
420 r = spanset(repo, m, n - 1)
421 # XXX We should combine with subset first: 'subset & baseset(...)'. This is
421 # XXX We should combine with subset first: 'subset & baseset(...)'. This is
422 # necessary to ensure we preserve the order in subset.
422 # necessary to ensure we preserve the order in subset.
423 #
423 #
424 # This has performance implication, carrying the sorting over when possible
424 # This has performance implication, carrying the sorting over when possible
425 # would be more efficient.
425 # would be more efficient.
426 return r & subset
426 return r & subset
427
427
428 def dagrange(repo, subset, x, y):
428 def dagrange(repo, subset, x, y):
429 r = fullreposet(repo)
429 r = fullreposet(repo)
430 xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
430 xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
431 includepath=True)
431 includepath=True)
432 # XXX We should combine with subset first: 'subset & baseset(...)'. This is
432 # XXX We should combine with subset first: 'subset & baseset(...)'. This is
433 # necessary to ensure we preserve the order in subset.
433 # necessary to ensure we preserve the order in subset.
434 return xs & subset
434 return xs & subset
435
435
436 def andset(repo, subset, x, y):
436 def andset(repo, subset, x, y):
437 return getset(repo, getset(repo, subset, x), y)
437 return getset(repo, getset(repo, subset, x), y)
438
438
439 def orset(repo, subset, *xs):
439 def orset(repo, subset, *xs):
440 assert xs
440 assert xs
441 if len(xs) == 1:
441 if len(xs) == 1:
442 return getset(repo, subset, xs[0])
442 return getset(repo, subset, xs[0])
443 p = len(xs) // 2
443 p = len(xs) // 2
444 a = orset(repo, subset, *xs[:p])
444 a = orset(repo, subset, *xs[:p])
445 b = orset(repo, subset, *xs[p:])
445 b = orset(repo, subset, *xs[p:])
446 return a + b
446 return a + b
447
447
448 def notset(repo, subset, x):
448 def notset(repo, subset, x):
449 return subset - getset(repo, subset, x)
449 return subset - getset(repo, subset, x)
450
450
451 def listset(repo, subset, a, b):
451 def listset(repo, subset, a, b):
452 raise error.ParseError(_("can't use a list in this context"),
452 raise error.ParseError(_("can't use a list in this context"),
453 hint=_('see hg help "revsets.x or y"'))
453 hint=_('see hg help "revsets.x or y"'))
454
454
455 def keyvaluepair(repo, subset, k, v):
455 def keyvaluepair(repo, subset, k, v):
456 raise error.ParseError(_("can't use a key-value pair in this context"))
456 raise error.ParseError(_("can't use a key-value pair in this context"))
457
457
458 def func(repo, subset, a, b):
458 def func(repo, subset, a, b):
459 if a[0] == 'symbol' and a[1] in symbols:
459 if a[0] == 'symbol' and a[1] in symbols:
460 return symbols[a[1]](repo, subset, b)
460 return symbols[a[1]](repo, subset, b)
461
461
462 keep = lambda fn: getattr(fn, '__doc__', None) is not None
462 keep = lambda fn: getattr(fn, '__doc__', None) is not None
463
463
464 syms = [s for (s, fn) in symbols.items() if keep(fn)]
464 syms = [s for (s, fn) in symbols.items() if keep(fn)]
465 raise error.UnknownIdentifier(a[1], syms)
465 raise error.UnknownIdentifier(a[1], syms)
466
466
467 # functions
467 # functions
468
468
469 # symbols are callables like:
469 # symbols are callables like:
470 # fn(repo, subset, x)
470 # fn(repo, subset, x)
471 # with:
471 # with:
472 # repo - current repository instance
472 # repo - current repository instance
473 # subset - of revisions to be examined
473 # subset - of revisions to be examined
474 # x - argument in tree form
474 # x - argument in tree form
475 symbols = {}
475 symbols = {}
476
476
477 class predicate(registrar.funcregistrar):
477 class predicate(registrar.funcregistrar):
478 """Decorator to register revset predicate
478 """Decorator to register revset predicate
479
479
480 Usage::
480 Usage::
481
481
482 @predicate('mypredicate(arg1, arg2[, arg3])')
482 @predicate('mypredicate(arg1, arg2[, arg3])')
483 def mypredicatefunc(repo, subset, x):
483 def mypredicatefunc(repo, subset, x):
484 '''Explanation of this revset predicate ....
484 '''Explanation of this revset predicate ....
485 '''
485 '''
486 pass
486 pass
487
487
488 The first string argument of the constructor is used also in
488 The first string argument of the constructor is used also in
489 online help.
489 online help.
490
491 Use 'extpredicate' instead of this to register revset predicate in
492 extensions.
490 """
493 """
491 table = symbols
494 table = symbols
492 formatdoc = "``%s``\n %s"
495 formatdoc = "``%s``\n %s"
493 getname = registrar.funcregistrar.parsefuncdecl
496 getname = registrar.funcregistrar.parsefuncdecl
494
497
498 class extpredicate(registrar.delayregistrar):
499 """Decorator to register revset predicate in extensions
500
501 Usage::
502
503 revsetpredicate = revset.extpredicate()
504
505 @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
506 def mypredicatefunc(repo, subset, x):
507 '''Explanation of this revset predicate ....
508 '''
509 pass
510
511 def uisetup(ui):
512 revsetpredicate.setup()
513
514 'revsetpredicate' instance above can be used to decorate multiple
515 functions, and 'setup()' on it registers all such functions at
516 once.
517 """
518 registrar = predicate
519
495 @predicate('_destupdate')
520 @predicate('_destupdate')
496 def _destupdate(repo, subset, x):
521 def _destupdate(repo, subset, x):
497 # experimental revset for update destination
522 # experimental revset for update destination
498 args = getargsdict(x, 'limit', 'clean check')
523 args = getargsdict(x, 'limit', 'clean check')
499 return subset & baseset([destutil.destupdate(repo, **args)[0]])
524 return subset & baseset([destutil.destupdate(repo, **args)[0]])
500
525
501 @predicate('_destmerge')
526 @predicate('_destmerge')
502 def _destmerge(repo, subset, x):
527 def _destmerge(repo, subset, x):
503 # experimental revset for merge destination
528 # experimental revset for merge destination
504 getargs(x, 0, 0, _("_mergedefaultdest takes no arguments"))
529 getargs(x, 0, 0, _("_mergedefaultdest takes no arguments"))
505 return subset & baseset([destutil.destmerge(repo)])
530 return subset & baseset([destutil.destmerge(repo)])
506
531
507 @predicate('adds(pattern)')
532 @predicate('adds(pattern)')
508 def adds(repo, subset, x):
533 def adds(repo, subset, x):
509 """Changesets that add a file matching pattern.
534 """Changesets that add a file matching pattern.
510
535
511 The pattern without explicit kind like ``glob:`` is expected to be
536 The pattern without explicit kind like ``glob:`` is expected to be
512 relative to the current directory and match against a file or a
537 relative to the current directory and match against a file or a
513 directory.
538 directory.
514 """
539 """
515 # i18n: "adds" is a keyword
540 # i18n: "adds" is a keyword
516 pat = getstring(x, _("adds requires a pattern"))
541 pat = getstring(x, _("adds requires a pattern"))
517 return checkstatus(repo, subset, pat, 1)
542 return checkstatus(repo, subset, pat, 1)
518
543
519 @predicate('ancestor(*changeset)')
544 @predicate('ancestor(*changeset)')
520 def ancestor(repo, subset, x):
545 def ancestor(repo, subset, x):
521 """A greatest common ancestor of the changesets.
546 """A greatest common ancestor of the changesets.
522
547
523 Accepts 0 or more changesets.
548 Accepts 0 or more changesets.
524 Will return empty list when passed no args.
549 Will return empty list when passed no args.
525 Greatest common ancestor of a single changeset is that changeset.
550 Greatest common ancestor of a single changeset is that changeset.
526 """
551 """
527 # i18n: "ancestor" is a keyword
552 # i18n: "ancestor" is a keyword
528 l = getlist(x)
553 l = getlist(x)
529 rl = fullreposet(repo)
554 rl = fullreposet(repo)
530 anc = None
555 anc = None
531
556
532 # (getset(repo, rl, i) for i in l) generates a list of lists
557 # (getset(repo, rl, i) for i in l) generates a list of lists
533 for revs in (getset(repo, rl, i) for i in l):
558 for revs in (getset(repo, rl, i) for i in l):
534 for r in revs:
559 for r in revs:
535 if anc is None:
560 if anc is None:
536 anc = repo[r]
561 anc = repo[r]
537 else:
562 else:
538 anc = anc.ancestor(repo[r])
563 anc = anc.ancestor(repo[r])
539
564
540 if anc is not None and anc.rev() in subset:
565 if anc is not None and anc.rev() in subset:
541 return baseset([anc.rev()])
566 return baseset([anc.rev()])
542 return baseset()
567 return baseset()
543
568
544 def _ancestors(repo, subset, x, followfirst=False):
569 def _ancestors(repo, subset, x, followfirst=False):
545 heads = getset(repo, fullreposet(repo), x)
570 heads = getset(repo, fullreposet(repo), x)
546 if not heads:
571 if not heads:
547 return baseset()
572 return baseset()
548 s = _revancestors(repo, heads, followfirst)
573 s = _revancestors(repo, heads, followfirst)
549 return subset & s
574 return subset & s
550
575
551 @predicate('ancestors(set)')
576 @predicate('ancestors(set)')
552 def ancestors(repo, subset, x):
577 def ancestors(repo, subset, x):
553 """Changesets that are ancestors of a changeset in set.
578 """Changesets that are ancestors of a changeset in set.
554 """
579 """
555 return _ancestors(repo, subset, x)
580 return _ancestors(repo, subset, x)
556
581
557 @predicate('_firstancestors')
582 @predicate('_firstancestors')
558 def _firstancestors(repo, subset, x):
583 def _firstancestors(repo, subset, x):
559 # ``_firstancestors(set)``
584 # ``_firstancestors(set)``
560 # Like ``ancestors(set)`` but follows only the first parents.
585 # Like ``ancestors(set)`` but follows only the first parents.
561 return _ancestors(repo, subset, x, followfirst=True)
586 return _ancestors(repo, subset, x, followfirst=True)
562
587
563 def ancestorspec(repo, subset, x, n):
588 def ancestorspec(repo, subset, x, n):
564 """``set~n``
589 """``set~n``
565 Changesets that are the Nth ancestor (first parents only) of a changeset
590 Changesets that are the Nth ancestor (first parents only) of a changeset
566 in set.
591 in set.
567 """
592 """
568 try:
593 try:
569 n = int(n[1])
594 n = int(n[1])
570 except (TypeError, ValueError):
595 except (TypeError, ValueError):
571 raise error.ParseError(_("~ expects a number"))
596 raise error.ParseError(_("~ expects a number"))
572 ps = set()
597 ps = set()
573 cl = repo.changelog
598 cl = repo.changelog
574 for r in getset(repo, fullreposet(repo), x):
599 for r in getset(repo, fullreposet(repo), x):
575 for i in range(n):
600 for i in range(n):
576 r = cl.parentrevs(r)[0]
601 r = cl.parentrevs(r)[0]
577 ps.add(r)
602 ps.add(r)
578 return subset & ps
603 return subset & ps
579
604
580 @predicate('author(string)')
605 @predicate('author(string)')
581 def author(repo, subset, x):
606 def author(repo, subset, x):
582 """Alias for ``user(string)``.
607 """Alias for ``user(string)``.
583 """
608 """
584 # i18n: "author" is a keyword
609 # i18n: "author" is a keyword
585 n = encoding.lower(getstring(x, _("author requires a string")))
610 n = encoding.lower(getstring(x, _("author requires a string")))
586 kind, pattern, matcher = _substringmatcher(n)
611 kind, pattern, matcher = _substringmatcher(n)
587 return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))
612 return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))
588
613
589 @predicate('bisect(string)')
614 @predicate('bisect(string)')
590 def bisect(repo, subset, x):
615 def bisect(repo, subset, x):
591 """Changesets marked in the specified bisect status:
616 """Changesets marked in the specified bisect status:
592
617
593 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
618 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
594 - ``goods``, ``bads`` : csets topologically good/bad
619 - ``goods``, ``bads`` : csets topologically good/bad
595 - ``range`` : csets taking part in the bisection
620 - ``range`` : csets taking part in the bisection
596 - ``pruned`` : csets that are goods, bads or skipped
621 - ``pruned`` : csets that are goods, bads or skipped
597 - ``untested`` : csets whose fate is yet unknown
622 - ``untested`` : csets whose fate is yet unknown
598 - ``ignored`` : csets ignored due to DAG topology
623 - ``ignored`` : csets ignored due to DAG topology
599 - ``current`` : the cset currently being bisected
624 - ``current`` : the cset currently being bisected
600 """
625 """
601 # i18n: "bisect" is a keyword
626 # i18n: "bisect" is a keyword
602 status = getstring(x, _("bisect requires a string")).lower()
627 status = getstring(x, _("bisect requires a string")).lower()
603 state = set(hbisect.get(repo, status))
628 state = set(hbisect.get(repo, status))
604 return subset & state
629 return subset & state
605
630
606 # Backward-compatibility
631 # Backward-compatibility
607 # - no help entry so that we do not advertise it any more
632 # - no help entry so that we do not advertise it any more
608 @predicate('bisected')
633 @predicate('bisected')
609 def bisected(repo, subset, x):
634 def bisected(repo, subset, x):
610 return bisect(repo, subset, x)
635 return bisect(repo, subset, x)
611
636
612 @predicate('bookmark([name])')
637 @predicate('bookmark([name])')
613 def bookmark(repo, subset, x):
638 def bookmark(repo, subset, x):
614 """The named bookmark or all bookmarks.
639 """The named bookmark or all bookmarks.
615
640
616 If `name` starts with `re:`, the remainder of the name is treated as
641 If `name` starts with `re:`, the remainder of the name is treated as
617 a regular expression. To match a bookmark that actually starts with `re:`,
642 a regular expression. To match a bookmark that actually starts with `re:`,
618 use the prefix `literal:`.
643 use the prefix `literal:`.
619 """
644 """
620 # i18n: "bookmark" is a keyword
645 # i18n: "bookmark" is a keyword
621 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
646 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
622 if args:
647 if args:
623 bm = getstring(args[0],
648 bm = getstring(args[0],
624 # i18n: "bookmark" is a keyword
649 # i18n: "bookmark" is a keyword
625 _('the argument to bookmark must be a string'))
650 _('the argument to bookmark must be a string'))
626 kind, pattern, matcher = util.stringmatcher(bm)
651 kind, pattern, matcher = util.stringmatcher(bm)
627 bms = set()
652 bms = set()
628 if kind == 'literal':
653 if kind == 'literal':
629 bmrev = repo._bookmarks.get(pattern, None)
654 bmrev = repo._bookmarks.get(pattern, None)
630 if not bmrev:
655 if not bmrev:
631 raise error.RepoLookupError(_("bookmark '%s' does not exist")
656 raise error.RepoLookupError(_("bookmark '%s' does not exist")
632 % pattern)
657 % pattern)
633 bms.add(repo[bmrev].rev())
658 bms.add(repo[bmrev].rev())
634 else:
659 else:
635 matchrevs = set()
660 matchrevs = set()
636 for name, bmrev in repo._bookmarks.iteritems():
661 for name, bmrev in repo._bookmarks.iteritems():
637 if matcher(name):
662 if matcher(name):
638 matchrevs.add(bmrev)
663 matchrevs.add(bmrev)
639 if not matchrevs:
664 if not matchrevs:
640 raise error.RepoLookupError(_("no bookmarks exist"
665 raise error.RepoLookupError(_("no bookmarks exist"
641 " that match '%s'") % pattern)
666 " that match '%s'") % pattern)
642 for bmrev in matchrevs:
667 for bmrev in matchrevs:
643 bms.add(repo[bmrev].rev())
668 bms.add(repo[bmrev].rev())
644 else:
669 else:
645 bms = set([repo[r].rev()
670 bms = set([repo[r].rev()
646 for r in repo._bookmarks.values()])
671 for r in repo._bookmarks.values()])
647 bms -= set([node.nullrev])
672 bms -= set([node.nullrev])
648 return subset & bms
673 return subset & bms
649
674
650 @predicate('branch(string or set)')
675 @predicate('branch(string or set)')
651 def branch(repo, subset, x):
676 def branch(repo, subset, x):
652 """
677 """
653 All changesets belonging to the given branch or the branches of the given
678 All changesets belonging to the given branch or the branches of the given
654 changesets.
679 changesets.
655
680
656 If `string` starts with `re:`, the remainder of the name is treated as
681 If `string` starts with `re:`, the remainder of the name is treated as
657 a regular expression. To match a branch that actually starts with `re:`,
682 a regular expression. To match a branch that actually starts with `re:`,
658 use the prefix `literal:`.
683 use the prefix `literal:`.
659 """
684 """
660 getbi = repo.revbranchcache().branchinfo
685 getbi = repo.revbranchcache().branchinfo
661
686
662 try:
687 try:
663 b = getstring(x, '')
688 b = getstring(x, '')
664 except error.ParseError:
689 except error.ParseError:
665 # not a string, but another revspec, e.g. tip()
690 # not a string, but another revspec, e.g. tip()
666 pass
691 pass
667 else:
692 else:
668 kind, pattern, matcher = util.stringmatcher(b)
693 kind, pattern, matcher = util.stringmatcher(b)
669 if kind == 'literal':
694 if kind == 'literal':
670 # note: falls through to the revspec case if no branch with
695 # note: falls through to the revspec case if no branch with
671 # this name exists and pattern kind is not specified explicitly
696 # this name exists and pattern kind is not specified explicitly
672 if pattern in repo.branchmap():
697 if pattern in repo.branchmap():
673 return subset.filter(lambda r: matcher(getbi(r)[0]))
698 return subset.filter(lambda r: matcher(getbi(r)[0]))
674 if b.startswith('literal:'):
699 if b.startswith('literal:'):
675 raise error.RepoLookupError(_("branch '%s' does not exist")
700 raise error.RepoLookupError(_("branch '%s' does not exist")
676 % pattern)
701 % pattern)
677 else:
702 else:
678 return subset.filter(lambda r: matcher(getbi(r)[0]))
703 return subset.filter(lambda r: matcher(getbi(r)[0]))
679
704
680 s = getset(repo, fullreposet(repo), x)
705 s = getset(repo, fullreposet(repo), x)
681 b = set()
706 b = set()
682 for r in s:
707 for r in s:
683 b.add(getbi(r)[0])
708 b.add(getbi(r)[0])
684 c = s.__contains__
709 c = s.__contains__
685 return subset.filter(lambda r: c(r) or getbi(r)[0] in b)
710 return subset.filter(lambda r: c(r) or getbi(r)[0] in b)
686
711
687 @predicate('bumped()')
712 @predicate('bumped()')
688 def bumped(repo, subset, x):
713 def bumped(repo, subset, x):
689 """Mutable changesets marked as successors of public changesets.
714 """Mutable changesets marked as successors of public changesets.
690
715
691 Only non-public and non-obsolete changesets can be `bumped`.
716 Only non-public and non-obsolete changesets can be `bumped`.
692 """
717 """
693 # i18n: "bumped" is a keyword
718 # i18n: "bumped" is a keyword
694 getargs(x, 0, 0, _("bumped takes no arguments"))
719 getargs(x, 0, 0, _("bumped takes no arguments"))
695 bumped = obsmod.getrevs(repo, 'bumped')
720 bumped = obsmod.getrevs(repo, 'bumped')
696 return subset & bumped
721 return subset & bumped
697
722
698 @predicate('bundle()')
723 @predicate('bundle()')
699 def bundle(repo, subset, x):
724 def bundle(repo, subset, x):
700 """Changesets in the bundle.
725 """Changesets in the bundle.
701
726
702 Bundle must be specified by the -R option."""
727 Bundle must be specified by the -R option."""
703
728
704 try:
729 try:
705 bundlerevs = repo.changelog.bundlerevs
730 bundlerevs = repo.changelog.bundlerevs
706 except AttributeError:
731 except AttributeError:
707 raise error.Abort(_("no bundle provided - specify with -R"))
732 raise error.Abort(_("no bundle provided - specify with -R"))
708 return subset & bundlerevs
733 return subset & bundlerevs
709
734
710 def checkstatus(repo, subset, pat, field):
735 def checkstatus(repo, subset, pat, field):
711 hasset = matchmod.patkind(pat) == 'set'
736 hasset = matchmod.patkind(pat) == 'set'
712
737
713 mcache = [None]
738 mcache = [None]
714 def matches(x):
739 def matches(x):
715 c = repo[x]
740 c = repo[x]
716 if not mcache[0] or hasset:
741 if not mcache[0] or hasset:
717 mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
742 mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
718 m = mcache[0]
743 m = mcache[0]
719 fname = None
744 fname = None
720 if not m.anypats() and len(m.files()) == 1:
745 if not m.anypats() and len(m.files()) == 1:
721 fname = m.files()[0]
746 fname = m.files()[0]
722 if fname is not None:
747 if fname is not None:
723 if fname not in c.files():
748 if fname not in c.files():
724 return False
749 return False
725 else:
750 else:
726 for f in c.files():
751 for f in c.files():
727 if m(f):
752 if m(f):
728 break
753 break
729 else:
754 else:
730 return False
755 return False
731 files = repo.status(c.p1().node(), c.node())[field]
756 files = repo.status(c.p1().node(), c.node())[field]
732 if fname is not None:
757 if fname is not None:
733 if fname in files:
758 if fname in files:
734 return True
759 return True
735 else:
760 else:
736 for f in files:
761 for f in files:
737 if m(f):
762 if m(f):
738 return True
763 return True
739
764
740 return subset.filter(matches)
765 return subset.filter(matches)
741
766
742 def _children(repo, narrow, parentset):
767 def _children(repo, narrow, parentset):
743 if not parentset:
768 if not parentset:
744 return baseset()
769 return baseset()
745 cs = set()
770 cs = set()
746 pr = repo.changelog.parentrevs
771 pr = repo.changelog.parentrevs
747 minrev = parentset.min()
772 minrev = parentset.min()
748 for r in narrow:
773 for r in narrow:
749 if r <= minrev:
774 if r <= minrev:
750 continue
775 continue
751 for p in pr(r):
776 for p in pr(r):
752 if p in parentset:
777 if p in parentset:
753 cs.add(r)
778 cs.add(r)
754 # XXX using a set to feed the baseset is wrong. Sets are not ordered.
779 # XXX using a set to feed the baseset is wrong. Sets are not ordered.
755 # This does not break because of other fullreposet misbehavior.
780 # This does not break because of other fullreposet misbehavior.
756 return baseset(cs)
781 return baseset(cs)
757
782
758 @predicate('children(set)')
783 @predicate('children(set)')
759 def children(repo, subset, x):
784 def children(repo, subset, x):
760 """Child changesets of changesets in set.
785 """Child changesets of changesets in set.
761 """
786 """
762 s = getset(repo, fullreposet(repo), x)
787 s = getset(repo, fullreposet(repo), x)
763 cs = _children(repo, subset, s)
788 cs = _children(repo, subset, s)
764 return subset & cs
789 return subset & cs
765
790
766 @predicate('closed()')
791 @predicate('closed()')
767 def closed(repo, subset, x):
792 def closed(repo, subset, x):
768 """Changeset is closed.
793 """Changeset is closed.
769 """
794 """
770 # i18n: "closed" is a keyword
795 # i18n: "closed" is a keyword
771 getargs(x, 0, 0, _("closed takes no arguments"))
796 getargs(x, 0, 0, _("closed takes no arguments"))
772 return subset.filter(lambda r: repo[r].closesbranch())
797 return subset.filter(lambda r: repo[r].closesbranch())
773
798
774 @predicate('contains(pattern)')
799 @predicate('contains(pattern)')
775 def contains(repo, subset, x):
800 def contains(repo, subset, x):
776 """The revision's manifest contains a file matching pattern (but might not
801 """The revision's manifest contains a file matching pattern (but might not
777 modify it). See :hg:`help patterns` for information about file patterns.
802 modify it). See :hg:`help patterns` for information about file patterns.
778
803
779 The pattern without explicit kind like ``glob:`` is expected to be
804 The pattern without explicit kind like ``glob:`` is expected to be
780 relative to the current directory and match against a file exactly
805 relative to the current directory and match against a file exactly
781 for efficiency.
806 for efficiency.
782 """
807 """
783 # i18n: "contains" is a keyword
808 # i18n: "contains" is a keyword
784 pat = getstring(x, _("contains requires a pattern"))
809 pat = getstring(x, _("contains requires a pattern"))
785
810
786 def matches(x):
811 def matches(x):
787 if not matchmod.patkind(pat):
812 if not matchmod.patkind(pat):
788 pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
813 pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
789 if pats in repo[x]:
814 if pats in repo[x]:
790 return True
815 return True
791 else:
816 else:
792 c = repo[x]
817 c = repo[x]
793 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
818 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
794 for f in c.manifest():
819 for f in c.manifest():
795 if m(f):
820 if m(f):
796 return True
821 return True
797 return False
822 return False
798
823
799 return subset.filter(matches)
824 return subset.filter(matches)
800
825
801 @predicate('converted([id])')
826 @predicate('converted([id])')
802 def converted(repo, subset, x):
827 def converted(repo, subset, x):
803 """Changesets converted from the given identifier in the old repository if
828 """Changesets converted from the given identifier in the old repository if
804 present, or all converted changesets if no identifier is specified.
829 present, or all converted changesets if no identifier is specified.
805 """
830 """
806
831
807 # There is exactly no chance of resolving the revision, so do a simple
832 # There is exactly no chance of resolving the revision, so do a simple
808 # string compare and hope for the best
833 # string compare and hope for the best
809
834
810 rev = None
835 rev = None
811 # i18n: "converted" is a keyword
836 # i18n: "converted" is a keyword
812 l = getargs(x, 0, 1, _('converted takes one or no arguments'))
837 l = getargs(x, 0, 1, _('converted takes one or no arguments'))
813 if l:
838 if l:
814 # i18n: "converted" is a keyword
839 # i18n: "converted" is a keyword
815 rev = getstring(l[0], _('converted requires a revision'))
840 rev = getstring(l[0], _('converted requires a revision'))
816
841
817 def _matchvalue(r):
842 def _matchvalue(r):
818 source = repo[r].extra().get('convert_revision', None)
843 source = repo[r].extra().get('convert_revision', None)
819 return source is not None and (rev is None or source.startswith(rev))
844 return source is not None and (rev is None or source.startswith(rev))
820
845
821 return subset.filter(lambda r: _matchvalue(r))
846 return subset.filter(lambda r: _matchvalue(r))
822
847
823 @predicate('date(interval)')
848 @predicate('date(interval)')
824 def date(repo, subset, x):
849 def date(repo, subset, x):
825 """Changesets within the interval, see :hg:`help dates`.
850 """Changesets within the interval, see :hg:`help dates`.
826 """
851 """
827 # i18n: "date" is a keyword
852 # i18n: "date" is a keyword
828 ds = getstring(x, _("date requires a string"))
853 ds = getstring(x, _("date requires a string"))
829 dm = util.matchdate(ds)
854 dm = util.matchdate(ds)
830 return subset.filter(lambda x: dm(repo[x].date()[0]))
855 return subset.filter(lambda x: dm(repo[x].date()[0]))
831
856
832 @predicate('desc(string)')
857 @predicate('desc(string)')
833 def desc(repo, subset, x):
858 def desc(repo, subset, x):
834 """Search commit message for string. The match is case-insensitive.
859 """Search commit message for string. The match is case-insensitive.
835 """
860 """
836 # i18n: "desc" is a keyword
861 # i18n: "desc" is a keyword
837 ds = encoding.lower(getstring(x, _("desc requires a string")))
862 ds = encoding.lower(getstring(x, _("desc requires a string")))
838
863
839 def matches(x):
864 def matches(x):
840 c = repo[x]
865 c = repo[x]
841 return ds in encoding.lower(c.description())
866 return ds in encoding.lower(c.description())
842
867
843 return subset.filter(matches)
868 return subset.filter(matches)
844
869
845 def _descendants(repo, subset, x, followfirst=False):
870 def _descendants(repo, subset, x, followfirst=False):
846 roots = getset(repo, fullreposet(repo), x)
871 roots = getset(repo, fullreposet(repo), x)
847 if not roots:
872 if not roots:
848 return baseset()
873 return baseset()
849 s = _revdescendants(repo, roots, followfirst)
874 s = _revdescendants(repo, roots, followfirst)
850
875
851 # Both sets need to be ascending in order to lazily return the union
876 # Both sets need to be ascending in order to lazily return the union
852 # in the correct order.
877 # in the correct order.
853 base = subset & roots
878 base = subset & roots
854 desc = subset & s
879 desc = subset & s
855 result = base + desc
880 result = base + desc
856 if subset.isascending():
881 if subset.isascending():
857 result.sort()
882 result.sort()
858 elif subset.isdescending():
883 elif subset.isdescending():
859 result.sort(reverse=True)
884 result.sort(reverse=True)
860 else:
885 else:
861 result = subset & result
886 result = subset & result
862 return result
887 return result
863
888
864 @predicate('descendants(set)')
889 @predicate('descendants(set)')
865 def descendants(repo, subset, x):
890 def descendants(repo, subset, x):
866 """Changesets which are descendants of changesets in set.
891 """Changesets which are descendants of changesets in set.
867 """
892 """
868 return _descendants(repo, subset, x)
893 return _descendants(repo, subset, x)
869
894
870 @predicate('_firstdescendants')
895 @predicate('_firstdescendants')
871 def _firstdescendants(repo, subset, x):
896 def _firstdescendants(repo, subset, x):
872 # ``_firstdescendants(set)``
897 # ``_firstdescendants(set)``
873 # Like ``descendants(set)`` but follows only the first parents.
898 # Like ``descendants(set)`` but follows only the first parents.
874 return _descendants(repo, subset, x, followfirst=True)
899 return _descendants(repo, subset, x, followfirst=True)
875
900
876 @predicate('destination([set])')
901 @predicate('destination([set])')
877 def destination(repo, subset, x):
902 def destination(repo, subset, x):
878 """Changesets that were created by a graft, transplant or rebase operation,
903 """Changesets that were created by a graft, transplant or rebase operation,
879 with the given revisions specified as the source. Omitting the optional set
904 with the given revisions specified as the source. Omitting the optional set
880 is the same as passing all().
905 is the same as passing all().
881 """
906 """
882 if x is not None:
907 if x is not None:
883 sources = getset(repo, fullreposet(repo), x)
908 sources = getset(repo, fullreposet(repo), x)
884 else:
909 else:
885 sources = fullreposet(repo)
910 sources = fullreposet(repo)
886
911
887 dests = set()
912 dests = set()
888
913
889 # subset contains all of the possible destinations that can be returned, so
914 # subset contains all of the possible destinations that can be returned, so
890 # iterate over them and see if their source(s) were provided in the arg set.
915 # iterate over them and see if their source(s) were provided in the arg set.
891 # Even if the immediate src of r is not in the arg set, src's source (or
916 # Even if the immediate src of r is not in the arg set, src's source (or
892 # further back) may be. Scanning back further than the immediate src allows
917 # further back) may be. Scanning back further than the immediate src allows
893 # transitive transplants and rebases to yield the same results as transitive
918 # transitive transplants and rebases to yield the same results as transitive
894 # grafts.
919 # grafts.
895 for r in subset:
920 for r in subset:
896 src = _getrevsource(repo, r)
921 src = _getrevsource(repo, r)
897 lineage = None
922 lineage = None
898
923
899 while src is not None:
924 while src is not None:
900 if lineage is None:
925 if lineage is None:
901 lineage = list()
926 lineage = list()
902
927
903 lineage.append(r)
928 lineage.append(r)
904
929
905 # The visited lineage is a match if the current source is in the arg
930 # The visited lineage is a match if the current source is in the arg
906 # set. Since every candidate dest is visited by way of iterating
931 # set. Since every candidate dest is visited by way of iterating
907 # subset, any dests further back in the lineage will be tested by a
932 # subset, any dests further back in the lineage will be tested by a
908 # different iteration over subset. Likewise, if the src was already
933 # different iteration over subset. Likewise, if the src was already
909 # selected, the current lineage can be selected without going back
934 # selected, the current lineage can be selected without going back
910 # further.
935 # further.
911 if src in sources or src in dests:
936 if src in sources or src in dests:
912 dests.update(lineage)
937 dests.update(lineage)
913 break
938 break
914
939
915 r = src
940 r = src
916 src = _getrevsource(repo, r)
941 src = _getrevsource(repo, r)
917
942
918 return subset.filter(dests.__contains__)
943 return subset.filter(dests.__contains__)
919
944
920 @predicate('divergent()')
945 @predicate('divergent()')
921 def divergent(repo, subset, x):
946 def divergent(repo, subset, x):
922 """
947 """
923 Final successors of changesets with an alternative set of final successors.
948 Final successors of changesets with an alternative set of final successors.
924 """
949 """
925 # i18n: "divergent" is a keyword
950 # i18n: "divergent" is a keyword
926 getargs(x, 0, 0, _("divergent takes no arguments"))
951 getargs(x, 0, 0, _("divergent takes no arguments"))
927 divergent = obsmod.getrevs(repo, 'divergent')
952 divergent = obsmod.getrevs(repo, 'divergent')
928 return subset & divergent
953 return subset & divergent
929
954
930 @predicate('extinct()')
955 @predicate('extinct()')
931 def extinct(repo, subset, x):
956 def extinct(repo, subset, x):
932 """Obsolete changesets with obsolete descendants only.
957 """Obsolete changesets with obsolete descendants only.
933 """
958 """
934 # i18n: "extinct" is a keyword
959 # i18n: "extinct" is a keyword
935 getargs(x, 0, 0, _("extinct takes no arguments"))
960 getargs(x, 0, 0, _("extinct takes no arguments"))
936 extincts = obsmod.getrevs(repo, 'extinct')
961 extincts = obsmod.getrevs(repo, 'extinct')
937 return subset & extincts
962 return subset & extincts
938
963
939 @predicate('extra(label, [value])')
964 @predicate('extra(label, [value])')
940 def extra(repo, subset, x):
965 def extra(repo, subset, x):
941 """Changesets with the given label in the extra metadata, with the given
966 """Changesets with the given label in the extra metadata, with the given
942 optional value.
967 optional value.
943
968
944 If `value` starts with `re:`, the remainder of the value is treated as
969 If `value` starts with `re:`, the remainder of the value is treated as
945 a regular expression. To match a value that actually starts with `re:`,
970 a regular expression. To match a value that actually starts with `re:`,
946 use the prefix `literal:`.
971 use the prefix `literal:`.
947 """
972 """
948 args = getargsdict(x, 'extra', 'label value')
973 args = getargsdict(x, 'extra', 'label value')
949 if 'label' not in args:
974 if 'label' not in args:
950 # i18n: "extra" is a keyword
975 # i18n: "extra" is a keyword
951 raise error.ParseError(_('extra takes at least 1 argument'))
976 raise error.ParseError(_('extra takes at least 1 argument'))
952 # i18n: "extra" is a keyword
977 # i18n: "extra" is a keyword
953 label = getstring(args['label'], _('first argument to extra must be '
978 label = getstring(args['label'], _('first argument to extra must be '
954 'a string'))
979 'a string'))
955 value = None
980 value = None
956
981
957 if 'value' in args:
982 if 'value' in args:
958 # i18n: "extra" is a keyword
983 # i18n: "extra" is a keyword
959 value = getstring(args['value'], _('second argument to extra must be '
984 value = getstring(args['value'], _('second argument to extra must be '
960 'a string'))
985 'a string'))
961 kind, value, matcher = util.stringmatcher(value)
986 kind, value, matcher = util.stringmatcher(value)
962
987
963 def _matchvalue(r):
988 def _matchvalue(r):
964 extra = repo[r].extra()
989 extra = repo[r].extra()
965 return label in extra and (value is None or matcher(extra[label]))
990 return label in extra and (value is None or matcher(extra[label]))
966
991
967 return subset.filter(lambda r: _matchvalue(r))
992 return subset.filter(lambda r: _matchvalue(r))
968
993
969 @predicate('filelog(pattern)')
994 @predicate('filelog(pattern)')
970 def filelog(repo, subset, x):
995 def filelog(repo, subset, x):
971 """Changesets connected to the specified filelog.
996 """Changesets connected to the specified filelog.
972
997
973 For performance reasons, visits only revisions mentioned in the file-level
998 For performance reasons, visits only revisions mentioned in the file-level
974 filelog, rather than filtering through all changesets (much faster, but
999 filelog, rather than filtering through all changesets (much faster, but
975 doesn't include deletes or duplicate changes). For a slower, more accurate
1000 doesn't include deletes or duplicate changes). For a slower, more accurate
976 result, use ``file()``.
1001 result, use ``file()``.
977
1002
978 The pattern without explicit kind like ``glob:`` is expected to be
1003 The pattern without explicit kind like ``glob:`` is expected to be
979 relative to the current directory and match against a file exactly
1004 relative to the current directory and match against a file exactly
980 for efficiency.
1005 for efficiency.
981
1006
982 If some linkrev points to revisions filtered by the current repoview, we'll
1007 If some linkrev points to revisions filtered by the current repoview, we'll
983 work around it to return a non-filtered value.
1008 work around it to return a non-filtered value.
984 """
1009 """
985
1010
986 # i18n: "filelog" is a keyword
1011 # i18n: "filelog" is a keyword
987 pat = getstring(x, _("filelog requires a pattern"))
1012 pat = getstring(x, _("filelog requires a pattern"))
988 s = set()
1013 s = set()
989 cl = repo.changelog
1014 cl = repo.changelog
990
1015
991 if not matchmod.patkind(pat):
1016 if not matchmod.patkind(pat):
992 f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
1017 f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
993 files = [f]
1018 files = [f]
994 else:
1019 else:
995 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
1020 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
996 files = (f for f in repo[None] if m(f))
1021 files = (f for f in repo[None] if m(f))
997
1022
998 for f in files:
1023 for f in files:
999 backrevref = {} # final value for: filerev -> changerev
1024 backrevref = {} # final value for: filerev -> changerev
1000 lowestchild = {} # lowest known filerev child of a filerev
1025 lowestchild = {} # lowest known filerev child of a filerev
1001 delayed = [] # filerev with filtered linkrev, for post-processing
1026 delayed = [] # filerev with filtered linkrev, for post-processing
1002 lowesthead = None # cache for manifest content of all head revisions
1027 lowesthead = None # cache for manifest content of all head revisions
1003 fl = repo.file(f)
1028 fl = repo.file(f)
1004 for fr in list(fl):
1029 for fr in list(fl):
1005 rev = fl.linkrev(fr)
1030 rev = fl.linkrev(fr)
1006 if rev not in cl:
1031 if rev not in cl:
1007 # changerev pointed in linkrev is filtered
1032 # changerev pointed in linkrev is filtered
1008 # record it for post processing.
1033 # record it for post processing.
1009 delayed.append((fr, rev))
1034 delayed.append((fr, rev))
1010 continue
1035 continue
1011 for p in fl.parentrevs(fr):
1036 for p in fl.parentrevs(fr):
1012 if 0 <= p and p not in lowestchild:
1037 if 0 <= p and p not in lowestchild:
1013 lowestchild[p] = fr
1038 lowestchild[p] = fr
1014 backrevref[fr] = rev
1039 backrevref[fr] = rev
1015 s.add(rev)
1040 s.add(rev)
1016
1041
1017 # Post-processing of all filerevs we skipped because they were
1042 # Post-processing of all filerevs we skipped because they were
1018 # filtered. If such filerevs have known and unfiltered children, this
1043 # filtered. If such filerevs have known and unfiltered children, this
1019 # means they have an unfiltered appearance out there. We'll use linkrev
1044 # means they have an unfiltered appearance out there. We'll use linkrev
1020 # adjustment to find one of these appearances. The lowest known child
1045 # adjustment to find one of these appearances. The lowest known child
1021 # will be used as a starting point because it is the best upper-bound we
1046 # will be used as a starting point because it is the best upper-bound we
1022 # have.
1047 # have.
1023 #
1048 #
1024 # This approach will fail when an unfiltered but linkrev-shadowed
1049 # This approach will fail when an unfiltered but linkrev-shadowed
1025 # appearance exists in a head changeset without unfiltered filerev
1050 # appearance exists in a head changeset without unfiltered filerev
1026 # children anywhere.
1051 # children anywhere.
1027 while delayed:
1052 while delayed:
1028 # must be a descending iteration. To slowly fill lowest child
1053 # must be a descending iteration. To slowly fill lowest child
1029 # information that is of potential use by the next item.
1054 # information that is of potential use by the next item.
1030 fr, rev = delayed.pop()
1055 fr, rev = delayed.pop()
1031 lkr = rev
1056 lkr = rev
1032
1057
1033 child = lowestchild.get(fr)
1058 child = lowestchild.get(fr)
1034
1059
1035 if child is None:
1060 if child is None:
1036 # search for existence of this file revision in a head revision.
1061 # search for existence of this file revision in a head revision.
1037 # There are three possibilities:
1062 # There are three possibilities:
1038 # - the revision exists in a head and we can find an
1063 # - the revision exists in a head and we can find an
1039 # introduction from there,
1064 # introduction from there,
1040 # - the revision does not exist in a head because it has been
1065 # - the revision does not exist in a head because it has been
1041 # changed since its introduction: we would have found a child
1066 # changed since its introduction: we would have found a child
1042 # and be in the other 'else' clause,
1067 # and be in the other 'else' clause,
1043 # - all versions of the revision are hidden.
1068 # - all versions of the revision are hidden.
1044 if lowesthead is None:
1069 if lowesthead is None:
1045 lowesthead = {}
1070 lowesthead = {}
1046 for h in repo.heads():
1071 for h in repo.heads():
1047 fnode = repo[h].manifest().get(f)
1072 fnode = repo[h].manifest().get(f)
1048 if fnode is not None:
1073 if fnode is not None:
1049 lowesthead[fl.rev(fnode)] = h
1074 lowesthead[fl.rev(fnode)] = h
1050 headrev = lowesthead.get(fr)
1075 headrev = lowesthead.get(fr)
1051 if headrev is None:
1076 if headrev is None:
1052 # content is nowhere unfiltered
1077 # content is nowhere unfiltered
1053 continue
1078 continue
1054 rev = repo[headrev][f].introrev()
1079 rev = repo[headrev][f].introrev()
1055 else:
1080 else:
1056 # the lowest known child is a good upper bound
1081 # the lowest known child is a good upper bound
1057 childcrev = backrevref[child]
1082 childcrev = backrevref[child]
1058 # XXX this does not guarantee returning the lowest
1083 # XXX this does not guarantee returning the lowest
1059 # introduction of this revision, but this gives a
1084 # introduction of this revision, but this gives a
1060 # result which is a good start and will fit in most
1085 # result which is a good start and will fit in most
1061 # cases. We probably need to fix the multiple
1086 # cases. We probably need to fix the multiple
1062 # introductions case properly (report each
1087 # introductions case properly (report each
1063 # introduction, even for identical file revisions)
1088 # introduction, even for identical file revisions)
1064 # once and for all at some point anyway.
1089 # once and for all at some point anyway.
1065 for p in repo[childcrev][f].parents():
1090 for p in repo[childcrev][f].parents():
1066 if p.filerev() == fr:
1091 if p.filerev() == fr:
1067 rev = p.rev()
1092 rev = p.rev()
1068 break
1093 break
1069 if rev == lkr: # no shadowed entry found
1094 if rev == lkr: # no shadowed entry found
1070 # XXX This should never happen unless some manifest points
1095 # XXX This should never happen unless some manifest points
1071 # to biggish file revisions (like a revision that uses a
1096 # to biggish file revisions (like a revision that uses a
1072 # parent that never appears in the manifest ancestors)
1097 # parent that never appears in the manifest ancestors)
1073 continue
1098 continue
1074
1099
1075 # Fill the data for the next iteration.
1100 # Fill the data for the next iteration.
1076 for p in fl.parentrevs(fr):
1101 for p in fl.parentrevs(fr):
1077 if 0 <= p and p not in lowestchild:
1102 if 0 <= p and p not in lowestchild:
1078 lowestchild[p] = fr
1103 lowestchild[p] = fr
1079 backrevref[fr] = rev
1104 backrevref[fr] = rev
1080 s.add(rev)
1105 s.add(rev)
1081
1106
1082 return subset & s
1107 return subset & s
1083
1108
@predicate('first(set, [n])')
def first(repo, subset, x):
    """An alias for limit().
    """
    # identical semantics; simply delegate to limit()
    return limit(repo, subset, x)
1089
1114
def _follow(repo, subset, x, name, followfirst=False):
    # Shared implementation behind follow() and _followfirst(): with a
    # pattern, follow the history of every matching file in the working
    # directory's parent; without one, follow the changeset ancestry.
    args = getargs(x, 0, 1, _("%s takes no arguments or a pattern") % name)
    wctx = repo['.']
    if args:
        pat = getstring(args[0], _("%s expected a pattern") % name)
        m = matchmod.match(repo.root, repo.getcwd(), [pat],
                           ctx=repo[None], default='path')

        s = set()
        for fname in wctx:
            if not m(fname):
                continue
            fctx = wctx[fname]
            s.update(actx.rev() for actx in fctx.ancestors(followfirst))
            # include the revision responsible for the most recent version
            s.add(fctx.introrev())
    else:
        s = _revancestors(repo, baseset([wctx.rev()]), followfirst)

    return subset & s
1109
1134
@predicate('follow([pattern])')
def follow(repo, subset, x):
    """
    An alias for ``::.`` (ancestors of the working directory's first parent).
    If pattern is specified, the histories of files matching given
    pattern is followed, including copies.
    """
    # all of the work is done by the shared helper
    return _follow(repo, subset, x, 'follow')
1118
1143
@predicate('_followfirst')
def _followfirst(repo, subset, x):
    # ``followfirst([pattern])``
    # Like ``follow([pattern])`` but follows only the first parent of
    # every revisions or files revisions.
    # Delegates to the shared helper with first-parent-only traversal.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)
1125
1150
@predicate('all()')
def getall(repo, subset, x):
    """All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    # intersecting with the full span drops "null" if it was in subset
    return subset & spanset(repo)
1133
1158
@predicate('grep(regex)')
def grep(repo, subset, x):
    """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
    to ensure special escape characters are handled correctly. Unlike
    ``keyword(string)``, the match is case-sensitive.
    """
    try:
        # i18n: "grep" is a keyword
        pattern = re.compile(getstring(x, _("grep requires a string")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s') % e)

    def matches(r):
        ctx = repo[r]
        # scan the changed files, then the user and the description
        return any(pattern.search(t)
                   for t in ctx.files() + [ctx.user(), ctx.description()])

    return subset.filter(matches)
1154
1179
@predicate('_matchfiles')
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    # i18n: "_matchfiles" is a keyword
    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
    pats, inc, exc = [], [], []
    rev, default = None, None
    # the three list-valued prefixes share identical handling
    listfor = {'p:': pats, 'i:': inc, 'x:': exc}
    for arg in l:
        # i18n: "_matchfiles" is a keyword
        s = getstring(arg, _("_matchfiles requires string arguments"))
        prefix, value = s[:2], s[2:]
        if prefix in listfor:
            listfor[prefix].append(value)
        elif prefix == 'r:':
            if rev is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'revision'))
            if value != '': # empty means working directory; leave rev as None
                rev = value
        elif prefix == 'd:':
            if default is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'default mode'))
            default = value
        else:
            # i18n: "_matchfiles" is a keyword
            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
    if not default:
        default = 'glob'

    m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                       exclude=exc, ctx=repo[rev], default=default)

    # This directly read the changelog data as creating changectx for all
    # revisions is quite expensive.
    getfiles = repo.changelog.readfiles
    wdirrev = node.wdirrev
    def matches(x):
        if x == wdirrev:
            # the working directory has no changelog entry; ask the context
            files = repo[x].files()
        else:
            files = getfiles(x)
        return any(m(f) for f in files)

    return subset.filter(matches)
1220
1245
@predicate('file(pattern)')
def hasfile(repo, subset, x):
    """Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    # reuse the _matchfiles machinery with a single 'p:' pattern argument
    return _matchfiles(repo, subset, ('string', 'p:' + pat))
1233
1258
@predicate('head()')
def head(repo, subset, x):
    """Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    cl = repo.changelog
    hs = set()
    # collect the revs of every head node of every named branch
    for branch, nodes in repo.branchmap().iteritems():
        for n in nodes:
            hs.add(cl.rev(n))
    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
    # This does not break because of other fullreposet misbehavior.
    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
    # necessary to ensure we preserve the order in subset.
    return baseset(hs) & subset
1249
1274
@predicate('heads(set)')
def heads(repo, subset, x):
    """Members of set with no children in set.
    """
    # a member is a head unless it is also a parent of another member
    members = getset(repo, subset, x)
    parentrevs = parents(repo, subset, x)
    return members - parentrevs
1257
1282
@predicate('hidden()')
def hidden(repo, subset, x):
    """Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    # the revisions filtered out of the 'visible' view are the hidden ones
    return subset & repoview.filterrevs(repo, 'visible')
1266
1291
@predicate('keyword(string)')
def keyword(repo, subset, x):
    """Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        c = repo[r]
        # case-insensitive scan over files, user and description
        for t in c.files() + [c.user(), c.description()]:
            if kw in encoding.lower(t):
                return True
        return False

    return subset.filter(matches)
1281
1306
@predicate('limit(set[, n[, offset]])')
def limit(repo, subset, x):
    """First n members of set, defaulting to 1, starting from offset.
    """
    args = getargsdict(x, 'limit', 'set n offset')
    if 'set' not in args:
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit requires one to three arguments"))
    try:
        lim, ofs = 1, 0
        if 'n' in args:
            # i18n: "limit" is a keyword
            lim = int(getstring(args['n'], _("limit requires a number")))
        if 'offset' in args:
            # i18n: "limit" is a keyword
            ofs = int(getstring(args['offset'], _("limit requires a number")))
        if ofs < 0:
            raise error.ParseError(_("negative offset"))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    revs = getset(repo, fullreposet(repo), args['set'])
    selected = []
    reviter = iter(revs)
    # skip the first 'ofs' revisions of the inner set
    for _skip in xrange(ofs):
        if next(reviter, None) is None:
            break
    # then keep up to 'lim' revisions that also appear in subset
    for _count in xrange(lim):
        rev = next(reviter, None)
        if rev is None:
            break
        if rev in subset:
            selected.append(rev)
    return baseset(selected)
1317
1342
@predicate('last(set, [n])')
def last(repo, subset, x):
    """Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    revs = getset(repo, fullreposet(repo), l[0])
    # walk the inner set from its end by reversing it in place
    revs.reverse()
    selected = []
    reviter = iter(revs)
    for _count in xrange(lim):
        rev = next(reviter, None)
        if rev is None:
            break
        if rev in subset:
            selected.append(rev)
    return baseset(selected)
1343
1368
@predicate('max(set)')
def maxrev(repo, subset, x):
    """Changeset with highest revision number in set.
    """
    revs = getset(repo, fullreposet(repo), x)
    try:
        # revs.max() raises ValueError when the collection is empty,
        # the same way the builtin max() does.
        candidate = revs.max()
        if candidate in subset:
            return baseset([candidate])
    except ValueError:
        pass
    return baseset()
1358
1383
@predicate('merge()')
def merge(repo, subset, x):
    """Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    cl = repo.changelog

    def ismerge(r):
        # a merge has a non-null second parent
        return cl.parentrevs(r)[1] != -1

    return subset.filter(ismerge)
1367
1392
@predicate('branchpoint()')
def branchpoint(repo, subset, x):
    """Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return baseset()
    # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
    # (and if it is not, it should.)
    baserev = min(subset)
    # number of children counted per revision, indexed by rev - baserev
    nchildren = [0] * (len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                nchildren[p - baserev] += 1
    return subset.filter(lambda r: nchildren[r - baserev] > 1)
1386
1411
@predicate('min(set)')
def minrev(repo, subset, x):
    """Changeset with lowest revision number in set.
    """
    revs = getset(repo, fullreposet(repo), x)
    try:
        # revs.min() raises ValueError when the collection is empty,
        # the same way the builtin min() does.
        candidate = revs.min()
        if candidate in subset:
            return baseset([candidate])
    except ValueError:
        pass
    return baseset()
1401
1426
@predicate('modifies(pattern)')
def modifies(repo, subset, x):
    """Changesets modifying files matched by pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "modifies" is a keyword
    pat = getstring(x, _("modifies requires a pattern"))
    # status field index 0 selects "modified" files
    return checkstatus(repo, subset, pat, 0)
1413
1438
@predicate('named(namespace)')
def named(repo, subset, x):
    """The changesets in a given namespace.

    If `namespace` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a namespace that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "named" is a keyword
    args = getargs(x, 1, 1, _('named requires a namespace argument'))

    ns = getstring(args[0],
                   # i18n: "named" is a keyword
                   _('the argument to named must be a string'))
    kind, pattern, matcher = util.stringmatcher(ns)
    namespaces = set()
    if kind == 'literal':
        if pattern not in repo.names:
            raise error.RepoLookupError(_("namespace '%s' does not exist")
                                        % ns)
        namespaces.add(repo.names[pattern])
    else:
        # regex (or other) pattern: keep every namespace whose name matches
        namespaces.update(candidate
                          for name, candidate in repo.names.iteritems()
                          if matcher(name))
        if not namespaces:
            raise error.RepoLookupError(_("no namespace exists"
                                          " that match '%s'") % pattern)

    names = set()
    for ns in namespaces:
        for name in ns.listnames(repo):
            if name in ns.deprecated:
                continue
            names.update(repo[n].rev() for n in ns.nodes(repo, name))

    # the null revision is never a meaningful member of the result
    names.discard(node.nullrev)
    return subset & names
1451
1476
@predicate('id(string)')
def node_(repo, subset, x):
    """Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    l = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    n = getstring(l[0], _("id requires a string"))
    rn = None
    if len(n) == 40:
        # a full 40-character hex node: resolve it directly
        try:
            rn = repo.changelog.rev(node.bin(n))
        except (LookupError, TypeError):
            rn = None
    else:
        # a prefix: rely on the changelog's unambiguous-prefix lookup
        pm = repo.changelog._partialmatch(n)
        if pm is not None:
            rn = repo.changelog.rev(pm)

    if rn is None:
        return baseset()
    return baseset([rn]) & subset
1475
1500
@predicate('obsolete()')
def obsolete(repo, subset, x):
    """Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    # intersect with the precomputed set of obsolete revisions
    return subset & obsmod.getrevs(repo, 'obsolete')
1483
1508
@predicate('only(set, [set])')
def only(repo, subset, x):
    """Changesets that are ancestors of the first set that are not ancestors
    of any other head in the repo. If a second set is specified, the result
    is ancestors of the first set that are not ancestors of the second set
    (i.e. ::<set1> - ::<set2>).
    """
    cl = repo.changelog
    # i18n: "only" is a keyword
    args = getargs(x, 1, 2, _('only takes one or two arguments'))
    include = getset(repo, fullreposet(repo), args[0])
    if len(args) == 1:
        if not include:
            return baseset()

        # with a single argument, exclude every repo head that is neither a
        # descendant of the included set nor part of it
        descendants = set(_revdescendants(repo, include, False))
        # idiomatic membership test (PEP 8 E713: use 'not in', not 'not x in')
        exclude = [rev for rev in cl.headrevs()
                   if rev not in descendants and rev not in include]
    else:
        exclude = getset(repo, fullreposet(repo), args[1])

    results = set(cl.findmissingrevs(common=exclude, heads=include))
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & results
1509
1534
1510 @predicate('origin([set])')
1535 @predicate('origin([set])')
1511 def origin(repo, subset, x):
1536 def origin(repo, subset, x):
1512 """
1537 """
1513 Changesets that were specified as a source for the grafts, transplants or
1538 Changesets that were specified as a source for the grafts, transplants or
1514 rebases that created the given revisions. Omitting the optional set is the
1539 rebases that created the given revisions. Omitting the optional set is the
1515 same as passing all(). If a changeset created by these operations is itself
1540 same as passing all(). If a changeset created by these operations is itself
1516 specified as a source for one of these operations, only the source changeset
1541 specified as a source for one of these operations, only the source changeset
1517 for the first operation is selected.
1542 for the first operation is selected.
1518 """
1543 """
1519 if x is not None:
1544 if x is not None:
1520 dests = getset(repo, fullreposet(repo), x)
1545 dests = getset(repo, fullreposet(repo), x)
1521 else:
1546 else:
1522 dests = fullreposet(repo)
1547 dests = fullreposet(repo)
1523
1548
1524 def _firstsrc(rev):
1549 def _firstsrc(rev):
1525 src = _getrevsource(repo, rev)
1550 src = _getrevsource(repo, rev)
1526 if src is None:
1551 if src is None:
1527 return None
1552 return None
1528
1553
1529 while True:
1554 while True:
1530 prev = _getrevsource(repo, src)
1555 prev = _getrevsource(repo, src)
1531
1556
1532 if prev is None:
1557 if prev is None:
1533 return src
1558 return src
1534 src = prev
1559 src = prev
1535
1560
1536 o = set([_firstsrc(r) for r in dests])
1561 o = set([_firstsrc(r) for r in dests])
1537 o -= set([None])
1562 o -= set([None])
1538 # XXX we should turn this into a baseset instead of a set, smartset may do
1563 # XXX we should turn this into a baseset instead of a set, smartset may do
1539 # some optimisations from the fact this is a baseset.
1564 # some optimisations from the fact this is a baseset.
1540 return subset & o
1565 return subset & o
1541
1566
1542 @predicate('outgoing([path])')
1567 @predicate('outgoing([path])')
1543 def outgoing(repo, subset, x):
1568 def outgoing(repo, subset, x):
1544 """Changesets not found in the specified destination repository, or the
1569 """Changesets not found in the specified destination repository, or the
1545 default push location.
1570 default push location.
1546 """
1571 """
1547 # Avoid cycles.
1572 # Avoid cycles.
1548 from . import (
1573 from . import (
1549 discovery,
1574 discovery,
1550 hg,
1575 hg,
1551 )
1576 )
1552 # i18n: "outgoing" is a keyword
1577 # i18n: "outgoing" is a keyword
1553 l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
1578 l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
1554 # i18n: "outgoing" is a keyword
1579 # i18n: "outgoing" is a keyword
1555 dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
1580 dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
1556 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
1581 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
1557 dest, branches = hg.parseurl(dest)
1582 dest, branches = hg.parseurl(dest)
1558 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1583 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1559 if revs:
1584 if revs:
1560 revs = [repo.lookup(rev) for rev in revs]
1585 revs = [repo.lookup(rev) for rev in revs]
1561 other = hg.peer(repo, {}, dest)
1586 other = hg.peer(repo, {}, dest)
1562 repo.ui.pushbuffer()
1587 repo.ui.pushbuffer()
1563 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
1588 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
1564 repo.ui.popbuffer()
1589 repo.ui.popbuffer()
1565 cl = repo.changelog
1590 cl = repo.changelog
1566 o = set([cl.rev(r) for r in outgoing.missing])
1591 o = set([cl.rev(r) for r in outgoing.missing])
1567 return subset & o
1592 return subset & o
1568
1593
1569 @predicate('p1([set])')
1594 @predicate('p1([set])')
1570 def p1(repo, subset, x):
1595 def p1(repo, subset, x):
1571 """First parent of changesets in set, or the working directory.
1596 """First parent of changesets in set, or the working directory.
1572 """
1597 """
1573 if x is None:
1598 if x is None:
1574 p = repo[x].p1().rev()
1599 p = repo[x].p1().rev()
1575 if p >= 0:
1600 if p >= 0:
1576 return subset & baseset([p])
1601 return subset & baseset([p])
1577 return baseset()
1602 return baseset()
1578
1603
1579 ps = set()
1604 ps = set()
1580 cl = repo.changelog
1605 cl = repo.changelog
1581 for r in getset(repo, fullreposet(repo), x):
1606 for r in getset(repo, fullreposet(repo), x):
1582 ps.add(cl.parentrevs(r)[0])
1607 ps.add(cl.parentrevs(r)[0])
1583 ps -= set([node.nullrev])
1608 ps -= set([node.nullrev])
1584 # XXX we should turn this into a baseset instead of a set, smartset may do
1609 # XXX we should turn this into a baseset instead of a set, smartset may do
1585 # some optimisations from the fact this is a baseset.
1610 # some optimisations from the fact this is a baseset.
1586 return subset & ps
1611 return subset & ps
1587
1612
1588 @predicate('p2([set])')
1613 @predicate('p2([set])')
1589 def p2(repo, subset, x):
1614 def p2(repo, subset, x):
1590 """Second parent of changesets in set, or the working directory.
1615 """Second parent of changesets in set, or the working directory.
1591 """
1616 """
1592 if x is None:
1617 if x is None:
1593 ps = repo[x].parents()
1618 ps = repo[x].parents()
1594 try:
1619 try:
1595 p = ps[1].rev()
1620 p = ps[1].rev()
1596 if p >= 0:
1621 if p >= 0:
1597 return subset & baseset([p])
1622 return subset & baseset([p])
1598 return baseset()
1623 return baseset()
1599 except IndexError:
1624 except IndexError:
1600 return baseset()
1625 return baseset()
1601
1626
1602 ps = set()
1627 ps = set()
1603 cl = repo.changelog
1628 cl = repo.changelog
1604 for r in getset(repo, fullreposet(repo), x):
1629 for r in getset(repo, fullreposet(repo), x):
1605 ps.add(cl.parentrevs(r)[1])
1630 ps.add(cl.parentrevs(r)[1])
1606 ps -= set([node.nullrev])
1631 ps -= set([node.nullrev])
1607 # XXX we should turn this into a baseset instead of a set, smartset may do
1632 # XXX we should turn this into a baseset instead of a set, smartset may do
1608 # some optimisations from the fact this is a baseset.
1633 # some optimisations from the fact this is a baseset.
1609 return subset & ps
1634 return subset & ps
1610
1635
1611 @predicate('parents([set])')
1636 @predicate('parents([set])')
1612 def parents(repo, subset, x):
1637 def parents(repo, subset, x):
1613 """
1638 """
1614 The set of all parents for all changesets in set, or the working directory.
1639 The set of all parents for all changesets in set, or the working directory.
1615 """
1640 """
1616 if x is None:
1641 if x is None:
1617 ps = set(p.rev() for p in repo[x].parents())
1642 ps = set(p.rev() for p in repo[x].parents())
1618 else:
1643 else:
1619 ps = set()
1644 ps = set()
1620 cl = repo.changelog
1645 cl = repo.changelog
1621 up = ps.update
1646 up = ps.update
1622 parentrevs = cl.parentrevs
1647 parentrevs = cl.parentrevs
1623 for r in getset(repo, fullreposet(repo), x):
1648 for r in getset(repo, fullreposet(repo), x):
1624 if r == node.wdirrev:
1649 if r == node.wdirrev:
1625 up(p.rev() for p in repo[r].parents())
1650 up(p.rev() for p in repo[r].parents())
1626 else:
1651 else:
1627 up(parentrevs(r))
1652 up(parentrevs(r))
1628 ps -= set([node.nullrev])
1653 ps -= set([node.nullrev])
1629 return subset & ps
1654 return subset & ps
1630
1655
1631 def _phase(repo, subset, target):
1656 def _phase(repo, subset, target):
1632 """helper to select all rev in phase <target>"""
1657 """helper to select all rev in phase <target>"""
1633 repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
1658 repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
1634 if repo._phasecache._phasesets:
1659 if repo._phasecache._phasesets:
1635 s = repo._phasecache._phasesets[target] - repo.changelog.filteredrevs
1660 s = repo._phasecache._phasesets[target] - repo.changelog.filteredrevs
1636 s = baseset(s)
1661 s = baseset(s)
1637 s.sort() # set are non ordered, so we enforce ascending
1662 s.sort() # set are non ordered, so we enforce ascending
1638 return subset & s
1663 return subset & s
1639 else:
1664 else:
1640 phase = repo._phasecache.phase
1665 phase = repo._phasecache.phase
1641 condition = lambda r: phase(repo, r) == target
1666 condition = lambda r: phase(repo, r) == target
1642 return subset.filter(condition, cache=False)
1667 return subset.filter(condition, cache=False)
1643
1668
1644 @predicate('draft()')
1669 @predicate('draft()')
1645 def draft(repo, subset, x):
1670 def draft(repo, subset, x):
1646 """Changeset in draft phase."""
1671 """Changeset in draft phase."""
1647 # i18n: "draft" is a keyword
1672 # i18n: "draft" is a keyword
1648 getargs(x, 0, 0, _("draft takes no arguments"))
1673 getargs(x, 0, 0, _("draft takes no arguments"))
1649 target = phases.draft
1674 target = phases.draft
1650 return _phase(repo, subset, target)
1675 return _phase(repo, subset, target)
1651
1676
1652 @predicate('secret()')
1677 @predicate('secret()')
1653 def secret(repo, subset, x):
1678 def secret(repo, subset, x):
1654 """Changeset in secret phase."""
1679 """Changeset in secret phase."""
1655 # i18n: "secret" is a keyword
1680 # i18n: "secret" is a keyword
1656 getargs(x, 0, 0, _("secret takes no arguments"))
1681 getargs(x, 0, 0, _("secret takes no arguments"))
1657 target = phases.secret
1682 target = phases.secret
1658 return _phase(repo, subset, target)
1683 return _phase(repo, subset, target)
1659
1684
1660 def parentspec(repo, subset, x, n):
1685 def parentspec(repo, subset, x, n):
1661 """``set^0``
1686 """``set^0``
1662 The set.
1687 The set.
1663 ``set^1`` (or ``set^``), ``set^2``
1688 ``set^1`` (or ``set^``), ``set^2``
1664 First or second parent, respectively, of all changesets in set.
1689 First or second parent, respectively, of all changesets in set.
1665 """
1690 """
1666 try:
1691 try:
1667 n = int(n[1])
1692 n = int(n[1])
1668 if n not in (0, 1, 2):
1693 if n not in (0, 1, 2):
1669 raise ValueError
1694 raise ValueError
1670 except (TypeError, ValueError):
1695 except (TypeError, ValueError):
1671 raise error.ParseError(_("^ expects a number 0, 1, or 2"))
1696 raise error.ParseError(_("^ expects a number 0, 1, or 2"))
1672 ps = set()
1697 ps = set()
1673 cl = repo.changelog
1698 cl = repo.changelog
1674 for r in getset(repo, fullreposet(repo), x):
1699 for r in getset(repo, fullreposet(repo), x):
1675 if n == 0:
1700 if n == 0:
1676 ps.add(r)
1701 ps.add(r)
1677 elif n == 1:
1702 elif n == 1:
1678 ps.add(cl.parentrevs(r)[0])
1703 ps.add(cl.parentrevs(r)[0])
1679 elif n == 2:
1704 elif n == 2:
1680 parents = cl.parentrevs(r)
1705 parents = cl.parentrevs(r)
1681 if len(parents) > 1:
1706 if len(parents) > 1:
1682 ps.add(parents[1])
1707 ps.add(parents[1])
1683 return subset & ps
1708 return subset & ps
1684
1709
1685 @predicate('present(set)')
1710 @predicate('present(set)')
1686 def present(repo, subset, x):
1711 def present(repo, subset, x):
1687 """An empty set, if any revision in set isn't found; otherwise,
1712 """An empty set, if any revision in set isn't found; otherwise,
1688 all revisions in set.
1713 all revisions in set.
1689
1714
1690 If any of specified revisions is not present in the local repository,
1715 If any of specified revisions is not present in the local repository,
1691 the query is normally aborted. But this predicate allows the query
1716 the query is normally aborted. But this predicate allows the query
1692 to continue even in such cases.
1717 to continue even in such cases.
1693 """
1718 """
1694 try:
1719 try:
1695 return getset(repo, subset, x)
1720 return getset(repo, subset, x)
1696 except error.RepoLookupError:
1721 except error.RepoLookupError:
1697 return baseset()
1722 return baseset()
1698
1723
1699 # for internal use
1724 # for internal use
1700 @predicate('_notpublic')
1725 @predicate('_notpublic')
1701 def _notpublic(repo, subset, x):
1726 def _notpublic(repo, subset, x):
1702 getargs(x, 0, 0, "_notpublic takes no arguments")
1727 getargs(x, 0, 0, "_notpublic takes no arguments")
1703 repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
1728 repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
1704 if repo._phasecache._phasesets:
1729 if repo._phasecache._phasesets:
1705 s = set()
1730 s = set()
1706 for u in repo._phasecache._phasesets[1:]:
1731 for u in repo._phasecache._phasesets[1:]:
1707 s.update(u)
1732 s.update(u)
1708 s = baseset(s - repo.changelog.filteredrevs)
1733 s = baseset(s - repo.changelog.filteredrevs)
1709 s.sort()
1734 s.sort()
1710 return subset & s
1735 return subset & s
1711 else:
1736 else:
1712 phase = repo._phasecache.phase
1737 phase = repo._phasecache.phase
1713 target = phases.public
1738 target = phases.public
1714 condition = lambda r: phase(repo, r) != target
1739 condition = lambda r: phase(repo, r) != target
1715 return subset.filter(condition, cache=False)
1740 return subset.filter(condition, cache=False)
1716
1741
1717 @predicate('public()')
1742 @predicate('public()')
1718 def public(repo, subset, x):
1743 def public(repo, subset, x):
1719 """Changeset in public phase."""
1744 """Changeset in public phase."""
1720 # i18n: "public" is a keyword
1745 # i18n: "public" is a keyword
1721 getargs(x, 0, 0, _("public takes no arguments"))
1746 getargs(x, 0, 0, _("public takes no arguments"))
1722 phase = repo._phasecache.phase
1747 phase = repo._phasecache.phase
1723 target = phases.public
1748 target = phases.public
1724 condition = lambda r: phase(repo, r) == target
1749 condition = lambda r: phase(repo, r) == target
1725 return subset.filter(condition, cache=False)
1750 return subset.filter(condition, cache=False)
1726
1751
1727 @predicate('remote([id [,path]])')
1752 @predicate('remote([id [,path]])')
1728 def remote(repo, subset, x):
1753 def remote(repo, subset, x):
1729 """Local revision that corresponds to the given identifier in a
1754 """Local revision that corresponds to the given identifier in a
1730 remote repository, if present. Here, the '.' identifier is a
1755 remote repository, if present. Here, the '.' identifier is a
1731 synonym for the current local branch.
1756 synonym for the current local branch.
1732 """
1757 """
1733
1758
1734 from . import hg # avoid start-up nasties
1759 from . import hg # avoid start-up nasties
1735 # i18n: "remote" is a keyword
1760 # i18n: "remote" is a keyword
1736 l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))
1761 l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))
1737
1762
1738 q = '.'
1763 q = '.'
1739 if len(l) > 0:
1764 if len(l) > 0:
1740 # i18n: "remote" is a keyword
1765 # i18n: "remote" is a keyword
1741 q = getstring(l[0], _("remote requires a string id"))
1766 q = getstring(l[0], _("remote requires a string id"))
1742 if q == '.':
1767 if q == '.':
1743 q = repo['.'].branch()
1768 q = repo['.'].branch()
1744
1769
1745 dest = ''
1770 dest = ''
1746 if len(l) > 1:
1771 if len(l) > 1:
1747 # i18n: "remote" is a keyword
1772 # i18n: "remote" is a keyword
1748 dest = getstring(l[1], _("remote requires a repository path"))
1773 dest = getstring(l[1], _("remote requires a repository path"))
1749 dest = repo.ui.expandpath(dest or 'default')
1774 dest = repo.ui.expandpath(dest or 'default')
1750 dest, branches = hg.parseurl(dest)
1775 dest, branches = hg.parseurl(dest)
1751 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1776 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1752 if revs:
1777 if revs:
1753 revs = [repo.lookup(rev) for rev in revs]
1778 revs = [repo.lookup(rev) for rev in revs]
1754 other = hg.peer(repo, {}, dest)
1779 other = hg.peer(repo, {}, dest)
1755 n = other.lookup(q)
1780 n = other.lookup(q)
1756 if n in repo:
1781 if n in repo:
1757 r = repo[n].rev()
1782 r = repo[n].rev()
1758 if r in subset:
1783 if r in subset:
1759 return baseset([r])
1784 return baseset([r])
1760 return baseset()
1785 return baseset()
1761
1786
1762 @predicate('removes(pattern)')
1787 @predicate('removes(pattern)')
1763 def removes(repo, subset, x):
1788 def removes(repo, subset, x):
1764 """Changesets which remove files matching pattern.
1789 """Changesets which remove files matching pattern.
1765
1790
1766 The pattern without explicit kind like ``glob:`` is expected to be
1791 The pattern without explicit kind like ``glob:`` is expected to be
1767 relative to the current directory and match against a file or a
1792 relative to the current directory and match against a file or a
1768 directory.
1793 directory.
1769 """
1794 """
1770 # i18n: "removes" is a keyword
1795 # i18n: "removes" is a keyword
1771 pat = getstring(x, _("removes requires a pattern"))
1796 pat = getstring(x, _("removes requires a pattern"))
1772 return checkstatus(repo, subset, pat, 2)
1797 return checkstatus(repo, subset, pat, 2)
1773
1798
1774 @predicate('rev(number)')
1799 @predicate('rev(number)')
1775 def rev(repo, subset, x):
1800 def rev(repo, subset, x):
1776 """Revision with the given numeric identifier.
1801 """Revision with the given numeric identifier.
1777 """
1802 """
1778 # i18n: "rev" is a keyword
1803 # i18n: "rev" is a keyword
1779 l = getargs(x, 1, 1, _("rev requires one argument"))
1804 l = getargs(x, 1, 1, _("rev requires one argument"))
1780 try:
1805 try:
1781 # i18n: "rev" is a keyword
1806 # i18n: "rev" is a keyword
1782 l = int(getstring(l[0], _("rev requires a number")))
1807 l = int(getstring(l[0], _("rev requires a number")))
1783 except (TypeError, ValueError):
1808 except (TypeError, ValueError):
1784 # i18n: "rev" is a keyword
1809 # i18n: "rev" is a keyword
1785 raise error.ParseError(_("rev expects a number"))
1810 raise error.ParseError(_("rev expects a number"))
1786 if l not in repo.changelog and l != node.nullrev:
1811 if l not in repo.changelog and l != node.nullrev:
1787 return baseset()
1812 return baseset()
1788 return subset & baseset([l])
1813 return subset & baseset([l])
1789
1814
1790 @predicate('matching(revision [, field])')
1815 @predicate('matching(revision [, field])')
1791 def matching(repo, subset, x):
1816 def matching(repo, subset, x):
1792 """Changesets in which a given set of fields match the set of fields in the
1817 """Changesets in which a given set of fields match the set of fields in the
1793 selected revision or set.
1818 selected revision or set.
1794
1819
1795 To match more than one field pass the list of fields to match separated
1820 To match more than one field pass the list of fields to match separated
1796 by spaces (e.g. ``author description``).
1821 by spaces (e.g. ``author description``).
1797
1822
1798 Valid fields are most regular revision fields and some special fields.
1823 Valid fields are most regular revision fields and some special fields.
1799
1824
1800 Regular revision fields are ``description``, ``author``, ``branch``,
1825 Regular revision fields are ``description``, ``author``, ``branch``,
1801 ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
1826 ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
1802 and ``diff``.
1827 and ``diff``.
1803 Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
1828 Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
1804 contents of the revision. Two revisions matching their ``diff`` will
1829 contents of the revision. Two revisions matching their ``diff`` will
1805 also match their ``files``.
1830 also match their ``files``.
1806
1831
1807 Special fields are ``summary`` and ``metadata``:
1832 Special fields are ``summary`` and ``metadata``:
1808 ``summary`` matches the first line of the description.
1833 ``summary`` matches the first line of the description.
1809 ``metadata`` is equivalent to matching ``description user date``
1834 ``metadata`` is equivalent to matching ``description user date``
1810 (i.e. it matches the main metadata fields).
1835 (i.e. it matches the main metadata fields).
1811
1836
1812 ``metadata`` is the default field which is used when no fields are
1837 ``metadata`` is the default field which is used when no fields are
1813 specified. You can match more than one field at a time.
1838 specified. You can match more than one field at a time.
1814 """
1839 """
1815 # i18n: "matching" is a keyword
1840 # i18n: "matching" is a keyword
1816 l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
1841 l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
1817
1842
1818 revs = getset(repo, fullreposet(repo), l[0])
1843 revs = getset(repo, fullreposet(repo), l[0])
1819
1844
1820 fieldlist = ['metadata']
1845 fieldlist = ['metadata']
1821 if len(l) > 1:
1846 if len(l) > 1:
1822 fieldlist = getstring(l[1],
1847 fieldlist = getstring(l[1],
1823 # i18n: "matching" is a keyword
1848 # i18n: "matching" is a keyword
1824 _("matching requires a string "
1849 _("matching requires a string "
1825 "as its second argument")).split()
1850 "as its second argument")).split()
1826
1851
1827 # Make sure that there are no repeated fields,
1852 # Make sure that there are no repeated fields,
1828 # expand the 'special' 'metadata' field type
1853 # expand the 'special' 'metadata' field type
1829 # and check the 'files' whenever we check the 'diff'
1854 # and check the 'files' whenever we check the 'diff'
1830 fields = []
1855 fields = []
1831 for field in fieldlist:
1856 for field in fieldlist:
1832 if field == 'metadata':
1857 if field == 'metadata':
1833 fields += ['user', 'description', 'date']
1858 fields += ['user', 'description', 'date']
1834 elif field == 'diff':
1859 elif field == 'diff':
1835 # a revision matching the diff must also match the files
1860 # a revision matching the diff must also match the files
1836 # since matching the diff is very costly, make sure to
1861 # since matching the diff is very costly, make sure to
1837 # also match the files first
1862 # also match the files first
1838 fields += ['files', 'diff']
1863 fields += ['files', 'diff']
1839 else:
1864 else:
1840 if field == 'author':
1865 if field == 'author':
1841 field = 'user'
1866 field = 'user'
1842 fields.append(field)
1867 fields.append(field)
1843 fields = set(fields)
1868 fields = set(fields)
1844 if 'summary' in fields and 'description' in fields:
1869 if 'summary' in fields and 'description' in fields:
1845 # If a revision matches its description it also matches its summary
1870 # If a revision matches its description it also matches its summary
1846 fields.discard('summary')
1871 fields.discard('summary')
1847
1872
1848 # We may want to match more than one field
1873 # We may want to match more than one field
1849 # Not all fields take the same amount of time to be matched
1874 # Not all fields take the same amount of time to be matched
1850 # Sort the selected fields in order of increasing matching cost
1875 # Sort the selected fields in order of increasing matching cost
1851 fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
1876 fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
1852 'files', 'description', 'substate', 'diff']
1877 'files', 'description', 'substate', 'diff']
1853 def fieldkeyfunc(f):
1878 def fieldkeyfunc(f):
1854 try:
1879 try:
1855 return fieldorder.index(f)
1880 return fieldorder.index(f)
1856 except ValueError:
1881 except ValueError:
1857 # assume an unknown field is very costly
1882 # assume an unknown field is very costly
1858 return len(fieldorder)
1883 return len(fieldorder)
1859 fields = list(fields)
1884 fields = list(fields)
1860 fields.sort(key=fieldkeyfunc)
1885 fields.sort(key=fieldkeyfunc)
1861
1886
1862 # Each field will be matched with its own "getfield" function
1887 # Each field will be matched with its own "getfield" function
1863 # which will be added to the getfieldfuncs array of functions
1888 # which will be added to the getfieldfuncs array of functions
1864 getfieldfuncs = []
1889 getfieldfuncs = []
1865 _funcs = {
1890 _funcs = {
1866 'user': lambda r: repo[r].user(),
1891 'user': lambda r: repo[r].user(),
1867 'branch': lambda r: repo[r].branch(),
1892 'branch': lambda r: repo[r].branch(),
1868 'date': lambda r: repo[r].date(),
1893 'date': lambda r: repo[r].date(),
1869 'description': lambda r: repo[r].description(),
1894 'description': lambda r: repo[r].description(),
1870 'files': lambda r: repo[r].files(),
1895 'files': lambda r: repo[r].files(),
1871 'parents': lambda r: repo[r].parents(),
1896 'parents': lambda r: repo[r].parents(),
1872 'phase': lambda r: repo[r].phase(),
1897 'phase': lambda r: repo[r].phase(),
1873 'substate': lambda r: repo[r].substate,
1898 'substate': lambda r: repo[r].substate,
1874 'summary': lambda r: repo[r].description().splitlines()[0],
1899 'summary': lambda r: repo[r].description().splitlines()[0],
1875 'diff': lambda r: list(repo[r].diff(git=True),)
1900 'diff': lambda r: list(repo[r].diff(git=True),)
1876 }
1901 }
1877 for info in fields:
1902 for info in fields:
1878 getfield = _funcs.get(info, None)
1903 getfield = _funcs.get(info, None)
1879 if getfield is None:
1904 if getfield is None:
1880 raise error.ParseError(
1905 raise error.ParseError(
1881 # i18n: "matching" is a keyword
1906 # i18n: "matching" is a keyword
1882 _("unexpected field name passed to matching: %s") % info)
1907 _("unexpected field name passed to matching: %s") % info)
1883 getfieldfuncs.append(getfield)
1908 getfieldfuncs.append(getfield)
1884 # convert the getfield array of functions into a "getinfo" function
1909 # convert the getfield array of functions into a "getinfo" function
1885 # which returns an array of field values (or a single value if there
1910 # which returns an array of field values (or a single value if there
1886 # is only one field to match)
1911 # is only one field to match)
1887 getinfo = lambda r: [f(r) for f in getfieldfuncs]
1912 getinfo = lambda r: [f(r) for f in getfieldfuncs]
1888
1913
1889 def matches(x):
1914 def matches(x):
1890 for rev in revs:
1915 for rev in revs:
1891 target = getinfo(rev)
1916 target = getinfo(rev)
1892 match = True
1917 match = True
1893 for n, f in enumerate(getfieldfuncs):
1918 for n, f in enumerate(getfieldfuncs):
1894 if target[n] != f(x):
1919 if target[n] != f(x):
1895 match = False
1920 match = False
1896 if match:
1921 if match:
1897 return True
1922 return True
1898 return False
1923 return False
1899
1924
1900 return subset.filter(matches)
1925 return subset.filter(matches)
1901
1926
1902 @predicate('reverse(set)')
1927 @predicate('reverse(set)')
1903 def reverse(repo, subset, x):
1928 def reverse(repo, subset, x):
1904 """Reverse order of set.
1929 """Reverse order of set.
1905 """
1930 """
1906 l = getset(repo, subset, x)
1931 l = getset(repo, subset, x)
1907 l.reverse()
1932 l.reverse()
1908 return l
1933 return l
1909
1934
1910 @predicate('roots(set)')
1935 @predicate('roots(set)')
1911 def roots(repo, subset, x):
1936 def roots(repo, subset, x):
1912 """Changesets in set with no parent changeset in set.
1937 """Changesets in set with no parent changeset in set.
1913 """
1938 """
1914 s = getset(repo, fullreposet(repo), x)
1939 s = getset(repo, fullreposet(repo), x)
1915 parents = repo.changelog.parentrevs
1940 parents = repo.changelog.parentrevs
1916 def filter(r):
1941 def filter(r):
1917 for p in parents(r):
1942 for p in parents(r):
1918 if 0 <= p and p in s:
1943 if 0 <= p and p in s:
1919 return False
1944 return False
1920 return True
1945 return True
1921 return subset & s.filter(filter)
1946 return subset & s.filter(filter)
1922
1947
1923 @predicate('sort(set[, [-]key...])')
1948 @predicate('sort(set[, [-]key...])')
1924 def sort(repo, subset, x):
1949 def sort(repo, subset, x):
1925 """Sort set by keys. The default sort order is ascending, specify a key
1950 """Sort set by keys. The default sort order is ascending, specify a key
1926 as ``-key`` to sort in descending order.
1951 as ``-key`` to sort in descending order.
1927
1952
1928 The keys can be:
1953 The keys can be:
1929
1954
1930 - ``rev`` for the revision number,
1955 - ``rev`` for the revision number,
1931 - ``branch`` for the branch name,
1956 - ``branch`` for the branch name,
1932 - ``desc`` for the commit message (description),
1957 - ``desc`` for the commit message (description),
1933 - ``user`` for user name (``author`` can be used as an alias),
1958 - ``user`` for user name (``author`` can be used as an alias),
1934 - ``date`` for the commit date
1959 - ``date`` for the commit date
1935 """
1960 """
1936 # i18n: "sort" is a keyword
1961 # i18n: "sort" is a keyword
1937 l = getargs(x, 1, 2, _("sort requires one or two arguments"))
1962 l = getargs(x, 1, 2, _("sort requires one or two arguments"))
1938 keys = "rev"
1963 keys = "rev"
1939 if len(l) == 2:
1964 if len(l) == 2:
1940 # i18n: "sort" is a keyword
1965 # i18n: "sort" is a keyword
1941 keys = getstring(l[1], _("sort spec must be a string"))
1966 keys = getstring(l[1], _("sort spec must be a string"))
1942
1967
1943 s = l[0]
1968 s = l[0]
1944 keys = keys.split()
1969 keys = keys.split()
1945 l = []
1970 l = []
1946 def invert(s):
1971 def invert(s):
1947 return "".join(chr(255 - ord(c)) for c in s)
1972 return "".join(chr(255 - ord(c)) for c in s)
1948 revs = getset(repo, subset, s)
1973 revs = getset(repo, subset, s)
1949 if keys == ["rev"]:
1974 if keys == ["rev"]:
1950 revs.sort()
1975 revs.sort()
1951 return revs
1976 return revs
1952 elif keys == ["-rev"]:
1977 elif keys == ["-rev"]:
1953 revs.sort(reverse=True)
1978 revs.sort(reverse=True)
1954 return revs
1979 return revs
1955 for r in revs:
1980 for r in revs:
1956 c = repo[r]
1981 c = repo[r]
1957 e = []
1982 e = []
1958 for k in keys:
1983 for k in keys:
1959 if k == 'rev':
1984 if k == 'rev':
1960 e.append(r)
1985 e.append(r)
1961 elif k == '-rev':
1986 elif k == '-rev':
1962 e.append(-r)
1987 e.append(-r)
1963 elif k == 'branch':
1988 elif k == 'branch':
1964 e.append(c.branch())
1989 e.append(c.branch())
1965 elif k == '-branch':
1990 elif k == '-branch':
1966 e.append(invert(c.branch()))
1991 e.append(invert(c.branch()))
1967 elif k == 'desc':
1992 elif k == 'desc':
1968 e.append(c.description())
1993 e.append(c.description())
1969 elif k == '-desc':
1994 elif k == '-desc':
1970 e.append(invert(c.description()))
1995 e.append(invert(c.description()))
1971 elif k in 'user author':
1996 elif k in 'user author':
1972 e.append(c.user())
1997 e.append(c.user())
1973 elif k in '-user -author':
1998 elif k in '-user -author':
1974 e.append(invert(c.user()))
1999 e.append(invert(c.user()))
1975 elif k == 'date':
2000 elif k == 'date':
1976 e.append(c.date()[0])
2001 e.append(c.date()[0])
1977 elif k == '-date':
2002 elif k == '-date':
1978 e.append(-c.date()[0])
2003 e.append(-c.date()[0])
1979 else:
2004 else:
1980 raise error.ParseError(_("unknown sort key %r") % k)
2005 raise error.ParseError(_("unknown sort key %r") % k)
1981 e.append(r)
2006 e.append(r)
1982 l.append(e)
2007 l.append(e)
1983 l.sort()
2008 l.sort()
1984 return baseset([e[-1] for e in l])
2009 return baseset([e[-1] for e in l])
1985
2010
@predicate('subrepo([pattern])')
def subrepo(repo, subset, x):
    """Changesets that add, modify or remove the given subrepo.  If no
    subrepo pattern is named, any subrepo changes are returned.
    """
    # i18n: "subrepo" is a keyword
    args = getargs(x, 0, 1, _('subrepo takes at most one argument'))
    if args:
        pat = getstring(args[0], _("subrepo requires a pattern"))

    # only changesets touching .hgsubstate can affect a subrepo
    m = matchmod.exact(repo.root, repo.root, ['.hgsubstate'])

    def submatches(names):
        # lazily yield every subrepo name matching the requested pattern
        kind, p, match = util.stringmatcher(pat)
        for name in names:
            if match(name):
                yield name

    def matches(x):
        c = repo[x]
        s = repo.status(c.p1().node(), c.node(), match=m)

        if not args:
            # no pattern given: any .hgsubstate change qualifies
            return s.added or s.modified or s.removed

        if s.added:
            return any(submatches(c.substate.keys()))

        if s.modified:
            subs = set(c.p1().substate.keys())
            subs.update(c.substate.keys())

            for path in submatches(subs):
                if c.p1().substate.get(path) != c.substate.get(path):
                    return True

        if s.removed:
            return any(submatches(c.p1().substate.keys()))

        return False

    return subset.filter(matches)
2028
2053
def _substringmatcher(pattern):
    """Like util.stringmatcher, except that a 'literal' pattern tests
    substring containment rather than exact equality."""
    kind, pattern, matcher = util.stringmatcher(pattern)
    if kind != 'literal':
        return kind, pattern, matcher

    def contains(s):
        return pattern in s
    return kind, pattern, contains
2034
2059
@predicate('tag([name])')
def tag(repo, subset, x):
    """The specified tag by name, or all tagged revisions if no name is given.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a tag that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "tag" is a keyword
    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
    cl = repo.changelog
    if not args:
        # bare "tag()": every tagged revision except the floating 'tip'
        tagrevs = set(cl.rev(n) for t, n in repo.tagslist() if t != 'tip')
        return subset & tagrevs

    pattern = getstring(args[0],
                        # i18n: "tag" is a keyword
                        _('the argument to tag must be a string'))
    kind, pattern, matcher = util.stringmatcher(pattern)
    if kind == 'literal':
        # avoid resolving all tags
        tn = repo._tagscache.tags.get(pattern, None)
        if tn is None:
            raise error.RepoLookupError(_("tag '%s' does not exist")
                                        % pattern)
        tagrevs = set([repo[tn].rev()])
    else:
        tagrevs = set(cl.rev(n) for t, n in repo.tagslist() if matcher(t))
    return subset & tagrevs
2063
2088
@predicate('tagged')
def tagged(repo, subset, x):
    # pure alias for tag(); kept as a separate predicate for backward
    # compatibility (no docstring on purpose, matching the original —
    # presumably to keep it out of generated help; confirm before adding one)
    return tag(repo, subset, x)
2067
2092
@predicate('unstable()')
def unstable(repo, subset, x):
    """Non-obsolete changesets with obsolete ancestors.
    """
    # i18n: "unstable" is a keyword
    getargs(x, 0, 0, _("unstable takes no arguments"))
    return subset & obsmod.getrevs(repo, 'unstable')
2076
2101
2077
2102
@predicate('user(string)')
def user(repo, subset, x):
    """User name contains string. The match is case-insensitive.

    If `string` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a user that actually contains `re:`, use
    the prefix `literal:`.
    """
    # pure alias: argument parsing and matching are implemented in author()
    return author(repo, subset, x)
2087
2112
2088 # experimental
2113 # experimental
@predicate('wdir')
def wdir(repo, subset, x):
    # i18n: "wdir" is a keyword
    getargs(x, 0, 0, _("wdir takes no arguments"))
    # yield the working-directory pseudo revision when it is in subset;
    # a fullreposet implicitly contains it
    if isinstance(subset, fullreposet) or node.wdirrev in subset:
        return baseset([node.wdirrev])
    return baseset()
2096
2121
2097 # for internal use
2122 # for internal use
@predicate('_list')
def _list(repo, subset, x):
    # internal use only
    s = getstring(x, "internal error")
    if not s:
        return baseset()
    # remove duplicates here. it's difficult for caller to deduplicate sets
    # because different symbols can point to the same rev.
    cl = repo.changelog
    out = []
    seen = set()
    for tok in s.split('\0'):
        try:
            # fast path for integer revision
            r = int(tok)
            if str(r) != tok or r not in cl:
                raise ValueError
            revs = [r]
        except ValueError:
            revs = stringset(repo, subset, tok)

        for r in revs:
            if r in seen:
                continue
            seen.add(r)
            if r in subset:
                out.append(r)
            elif r == node.nullrev and isinstance(subset, fullreposet):
                out.append(r)
    return baseset(out)
2126
2151
2127 # for internal use
2152 # for internal use
@predicate('_intlist')
def _intlist(repo, subset, x):
    # internal use only
    spec = getstring(x, "internal error")
    if not spec:
        return baseset()
    revs = [int(tok) for tok in spec.split('\0')]
    return baseset([r for r in revs if r in subset])
2136
2161
2137 # for internal use
2162 # for internal use
@predicate('_hexlist')
def _hexlist(repo, subset, x):
    # internal use only
    spec = getstring(x, "internal error")
    if not spec:
        return baseset()
    cl = repo.changelog
    revs = [cl.rev(node.bin(tok)) for tok in spec.split('\0')]
    return baseset([r for r in revs if r in subset])
2147
2172
2148 # symbols which can't be used for a DoS attack for any given input
2173 # symbols which can't be used for a DoS attack for any given input
2149 # (e.g. those which accept regexes as plain strings shouldn't be included)
2174 # (e.g. those which accept regexes as plain strings shouldn't be included)
2150 # functions that just return a lot of changesets (like all) don't count here
2175 # functions that just return a lot of changesets (like all) don't count here
# same 68 names as before, spelled as one whitespace-separated string
safesymbols = set("""
    adds all ancestor ancestors _firstancestors author bisect bisected
    bookmark branch branchpoint bumped bundle children closed converted
    date desc descendants _firstdescendants destination divergent draft
    extinct extra file filelog first follow _followfirst head heads
    hidden id keyword last limit _matchfiles max merge min modifies
    obsolete only origin outgoing p1 p2 parents present public _notpublic
    remote removes rev reverse roots sort secret matching tag tagged user
    unstable wdir _list _intlist _hexlist
""".split())
2221
2246
# dispatch table: parse-tree node type -> function evaluating that node
methods = {
    "range": rangeset,
    "dagrange": dagrange,
    "string": stringset,
    "symbol": stringset,
    "and": andset,
    "or": orset,
    "not": notset,
    "list": listset,
    "keyvalue": keyvaluepair,
    "func": func,
    "ancestor": ancestorspec,
    "parent": parentspec,
    "parentpost": p1,
}
2237
2262
def optimize(x, small):
    """Optimize a parsed revset tree and weigh its evaluation cost.

    Returns a ``(weight, tree)`` pair: ``tree`` is an equivalent,
    possibly rewritten parse tree, and ``weight`` is a heuristic cost
    estimate used below to put the cheaper operand of an ``and`` first.
    ``small`` hints that the caller expects a small result set from this
    subexpression (it halves the bonus given to single revisions).
    """
    if x is None:
        return 0, x

    smallbonus = 1
    if small:
        smallbonus = .5

    op = x[0]
    # this first group of branches rewrites syntactic sugar into the
    # canonical node forms handled further down, then re-optimizes
    if op == 'minus':
        return optimize(('and', x[1], ('not', x[2])), small)
    elif op == 'only':
        return optimize(('func', ('symbol', 'only'),
                         ('list', x[1], x[2])), small)
    elif op == 'onlypost':
        return optimize(('func', ('symbol', 'only'), x[1]), small)
    elif op == 'dagrangepre':
        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
    elif op == 'dagrangepost':
        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
    elif op == 'rangeall':
        return optimize(('range', ('string', '0'), ('string', 'tip')), small)
    elif op == 'rangepre':
        return optimize(('range', ('string', '0'), x[1]), small)
    elif op == 'rangepost':
        return optimize(('range', x[1], ('string', 'tip')), small)
    elif op == 'negate':
        return optimize(('string',
                         '-' + getstring(x[1], _("can't negate that"))), small)
    elif op in 'string symbol negate':
        # NOTE(review): 'op in <space-separated string>' is a substring
        # test; the op values reaching here are whole words, so it acts
        # as membership in the three names
        return smallbonus, x # single revisions are small
    elif op == 'and':
        wa, ta = optimize(x[1], True)
        wb, tb = optimize(x[2], True)

        # (::x and not ::y)/(not ::y and ::x) have a fast path
        def isonly(revs, bases):
            return (
                revs is not None
                and revs[0] == 'func'
                and getstring(revs[1], _('not a symbol')) == 'ancestors'
                and bases is not None
                and bases[0] == 'not'
                and bases[1][0] == 'func'
                and getstring(bases[1][1], _('not a symbol')) == 'ancestors')

        w = min(wa, wb)
        if isonly(ta, tb):
            return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2]))
        if isonly(tb, ta):
            return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2]))

        if wa > wb:
            # evaluate the cheaper operand first
            return w, (op, tb, ta)
        return w, (op, ta, tb)
    elif op == 'or':
        # fast path for machine-generated expression, that is likely to have
        # lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()'
        ws, ts, ss = [], [], []
        def flushss():
            # fold the pending run of trivial revisions (ss) into a single
            # _list() node and append its weight/tree to ws/ts
            if not ss:
                return
            if len(ss) == 1:
                w, t = ss[0]
            else:
                s = '\0'.join(t[1] for w, t in ss)
                y = ('func', ('symbol', '_list'), ('string', s))
                w, t = optimize(y, False)
            ws.append(w)
            ts.append(t)
            del ss[:]
        for y in x[1:]:
            w, t = optimize(y, False)
            if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
                ss.append((w, t))
                continue
            flushss()
            ws.append(w)
            ts.append(t)
        flushss()
        if len(ts) == 1:
            return ws[0], ts[0] # 'or' operation is fully optimized out
        # we can't reorder trees by weight because it would change the order.
        # ("sort(a + b)" == "sort(b + a)", but "a + b" != "b + a")
        # ts = tuple(t for w, t in sorted(zip(ws, ts), key=lambda wt: wt[0]))
        return max(ws), (op,) + tuple(ts)
    elif op == 'not':
        # Optimize not public() to _notpublic() because we have a fast version
        if x[1] == ('func', ('symbol', 'public'), None):
            newsym = ('func', ('symbol', '_notpublic'), None)
            o = optimize(newsym, not small)
            return o[0], o[1]
        else:
            o = optimize(x[1], not small)
            return o[0], (op, o[1])
    elif op == 'parentpost':
        o = optimize(x[1], small)
        return o[0], (op, o[1])
    elif op == 'group':
        # parentheses add nothing; optimize the inner expression
        return optimize(x[1], small)
    elif op in 'dagrange range list parent ancestorspec':
        if op == 'parent':
            # x^:y means (x^) : y, not x ^ (:y)
            post = ('parentpost', x[1])
            if x[2][0] == 'dagrangepre':
                return optimize(('dagrange', post, x[2][1]), small)
            elif x[2][0] == 'rangepre':
                return optimize(('range', post, x[2][1]), small)

        wa, ta = optimize(x[1], small)
        wb, tb = optimize(x[2], small)
        return wa + wb, (op, ta, tb)
    elif op == 'func':
        f = getstring(x[1], _("not a symbol"))
        wa, ta = optimize(x[2], small)
        # rough cost classes for well-known predicates
        if f in ("author branch closed date desc file grep keyword "
                 "outgoing user"):
            w = 10 # slow
        elif f in "modifies adds removes":
            w = 30 # slower
        elif f == "contains":
            w = 100 # very slow
        elif f == "ancestor":
            w = 1 * smallbonus
        elif f in "reverse limit first _intlist":
            w = 0
        elif f in "sort":
            w = 10 # assume most sorts look at changelog
        else:
            w = 1
        return w + wa, (op, x[1], ta)
    return 1, x
2370
2395
# prefix shared by every alias-argument placeholder node
_aliasarg = ('func', ('symbol', '_aliasarg'))

def _getaliasarg(tree):
    """Return X when tree is ('func', ('symbol', '_aliasarg'), ('string', X)).

    Return None for any other tree.
    """
    if len(tree) != 3:
        return None
    if tree[:2] != _aliasarg:
        return None
    if tree[2][0] != 'string':
        return None
    return tree[2][1]
2380
2405
def _checkaliasarg(tree, known=None):
    """Check tree contains no _aliasarg construct or only ones whose
    value is in known. Used to avoid alias placeholders injection.
    """
    if not isinstance(tree, tuple):
        return
    arg = _getaliasarg(tree)
    if arg is not None and (not known or arg not in known):
        raise error.UnknownIdentifier('_aliasarg', [])
    # recurse into every child node (and the node-type string, which is
    # harmlessly skipped by the isinstance guard above)
    for subtree in tree:
        _checkaliasarg(subtree, known)
2391
2416
2392 # the set of valid characters for the initial letter of symbols in
2417 # the set of valid characters for the initial letter of symbols in
2393 # alias declarations and definitions
2418 # alias declarations and definitions
# range() behaves identically to xrange() here (the 256-element sequence is
# built once at import time) and keeps this line portable to Python 3
_aliassyminitletters = set(c for c in [chr(i) for i in range(256)]
                           if c.isalnum() or c in '._@$' or ord(c) > 127)
2396
2421
def _tokenizealias(program, lookup=None):
    """Parse alias declaration/definition into a stream of tokens

    This allows symbol names to use also ``$`` as an initial letter
    (for backward compatibility), and callers of this function should
    examine whether ``$`` is used also for unexpected symbols or not.
    """
    # same as the standard revset tokenizer, but with the wider set of
    # initial symbol letters (which includes '$')
    return tokenize(program, lookup=lookup,
                    syminitletters=_aliassyminitletters)
2406
2431
def _parsealiasdecl(decl):
    """Parse alias declaration ``decl``

    This returns ``(name, tree, args, errorstr)`` tuple:

    - ``name``: of declared alias (may be ``decl`` itself at error)
    - ``tree``: parse result (or ``None`` at error)
    - ``args``: list of alias argument names (or None for symbol declaration)
    - ``errorstr``: detail about detected error (or None)

    >>> _parsealiasdecl('foo')
    ('foo', ('symbol', 'foo'), None, None)
    >>> _parsealiasdecl('$foo')
    ('$foo', None, None, "'$' not for alias arguments")
    >>> _parsealiasdecl('foo::bar')
    ('foo::bar', None, None, 'invalid format')
    >>> _parsealiasdecl('foo bar')
    ('foo bar', None, None, 'at 4: invalid token')
    >>> _parsealiasdecl('foo()')
    ('foo', ('func', ('symbol', 'foo')), [], None)
    >>> _parsealiasdecl('$foo()')
    ('$foo()', None, None, "'$' not for alias arguments")
    >>> _parsealiasdecl('foo($1, $2)')
    ('foo', ('func', ('symbol', 'foo')), ['$1', '$2'], None)
    >>> _parsealiasdecl('foo(bar_bar, baz.baz)')
    ('foo', ('func', ('symbol', 'foo')), ['bar_bar', 'baz.baz'], None)
    >>> _parsealiasdecl('foo($1, $2, nested($1, $2))')
    ('foo($1, $2, nested($1, $2))', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo(bar($1, $2))')
    ('foo(bar($1, $2))', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo("string")')
    ('foo("string")', None, None, 'invalid argument list')
    >>> _parsealiasdecl('foo($1, $2')
    ('foo($1, $2', None, None, 'at 10: unexpected token: end')
    >>> _parsealiasdecl('foo("string')
    ('foo("string', None, None, 'at 5: unterminated string')
    >>> _parsealiasdecl('foo($1, $2, $1)')
    ('foo', None, None, 'argument names collide with each other')
    """
    p = parser.parser(elements)
    try:
        tree, pos = p.parse(_tokenizealias(decl))
        # the whole declaration must be consumed; trailing text is an error
        if (pos != len(decl)):
            raise error.ParseError(_('invalid token'), pos)

        if isvalidsymbol(tree):
            # "name = ...." style
            name = getsymbol(tree)
            if name.startswith('$'):
                return (decl, None, None, _("'$' not for alias arguments"))
            return (name, ('symbol', name), None, None)

        if isvalidfunc(tree):
            # "name(arg, ....) = ...." style
            name = getfuncname(tree)
            if name.startswith('$'):
                return (decl, None, None, _("'$' not for alias arguments"))
            args = []
            # every declared argument must be a plain symbol (no nesting,
            # no string literals)
            for arg in getfuncargs(tree):
                if not isvalidsymbol(arg):
                    return (decl, None, None, _("invalid argument list"))
                args.append(getsymbol(arg))
            if len(args) != len(set(args)):
                return (name, None, None,
                        _("argument names collide with each other"))
            return (name, ('func', ('symbol', name)), args, None)

        return (decl, None, None, _("invalid format"))
    except error.ParseError as inst:
        return (decl, None, None, parseerrordetail(inst))
2477
2502
def _parsealiasdefn(defn, args):
    """Parse alias definition ``defn``

    This function also replaces alias argument references in the
    specified definition by ``_aliasarg(ARGNAME)``.

    ``args`` is a list of alias argument names, or None if the alias
    is declared as a symbol.

    This returns "tree" as parsing result.

    >>> args = ['$1', '$2', 'foo']
    >>> print prettyformat(_parsealiasdefn('$1 or foo', args))
    (or
      (func
        ('symbol', '_aliasarg')
        ('string', '$1'))
      (func
        ('symbol', '_aliasarg')
        ('string', 'foo')))
    >>> try:
    ...     _parsealiasdefn('$1 or $bar', args)
    ... except error.ParseError, inst:
    ...     print parseerrordetail(inst)
    at 6: '$' not for alias arguments
    >>> args = ['$1', '$10', 'foo']
    >>> print prettyformat(_parsealiasdefn('$10 or foobar', args))
    (or
      (func
        ('symbol', '_aliasarg')
        ('string', '$10'))
      ('symbol', 'foobar'))
    >>> print prettyformat(_parsealiasdefn('"$1" or "foo"', args))
    (or
      ('string', '$1')
      ('string', 'foo'))
    """
    def tokenizedefn(program, lookup=None):
        argset = set(args) if args else set()

        for kind, value, pos in _tokenizealias(program, lookup=lookup):
            if kind != 'symbol':
                yield (kind, value, pos)
                continue
            if value in argset:
                # emulate tokenization of "_aliasarg('ARGNAME')":
                # "_aliasarg()" is an unknown symbol only used separate
                # alias argument placeholders from regular strings.
                yield ('symbol', '_aliasarg', pos)
                yield ('(', None, pos)
                yield ('string', value, pos)
                yield (')', None, pos)
            elif value.startswith('$'):
                # '$'-prefixed symbols are reserved for alias arguments
                raise error.ParseError(_("'$' not for alias arguments"),
                                       pos)
            else:
                yield (kind, value, pos)

    p = parser.parser(elements)
    tree, pos = p.parse(tokenizedefn(defn))
    if pos != len(defn):
        raise error.ParseError(_('invalid token'), pos)
    return parser.simplifyinfixops(tree, ('or',))
2542
2567
class revsetalias(object):
    """One [revsetalias] config entry: parsed declaration and replacement.

    On any parse failure ``self.error`` holds a user-facable message and
    ``self.replacement`` is left unset.
    """

    # whether own `error` information is already shown or not.
    # this avoids showing same warning multiple times at each `findaliases`.
    warned = False

    def __init__(self, name, value):
        '''Aliases like:

        h = heads(default)
        b($1) = ancestors($1) - ancestors(default)
        '''
        decl = _parsealiasdecl(name)
        self.name, self.tree, self.args, self.error = decl
        if self.error:
            self.error = _('failed to parse the declaration of revset alias'
                           ' "%s": %s') % (self.name, self.error)
            return

        try:
            self.replacement = _parsealiasdefn(value, self.args)
            # Check for placeholder injection
            _checkaliasarg(self.replacement, self.args)
        except error.ParseError as inst:
            self.error = _('failed to parse the definition of revset alias'
                           ' "%s": %s') % (self.name, parseerrordetail(inst))
2567
2592
2568 def _getalias(aliases, tree):
2593 def _getalias(aliases, tree):
2569 """If tree looks like an unexpanded alias, return it. Return None
2594 """If tree looks like an unexpanded alias, return it. Return None
2570 otherwise.
2595 otherwise.
2571 """
2596 """
2572 if isinstance(tree, tuple) and tree:
2597 if isinstance(tree, tuple) and tree:
2573 if tree[0] == 'symbol' and len(tree) == 2:
2598 if tree[0] == 'symbol' and len(tree) == 2:
2574 name = tree[1]
2599 name = tree[1]
2575 alias = aliases.get(name)
2600 alias = aliases.get(name)
2576 if alias and alias.args is None and alias.tree == tree:
2601 if alias and alias.args is None and alias.tree == tree:
2577 return alias
2602 return alias
2578 if tree[0] == 'func' and len(tree) > 1:
2603 if tree[0] == 'func' and len(tree) > 1:
2579 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
2604 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
2580 name = tree[1][1]
2605 name = tree[1][1]
2581 alias = aliases.get(name)
2606 alias = aliases.get(name)
2582 if alias and alias.args is not None and alias.tree == tree[:2]:
2607 if alias and alias.args is not None and alias.tree == tree[:2]:
2583 return alias
2608 return alias
2584 return None
2609 return None
2585
2610
def _expandargs(tree, args):
    """Replace _aliasarg instances with the substitution value of the
    same name in args, recursively.
    """
    if isinstance(tree, tuple) and tree:
        name = _getaliasarg(tree)
        if name is None:
            return tuple(_expandargs(subtree, args) for subtree in tree)
        return args[name]
    # leaves (strings, None, empty tuples) pass through untouched
    return tree
2596
2621
def _expandaliases(aliases, tree, expanding, cache):
    """Expand aliases in tree, recursively.

    'aliases' is a dictionary mapping user defined aliases to
    revsetalias objects.

    ``expanding`` is the stack of aliases currently being expanded (used
    for cycle detection); ``cache`` memoizes expanded replacements by
    alias name.
    """
    if not isinstance(tree, tuple):
        # Do not expand raw strings
        return tree
    alias = _getalias(aliases, tree)
    if alias is None:
        # not an alias node: expand children and rebuild
        return tuple(_expandaliases(aliases, subtree, expanding, cache)
                     for subtree in tree)

    if alias.error:
        raise error.Abort(alias.error)
    if alias in expanding:
        raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                 'detected') % alias.name)
    expanding.append(alias)
    if alias.name not in cache:
        cache[alias.name] = _expandaliases(aliases, alias.replacement,
                                           expanding, cache)
    result = cache[alias.name]
    expanding.pop()
    if alias.args is not None:
        # function alias: expand the actual arguments, then substitute
        # them for the _aliasarg placeholders in the cached replacement
        actuals = getlist(tree[2])
        if len(actuals) != len(alias.args):
            raise error.ParseError(
                _('invalid number of arguments: %d') % len(actuals))
        actuals = [_expandaliases(aliases, a, [], cache) for a in actuals]
        result = _expandargs(result, dict(zip(alias.args, actuals)))
    return result
2630
2655
def findaliases(ui, tree, showwarning=None):
    """Expand all [revsetalias] config aliases found in ``tree``.

    When ``showwarning`` is given, it is called once per broken alias
    (even aliases the expression never referenced).
    """
    _checkaliasarg(tree)
    aliases = {}
    for key, value in ui.configitems('revsetalias'):
        entry = revsetalias(key, value)
        aliases[entry.name] = entry
    tree = _expandaliases(aliases, tree, [], {})
    if showwarning:
        # warn about problematic (but not referred) aliases
        for name, entry in sorted(aliases.iteritems()):
            if entry.error and not entry.warned:
                showwarning(_('warning: %s\n') % (entry.error))
                entry.warned = True
    return tree
2645
2670
def foldconcat(tree):
    """Fold elements to be concatenated by `##`
    """
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return tree
    if tree[0] != '_concat':
        return tuple(foldconcat(subtree) for subtree in tree)
    # flatten nested '_concat' nodes depth-first, left to right,
    # collecting the string/symbol payloads in order
    pieces = []
    stack = [tree]
    while stack:
        item = stack.pop()
        if item[0] == '_concat':
            stack.extend(reversed(item[1:]))
        elif item[0] in ('string', 'symbol'):
            pieces.append(item[1])
        else:
            msg = _("\"##\" can't concatenate \"%s\" element") % (item[0])
            raise error.ParseError(msg)
    return ('string', ''.join(pieces))
2666
2691
def parse(spec, lookup=None):
    """Parse ``spec`` into a revset syntax tree.

    Raises error.ParseError when the whole string cannot be consumed.
    """
    revparser = parser.parser(elements)
    tree, pos = revparser.parse(tokenize(spec, lookup=lookup))
    if pos != len(spec):
        raise error.ParseError(_("invalid token"), pos)
    return parser.simplifyinfixops(tree, ('or',))
2673
2698
def posttreebuilthook(tree, repo):
    # hook for extensions to execute code on the optimized tree;
    # intentionally a no-op here, extensions wrap/replace it
    pass
2677
2702
def match(ui, spec, repo=None):
    """Create a matcher for the single revset expression ``spec``."""
    if not spec:
        raise error.ParseError(_("empty query"))
    # a repo lets the tokenizer resolve ambiguous names via membership
    lookup = repo.__contains__ if repo else None
    return _makematcher(ui, parse(spec, lookup), repo)
2686
2711
def matchany(ui, specs, repo=None):
    """Create a matcher that will include any revisions matching one of the
    given specs"""
    if not specs:
        # no specs at all: matcher yields the empty set
        def mfunc(repo, subset=None):
            return baseset()
        return mfunc
    if not all(specs):
        raise error.ParseError(_("empty query"))
    lookup = repo.__contains__ if repo else None
    if len(specs) == 1:
        tree = parse(specs[0], lookup)
    else:
        # multiple specs are combined with a top-level 'or'
        tree = ('or',) + tuple(parse(s, lookup) for s in specs)
    return _makematcher(ui, tree, repo)
2704
2729
def _makematcher(ui, tree, repo):
    """Turn a parsed tree into a callable matcher function."""
    if ui:
        tree = findaliases(ui, tree, showwarning=ui.warn)
    tree = foldconcat(tree)
    tree = optimize(tree, True)[1]
    posttreebuilthook(tree, repo)

    def mfunc(repo, subset=None):
        if subset is None:
            subset = fullreposet(repo)
        # getset requires a smartset; wrap plain iterables in a baseset
        if not util.safehasattr(subset, 'isascending'):
            subset = baseset(subset)
        return getset(repo, subset, tree)
    return mfunc
2720
2745
def formatspec(expr, *args):
    '''
    This is a convenience function for using revsets internally, and
    escapes arguments appropriately. Aliases are intentionally ignored
    so that intended expression behavior isn't accidentally subverted.

    Supported arguments:

    %r = revset expression, parenthesized
    %d = int(arg), no quoting
    %s = string(arg), escaped and single-quoted
    %b = arg.branch(), escaped and single-quoted
    %n = hex(arg), single-quoted
    %% = a literal '%'

    Prefixing the type with 'l' specifies a parenthesized list of that type.

    >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
    '(10 or 11):: and ((this()) or (that()))'
    >>> formatspec('%d:: and not %d::', 10, 20)
    '10:: and not 20::'
    >>> formatspec('%ld or %ld', [], [1])
    "_list('') or 1"
    >>> formatspec('keyword(%s)', 'foo\\xe9')
    "keyword('foo\\\\xe9')"
    >>> b = lambda: 'default'
    >>> b.branch = b
    >>> formatspec('branch(%b)', b)
    "branch('default')"
    >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
    "root(_list('a\\x00b\\x00c\\x00d'))"
    '''

    def quote(s):
        # repr() of a str gives a single-quoted, escaped literal
        return repr(str(s))

    def argtype(c, arg):
        # render one argument according to its format character
        if c == 'd':
            return str(int(arg))
        elif c == 's':
            return quote(arg)
        elif c == 'r':
            parse(arg) # make sure syntax errors are confined
            return '(%s)' % arg
        elif c == 'n':
            return quote(node.hex(arg))
        elif c == 'b':
            return quote(arg.branch())

    def listexp(items, t):
        # render a list of arguments of type t, using the compact
        # _list/_intlist/_hexlist encodings where possible
        count = len(items)
        if count == 0:
            return "_list('')"
        elif count == 1:
            return argtype(t, items[0])
        elif t == 'd':
            return "_intlist('%s')" % "\0".join(str(int(a)) for a in items)
        elif t == 's':
            return "_list('%s')" % "\0".join(items)
        elif t == 'n':
            return "_hexlist('%s')" % "\0".join(node.hex(a) for a in items)
        elif t == 'b':
            return "_list('%s')" % "\0".join(a.branch() for a in items)

        # no compact form: split in half and join with 'or'
        mid = count // 2
        return '(%s or %s)' % (listexp(items[:mid], t),
                               listexp(items[mid:], t))

    out = ''
    pos = 0
    argidx = 0
    while pos < len(expr):
        c = expr[pos]
        if c == '%':
            pos += 1
            d = expr[pos]
            if d == '%':
                out += d
            elif d in 'dsnbr':
                out += argtype(d, args[argidx])
                argidx += 1
            elif d == 'l':
                # a list of some type
                pos += 1
                d = expr[pos]
                out += listexp(list(args[argidx]), d)
                argidx += 1
            else:
                raise error.Abort('unexpected revspec format character %s' % d)
        else:
            out += c
        pos += 1

    return out
2814
2839
def prettyformat(tree):
    """Return an indented, human-readable rendering of a parsed tree."""
    return parser.prettyformat(tree, ('string', 'symbol'))
2817
2842
def depth(tree):
    """Return the nesting depth of ``tree``: 0 for a leaf, otherwise one
    more than the deepest child."""
    if not isinstance(tree, tuple):
        return 0
    return max(depth(subtree) for subtree in tree) + 1
2823
2848
def funcsused(tree):
    """Return the set of function names referenced anywhere in ``tree``."""
    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
        return set()
    names = set()
    for subtree in tree[1:]:
        names.update(funcsused(subtree))
    if tree[0] == 'func':
        # ('func', ('symbol', NAME), ...) -> NAME
        names.add(tree[1][1])
    return names
2834
2859
class abstractsmartset(object):
    """Base class defining the smartset API: containment, iteration,
    ordering and set arithmetic over collections of revision numbers."""

    def __nonzero__(self):
        """True if the smartset is not empty"""
        raise NotImplementedError()

    def __contains__(self, rev):
        """provide fast membership testing"""
        raise NotImplementedError()

    def __iter__(self):
        """iterate the set in the order it is supposed to be iterated"""
        raise NotImplementedError()

    # Attributes containing a function to perform a fast iteration in a given
    # direction. A smartset can have none, one, or both defined.
    #
    # Default value is None instead of a function returning None to avoid
    # initializing an iterator just for testing if a fast method exists.
    fastasc = None
    fastdesc = None

    def isascending(self):
        """True if the set will iterate in ascending order"""
        raise NotImplementedError()

    def isdescending(self):
        """True if the set will iterate in descending order"""
        raise NotImplementedError()

    @util.cachefunc
    def min(self):
        """return the minimum element in the set"""
        if self.fastasc is None:
            return min(self)
        # the first element of an ascending iteration is the minimum
        for rev in self.fastasc():
            return rev
        raise ValueError('arg is an empty sequence')

    @util.cachefunc
    def max(self):
        """return the maximum element in the set"""
        if self.fastdesc is None:
            return max(self)
        # the first element of a descending iteration is the maximum
        for rev in self.fastdesc():
            return rev
        raise ValueError('arg is an empty sequence')

    def first(self):
        """return the first element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def last(self):
        """return the last element in the set (user iteration perspective)

        Return None if the set is empty"""
        raise NotImplementedError()

    def __len__(self):
        """return the length of the smartsets

        This can be expensive on smartset that could be lazy otherwise."""
        raise NotImplementedError()

    def reverse(self):
        """reverse the expected iteration order"""
        raise NotImplementedError()

    def sort(self, reverse=True):
        """get the set to iterate in an ascending or descending order"""
        raise NotImplementedError()

    def __and__(self, other):
        """Returns a new object with the intersection of the two collections.

        This is part of the mandatory API for smartset."""
        # intersecting with the full repo changes nothing
        if isinstance(other, fullreposet):
            return self
        return self.filter(other.__contains__, cache=False)

    def __add__(self, other):
        """Returns a new object with the union of the two collections.

        This is part of the mandatory API for smartset."""
        return addset(self, other)

    def __sub__(self, other):
        """Returns a new object with the substraction of the two collections.

        This is part of the mandatory API for smartset."""
        inother = other.__contains__
        return self.filter(lambda r: not inother(r), cache=False)

    def filter(self, condition, cache=True):
        """Returns this smartset filtered by condition as a new smartset.

        `condition` is a callable which takes a revision number and returns a
        boolean.

        This is part of the mandatory API for smartset."""
        # builtins have no func_code attribute and cannot (and need not)
        # be wrapped in a caching layer
        if cache and util.safehasattr(condition, 'func_code'):
            condition = util.cachefunc(condition)
        return filteredset(self, condition)
2941
2966
class baseset(abstractsmartset):
    """Basic data structure that represents a revset and contains the basic
    operation that it should be able to perform.

    Every method in this class should be implemented by any smartset class.
    """
    def __init__(self, data=()):
        if not isinstance(data, list):
            if isinstance(data, set):
                # reuse the caller's set as the membership structure
                self._set = data
            data = list(data)
        self._list = data
        # None: original order, True: ascending, False: descending
        self._ascending = None

    @util.propertycache
    def _set(self):
        # lazily built membership set
        return set(self._list)

    @util.propertycache
    def _asclist(self):
        # lazily built ascending copy of the data
        return sorted(self._list)

    def __iter__(self):
        if self._ascending is None:
            return iter(self._list)
        if self._ascending:
            return iter(self._asclist)
        return reversed(self._asclist)

    def fastasc(self):
        return iter(self._asclist)

    def fastdesc(self):
        return reversed(self._asclist)

    @util.propertycache
    def __contains__(self):
        # bind the set's __contains__ once for fast membership tests
        return self._set.__contains__

    def __nonzero__(self):
        return bool(self._list)

    def sort(self, reverse=False):
        self._ascending = not bool(reverse)

    def reverse(self):
        if self._ascending is None:
            # unsorted: reverse the stored order in place
            self._list.reverse()
        else:
            # sorted: just flip the direction flag
            self._ascending = not self._ascending

    def __len__(self):
        return len(self._list)

    def isascending(self):
        """Returns True if the collection is ascending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        # _ascending is None (unsorted) or False both mean "not ascending"
        return bool(self._ascending)

    def isdescending(self):
        """Returns True if the collection is descending order, False if not.

        This is part of the mandatory API for smartset."""
        if len(self) <= 1:
            return True
        # only an explicit descending sort counts
        return self._ascending is False

    def first(self):
        if not self:
            return None
        if self._ascending is None:
            return self._list[0]
        if self._ascending:
            return self._asclist[0]
        return self._asclist[-1]

    def last(self):
        if not self:
            return None
        if self._ascending is None:
            return self._list[-1]
        if self._ascending:
            return self._asclist[-1]
        return self._asclist[0]

    def __repr__(self):
        marker = {None: '', False: '-', True: '+'}[self._ascending]
        return '<%s%s %r>' % (type(self).__name__, marker, self._list)
3038
3063
class filteredset(abstractsmartset):
    """Lazy smartset wrapping another smartset with a membership predicate.

    Revisions of the wrapped subset are yielded only when the condition
    callback accepts them; nothing is materialized up front.
    """
    def __init__(self, subset, condition=lambda x: True):
        """
        condition: a function that decide whether a revision in the subset
        belongs to the revset or not.
        """
        self._subset = subset
        self._condition = condition

    def __contains__(self, x):
        # both the wrapped set and the predicate must accept the revision
        if x in self._subset:
            return self._condition(x)
        return False

    def __iter__(self):
        return self._iterfilter(self._subset)

    def _iterfilter(self, it):
        keep = self._condition
        for rev in it:
            if keep(rev):
                yield rev

    @property
    def fastasc(self):
        # fast ascending iteration is possible only if the subset has it
        inner = self._subset.fastasc
        if inner is None:
            return None
        return lambda: self._iterfilter(inner())

    @property
    def fastdesc(self):
        # fast descending iteration is possible only if the subset has it
        inner = self._subset.fastdesc
        if inner is None:
            return None
        return lambda: self._iterfilter(inner())

    def __nonzero__(self):
        # prefer a fast iterator in either direction to probe emptiness
        fast = self.fastasc
        if fast is None:
            fast = self.fastdesc
        if fast is None:
            probe = self
        else:
            probe = fast()
        for rev in probe:
            return True
        return False

    def __len__(self):
        # Basic implementation to be changed in future patches.
        return len(baseset([rev for rev in self]))

    def sort(self, reverse=False):
        """Delegate ordering to the wrapped subset."""
        self._subset.sort(reverse=reverse)

    def reverse(self):
        self._subset.reverse()

    def isascending(self):
        return self._subset.isascending()

    def isdescending(self):
        return self._subset.isdescending()

    def first(self):
        for rev in self:
            return rev
        return None

    def last(self):
        it = None
        if self.isascending():
            it = self.fastdesc
        elif self.isdescending():
            it = self.fastasc
        if it is None:
            # unordered: the last element is simply the final one yielded
            rev = None
            for rev in self:
                pass
            return rev
        for rev in it():
            return rev
        return None # empty case

    def __repr__(self):
        return '<%s %r>' % (type(self).__name__, self._subset)
3131
3156
def _iterordered(ascending, iter1, iter2):
    """produce an ordered iteration from two iterators with the same order

    The ascending is used to indicated the iteration direction.

    Both inputs must already be sorted in that direction; when the two
    current heads compare equal, the value is yielded only once.
    """
    # min() merges ascending streams, max() merges descending ones
    choice = max
    if ascending:
        choice = min

    val1 = None
    val2 = None
    try:
        # Consume both iterators in an ordered way until one is empty
        while True:
            if val1 is None:
                # next(it) instead of it.next(): works on py2.6+ and py3,
                # and keeps the builtin available below
                val1 = next(iter1)
            if val2 is None:
                val2 = next(iter2)
            # 'winner' (not 'next') avoids shadowing the builtin used above
            winner = choice(val1, val2)
            yield winner
            # drop whichever head(s) we just emitted
            if val1 == winner:
                val1 = None
            if val2 == winner:
                val2 = None
    except StopIteration:
        # Flush any remaining values and consume the other one
        it = iter2
        if val1 is not None:
            yield val1
            it = iter1
        elif val2 is not None:
            # might have been equality and both are empty
            yield val2
        for val in it:
            yield val
3167
3192
class addset(abstractsmartset):
    """Represent the addition of two sets

    Wrapper structure for lazily adding two structures without losing much
    performance on the __contains__ method

    If the ascending attribute is set, that means the two structures are
    ordered in either an ascending or descending way. Therefore, we can add
    them maintaining the order by iterating over both at the same time

    >>> xs = baseset([0, 3, 2])
    >>> ys = baseset([5, 2, 4])

    >>> rs = addset(xs, ys)
    >>> bool(rs), 0 in rs, 1 in rs, 5 in rs, rs.first(), rs.last()
    (True, True, False, True, 0, 4)
    >>> rs = addset(xs, baseset([]))
    >>> bool(rs), 0 in rs, 1 in rs, rs.first(), rs.last()
    (True, True, False, 0, 2)
    >>> rs = addset(baseset([]), baseset([]))
    >>> bool(rs), 0 in rs, rs.first(), rs.last()
    (False, False, None, None)

    iterate unsorted:
    >>> rs = addset(xs, ys)
    >>> [x for x in rs] # without _genlist
    [0, 3, 2, 5, 4]
    >>> assert not rs._genlist
    >>> len(rs)
    5
    >>> [x for x in rs] # with _genlist
    [0, 3, 2, 5, 4]
    >>> assert rs._genlist

    iterate ascending:
    >>> rs = addset(xs, ys, ascending=True)
    >>> [x for x in rs], [x for x in rs.fastasc()] # without _asclist
    ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5])
    >>> assert not rs._asclist
    >>> len(rs)
    5
    >>> [x for x in rs], [x for x in rs.fastasc()]
    ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5])
    >>> assert rs._asclist

    iterate descending:
    >>> rs = addset(xs, ys, ascending=False)
    >>> [x for x in rs], [x for x in rs.fastdesc()] # without _asclist
    ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0])
    >>> assert not rs._asclist
    >>> len(rs)
    5
    >>> [x for x in rs], [x for x in rs.fastdesc()]
    ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0])
    >>> assert rs._asclist

    iterate ascending without fastasc:
    >>> rs = addset(xs, generatorset(ys), ascending=True)
    >>> assert rs.fastasc is None
    >>> [x for x in rs]
    [0, 2, 3, 4, 5]

    iterate descending without fastdesc:
    >>> rs = addset(generatorset(xs), ys, ascending=False)
    >>> assert rs.fastdesc is None
    >>> [x for x in rs]
    [5, 4, 3, 2, 0]
    """
    def __init__(self, revs1, revs2, ascending=None):
        self._r1 = revs1
        self._r2 = revs2
        # NOTE(review): _iter appears to be unused inside this class --
        # confirm against callers before removing.
        self._iter = None
        # None = unordered, True = ascending, False = descending
        self._ascending = ascending
        # lazily-built caches: all values, and an ascending copy of them
        self._genlist = None
        self._asclist = None

    def __len__(self):
        return len(self._list)

    def __nonzero__(self):
        # non-empty iff either component is non-empty; avoids iteration
        return bool(self._r1) or bool(self._r2)

    @util.propertycache
    def _list(self):
        # cache the concrete values; iteration already deduplicates
        if not self._genlist:
            self._genlist = baseset(iter(self))
        return self._genlist

    def __iter__(self):
        """Iterate over both collections without repeating elements

        If the ascending attribute is not set, iterate over the first one and
        then over the second one checking for membership on the first one so we
        dont yield any duplicates.

        If the ascending attribute is set, iterate over both collections at the
        same time, yielding only one value at a time in the given order.
        """
        if self._ascending is None:
            if self._genlist:
                return iter(self._genlist)
            def arbitraryordergen():
                # all values from r1, then values of r2 not already seen
                for r in self._r1:
                    yield r
                inr1 = self._r1.__contains__
                for r in self._r2:
                    if not inr1(r):
                        yield r
            return arbitraryordergen()
        # try to use our own fast iterator if it exists
        self._trysetasclist()
        if self._ascending:
            attr = 'fastasc'
        else:
            attr = 'fastdesc'
        it = getattr(self, attr)
        if it is not None:
            return it()
        # maybe half of the component supports fast
        # get iterator for _r1
        iter1 = getattr(self._r1, attr)
        if iter1 is None:
            # let's avoid side effect (not sure it matters)
            iter1 = iter(sorted(self._r1, reverse=not self._ascending))
        else:
            iter1 = iter1()
        # get iterator for _r2
        iter2 = getattr(self._r2, attr)
        if iter2 is None:
            # let's avoid side effect (not sure it matters)
            iter2 = iter(sorted(self._r2, reverse=not self._ascending))
        else:
            iter2 = iter2()
        return _iterordered(self._ascending, iter1, iter2)

    def _trysetasclist(self):
        """populate the _asclist attribute if possible and necessary"""
        if self._genlist is not None and self._asclist is None:
            self._asclist = sorted(self._genlist)

    @property
    def fastasc(self):
        # prefer the sorted cache; otherwise merge both components' fast
        # ascending iterators, or give up if either lacks one
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__iter__
        iter1 = self._r1.fastasc
        iter2 = self._r2.fastasc
        if None in (iter1, iter2):
            return None
        return lambda: _iterordered(True, iter1(), iter2())

    @property
    def fastdesc(self):
        # mirror of fastasc for descending iteration
        self._trysetasclist()
        if self._asclist is not None:
            return self._asclist.__reversed__
        iter1 = self._r1.fastdesc
        iter2 = self._r2.fastdesc
        if None in (iter1, iter2):
            return None
        return lambda: _iterordered(False, iter1(), iter2())

    def __contains__(self, x):
        return x in self._r1 or x in self._r2

    def sort(self, reverse=False):
        """Sort the added set

        For this we use the cached list with all the generated values and if we
        know they are ascending or descending we can sort them in a smart way.
        """
        self._ascending = not reverse

    def isascending(self):
        return self._ascending is not None and self._ascending

    def isdescending(self):
        return self._ascending is not None and not self._ascending

    def reverse(self):
        # unordered: reverse the materialized list; ordered: flip the flag
        if self._ascending is None:
            self._list.reverse()
        else:
            self._ascending = not self._ascending

    def first(self):
        for x in self:
            return x
        return None

    def last(self):
        # reuse first() on the temporarily reversed set
        self.reverse()
        val = self.first()
        self.reverse()
        return val

    def __repr__(self):
        d = {None: '', False: '-', True: '+'}[self._ascending]
        return '<%s%s %r, %r>' % (type(self).__name__, d, self._r1, self._r2)
3367
3392
class generatorset(abstractsmartset):
    """Wrap a generator for lazy iteration

    Wrapper structure for generators that provides lazy membership and can
    be iterated more than once.
    When asked for membership it generates values until either it finds the
    requested one or has gone through all the elements in the generator
    """
    def __init__(self, gen, iterasc=None):
        """
        gen: a generator producing the values for the generatorset.

        iterasc: None if the generator's order is unknown, True if it
        yields ascending values, False if it yields descending values.
        """
        self._gen = gen
        self._asclist = None   # ascending snapshot, set once gen is drained
        self._cache = {}       # value -> membership verdict seen so far
        self._genlist = []     # values produced so far, in generator order
        self._finished = False
        self._ascending = True
        if iterasc is not None:
            if iterasc:
                self.fastasc = self._iterator
                # NOTE(review): an instance-level __contains__ is ignored by
                # the 'in' operator on new-style classes (special methods are
                # looked up on the type) -- confirm this binding is effective.
                self.__contains__ = self._asccontains
            else:
                self.fastdesc = self._iterator
                self.__contains__ = self._desccontains

    def __nonzero__(self):
        # Do not use 'for r in self' because it will enforce the iteration
        # order (default ascending), possibly unrolling a whole descending
        # iterator.
        if self._genlist:
            return True
        for r in self._consumegen():
            return True
        return False

    def __contains__(self, x):
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True

        self._cache[x] = False
        return False

    def _asccontains(self, x):
        """version of contains optimised for ascending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l > x:
                # ascending order: once past x, it cannot appear later
                break

        self._cache[x] = False
        return False

    def _desccontains(self, x):
        """version of contains optimised for descending generator"""
        if x in self._cache:
            return self._cache[x]

        # Use new values only, as existing values would be cached.
        for l in self._consumegen():
            if l == x:
                return True
            if l < x:
                # descending order: once below x, it cannot appear later
                break

        self._cache[x] = False
        return False

    def __iter__(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is not None:
            return it()
        # we need to consume the iterator
        for x in self._consumegen():
            pass
        # recall the same code
        return iter(self)

    def _iterator(self):
        """replayable iteration in the underlying generator's own order"""
        if self._finished:
            return iter(self._genlist)

        # We have to use this complex iteration strategy to allow multiple
        # iterations at the same time. We need to be able to catch revision
        # removed from _consumegen and added to genlist in another instance.
        #
        # Getting rid of it would provide an about 15% speed up on this
        # iteration.
        genlist = self._genlist
        consume = self._consumegen()
        _len = len # cache global lookup
        def gen():
            i = 0
            while True:
                if i < _len(genlist):
                    # replay values another iterator already produced
                    yield genlist[i]
                else:
                    # next(consume) instead of consume.next: iterator
                    # protocol via the builtin (works on py2.6+ too)
                    yield next(consume)
                i += 1
        return gen()

    def _consumegen(self):
        """yield new values from the generator, recording them as we go"""
        cache = self._cache
        genlist = self._genlist.append
        for item in self._gen:
            cache[item] = True
            genlist(item)
            yield item
        if not self._finished:
            # generator drained: freeze a sorted snapshot and switch the
            # fast iterators to plain list traversal
            self._finished = True
            asc = self._genlist[:]
            asc.sort()
            self._asclist = asc
            self.fastasc = asc.__iter__
            self.fastdesc = asc.__reversed__

    def __len__(self):
        for x in self._consumegen():
            pass
        return len(self._genlist)

    def sort(self, reverse=False):
        self._ascending = not reverse

    def reverse(self):
        self._ascending = not self._ascending

    def isascending(self):
        return self._ascending

    def isdescending(self):
        return not self._ascending

    def first(self):
        if self._ascending:
            it = self.fastasc
        else:
            it = self.fastdesc
        if it is None:
            # we need to consume all and try again
            for x in self._consumegen():
                pass
            return self.first()
        return next(it(), None)

    def last(self):
        if self._ascending:
            it = self.fastdesc
        else:
            it = self.fastasc
        if it is None:
            # we need to consume all and try again
            # (fix: this previously recursed into self.first(), which
            # returned the FIRST element of the set instead of the last)
            for x in self._consumegen():
                pass
            return self.last()
        return next(it(), None)

    def __repr__(self):
        d = {False: '-', True: '+'}[self._ascending]
        return '<%s%s>' % (type(self).__name__, d)
3541
3566
3542 class spanset(abstractsmartset):
3567 class spanset(abstractsmartset):
3543 """Duck type for baseset class which represents a range of revisions and
3568 """Duck type for baseset class which represents a range of revisions and
3544 can work lazily and without having all the range in memory
3569 can work lazily and without having all the range in memory
3545
3570
3546 Note that spanset(x, y) behave almost like xrange(x, y) except for two
3571 Note that spanset(x, y) behave almost like xrange(x, y) except for two
3547 notable points:
3572 notable points:
3548 - when x < y it will be automatically descending,
3573 - when x < y it will be automatically descending,
3549 - revision filtered with this repoview will be skipped.
3574 - revision filtered with this repoview will be skipped.
3550
3575
3551 """
3576 """
    def __init__(self, repo, start=0, end=None):
        """
        start: first revision included the set
            (default to 0)
        end: first revision excluded (last+1)
            (default to len(repo)

        Spanset will be descending if `end` < `start`.
        """
        if end is None:
            end = len(repo)
        # remember the requested direction, then normalize the bounds so
        # that _start <= _end; the direction flag restores order on iteration
        self._ascending = start <= end
        if not self._ascending:
            start, end = end + 1, start +1
        self._start = start
        self._end = end
        # filtered (hidden) revisions are skipped during iteration
        self._hiddenrevs = repo.changelog.filteredrevs
3569
3594
    def sort(self, reverse=False):
        # Only the direction flag changes; the stored bounds are fixed.
        self._ascending = not reverse
3572
3597
    def reverse(self):
        # Flip the iteration direction; the stored bounds are unchanged.
        self._ascending = not self._ascending
3575
3600
3576 def _iterfilter(self, iterrange):
3601 def _iterfilter(self, iterrange):
3577 s = self._hiddenrevs
3602 s = self._hiddenrevs
3578 for r in iterrange:
3603 for r in iterrange:
3579 if r not in s:
3604 if r not in s:
3580 yield r
3605 yield r
3581
3606
3582 def __iter__(self):
3607 def __iter__(self):
3583 if self._ascending:
3608 if self._ascending:
3584 return self.fastasc()
3609 return self.fastasc()
3585 else:
3610 else:
3586 return self.fastdesc()
3611 return self.fastdesc()
3587
3612
3588 def fastasc(self):
3613 def fastasc(self):
3589 iterrange = xrange(self._start, self._end)
3614 iterrange = xrange(self._start, self._end)
3590 if self._hiddenrevs:
3615 if self._hiddenrevs:
3591 return self._iterfilter(iterrange)
3616 return self._iterfilter(iterrange)
3592 return iter(iterrange)
3617 return iter(iterrange)
3593
3618
3594 def fastdesc(self):
3619 def fastdesc(self):
3595 iterrange = xrange(self._end - 1, self._start - 1, -1)
3620 iterrange = xrange(self._end - 1, self._start - 1, -1)
3596 if self._hiddenrevs:
3621 if self._hiddenrevs:
3597 return self._iterfilter(iterrange)
3622 return self._iterfilter(iterrange)
3598 return iter(iterrange)
3623 return iter(iterrange)
3599
3624
3600 def __contains__(self, rev):
3625 def __contains__(self, rev):
3601 hidden = self._hiddenrevs
3626 hidden = self._hiddenrevs
3602 return ((self._start <= rev < self._end)
3627 return ((self._start <= rev < self._end)
3603 and not (hidden and rev in hidden))
3628 and not (hidden and rev in hidden))
3604
3629
3605 def __nonzero__(self):
3630 def __nonzero__(self):
3606 for r in self:
3631 for r in self:
3607 return True
3632 return True
3608 return False
3633 return False
3609
3634
3610 def __len__(self):
3635 def __len__(self):
3611 if not self._hiddenrevs:
3636 if not self._hiddenrevs:
3612 return abs(self._end - self._start)
3637 return abs(self._end - self._start)
3613 else:
3638 else:
3614 count = 0
3639 count = 0
3615 start = self._start
3640 start = self._start
3616 end = self._end
3641 end = self._end
3617 for rev in self._hiddenrevs:
3642 for rev in self._hiddenrevs:
3618 if (end < rev <= start) or (start <= rev < end):
3643 if (end < rev <= start) or (start <= rev < end):
3619 count += 1
3644 count += 1
3620 return abs(self._end - self._start) - count
3645 return abs(self._end - self._start) - count
3621
3646
3622 def isascending(self):
3647 def isascending(self):
3623 return self._ascending
3648 return self._ascending
3624
3649
3625 def isdescending(self):
3650 def isdescending(self):
3626 return not self._ascending
3651 return not self._ascending
3627
3652
3628 def first(self):
3653 def first(self):
3629 if self._ascending:
3654 if self._ascending:
3630 it = self.fastasc
3655 it = self.fastasc
3631 else:
3656 else:
3632 it = self.fastdesc
3657 it = self.fastdesc
3633 for x in it():
3658 for x in it():
3634 return x
3659 return x
3635 return None
3660 return None
3636
3661
3637 def last(self):
3662 def last(self):
3638 if self._ascending:
3663 if self._ascending:
3639 it = self.fastdesc
3664 it = self.fastdesc
3640 else:
3665 else:
3641 it = self.fastasc
3666 it = self.fastasc
3642 for x in it():
3667 for x in it():
3643 return x
3668 return x
3644 return None
3669 return None
3645
3670
3646 def __repr__(self):
3671 def __repr__(self):
3647 d = {False: '-', True: '+'}[self._ascending]
3672 d = {False: '-', True: '+'}[self._ascending]
3648 return '<%s%s %d:%d>' % (type(self).__name__, d,
3673 return '<%s%s %d:%d>' % (type(self).__name__, d,
3649 self._start, self._end - 1)
3674 self._start, self._end - 1)
3650
3675
3651 class fullreposet(spanset):
3676 class fullreposet(spanset):
3652 """a set containing all revisions in the repo
3677 """a set containing all revisions in the repo
3653
3678
3654 This class exists to host special optimization and magic to handle virtual
3679 This class exists to host special optimization and magic to handle virtual
3655 revisions such as "null".
3680 revisions such as "null".
3656 """
3681 """
3657
3682
3658 def __init__(self, repo):
3683 def __init__(self, repo):
3659 super(fullreposet, self).__init__(repo)
3684 super(fullreposet, self).__init__(repo)
3660
3685
3661 def __and__(self, other):
3686 def __and__(self, other):
3662 """As self contains the whole repo, all of the other set should also be
3687 """As self contains the whole repo, all of the other set should also be
3663 in self. Therefore `self & other = other`.
3688 in self. Therefore `self & other = other`.
3664
3689
3665 This boldly assumes the other contains valid revs only.
3690 This boldly assumes the other contains valid revs only.
3666 """
3691 """
3667 # other not a smartset, make is so
3692 # other not a smartset, make is so
3668 if not util.safehasattr(other, 'isascending'):
3693 if not util.safehasattr(other, 'isascending'):
3669 # filter out hidden revision
3694 # filter out hidden revision
3670 # (this boldly assumes all smartset are pure)
3695 # (this boldly assumes all smartset are pure)
3671 #
3696 #
3672 # `other` was used with "&", let's assume this is a set like
3697 # `other` was used with "&", let's assume this is a set like
3673 # object.
3698 # object.
3674 other = baseset(other - self._hiddenrevs)
3699 other = baseset(other - self._hiddenrevs)
3675
3700
3676 # XXX As fullreposet is also used as bootstrap, this is wrong.
3701 # XXX As fullreposet is also used as bootstrap, this is wrong.
3677 #
3702 #
3678 # With a giveme312() revset returning [3,1,2], this makes
3703 # With a giveme312() revset returning [3,1,2], this makes
3679 # 'hg log -r "giveme312()"' -> 1, 2, 3 (wrong)
3704 # 'hg log -r "giveme312()"' -> 1, 2, 3 (wrong)
3680 # We cannot just drop it because other usage still need to sort it:
3705 # We cannot just drop it because other usage still need to sort it:
3681 # 'hg log -r "all() and giveme312()"' -> 1, 2, 3 (right)
3706 # 'hg log -r "all() and giveme312()"' -> 1, 2, 3 (right)
3682 #
3707 #
3683 # There is also some faulty revset implementations that rely on it
3708 # There is also some faulty revset implementations that rely on it
3684 # (eg: children as of its state in e8075329c5fb)
3709 # (eg: children as of its state in e8075329c5fb)
3685 #
3710 #
3686 # When we fix the two points above we can move this into the if clause
3711 # When we fix the two points above we can move this into the if clause
3687 other.sort(reverse=self.isdescending())
3712 other.sort(reverse=self.isdescending())
3688 return other
3713 return other
3689
3714
3690 def prettyformatset(revs):
3715 def prettyformatset(revs):
3691 lines = []
3716 lines = []
3692 rs = repr(revs)
3717 rs = repr(revs)
3693 p = 0
3718 p = 0
3694 while p < len(rs):
3719 while p < len(rs):
3695 q = rs.find('<', p + 1)
3720 q = rs.find('<', p + 1)
3696 if q < 0:
3721 if q < 0:
3697 q = len(rs)
3722 q = len(rs)
3698 l = rs.count('<', 0, p) - rs.count('>', 0, p)
3723 l = rs.count('<', 0, p) - rs.count('>', 0, p)
3699 assert l >= 0
3724 assert l >= 0
3700 lines.append((l, rs[p:q].rstrip()))
3725 lines.append((l, rs[p:q].rstrip()))
3701 p = q
3726 p = q
3702 return '\n'.join(' ' * l + s for l, s in lines)
3727 return '\n'.join(' ' * l + s for l, s in lines)
3703
3728
3704 # tell hggettext to extract docstrings from these functions:
3729 # tell hggettext to extract docstrings from these functions:
3705 i18nfunctions = symbols.values()
3730 i18nfunctions = symbols.values()
@@ -1,2191 +1,2231 b''
1 $ HGENCODING=utf-8
1 $ HGENCODING=utf-8
2 $ export HGENCODING
2 $ export HGENCODING
3 $ cat > testrevset.py << EOF
3 $ cat > testrevset.py << EOF
4 > import mercurial.revset
4 > import mercurial.revset
5 >
5 >
6 > baseset = mercurial.revset.baseset
6 > baseset = mercurial.revset.baseset
7 >
7 >
8 > def r3232(repo, subset, x):
8 > def r3232(repo, subset, x):
9 > """"simple revset that return [3,2,3,2]
9 > """"simple revset that return [3,2,3,2]
10 >
10 >
11 > revisions duplicated on purpose.
11 > revisions duplicated on purpose.
12 > """
12 > """
13 > if 3 not in subset:
13 > if 3 not in subset:
14 > if 2 in subset:
14 > if 2 in subset:
15 > return baseset([2,2])
15 > return baseset([2,2])
16 > return baseset()
16 > return baseset()
17 > return baseset([3,3,2,2])
17 > return baseset([3,3,2,2])
18 >
18 >
19 > mercurial.revset.symbols['r3232'] = r3232
19 > mercurial.revset.symbols['r3232'] = r3232
20 > EOF
20 > EOF
21 $ cat >> $HGRCPATH << EOF
21 $ cat >> $HGRCPATH << EOF
22 > [extensions]
22 > [extensions]
23 > testrevset=$TESTTMP/testrevset.py
23 > testrevset=$TESTTMP/testrevset.py
24 > EOF
24 > EOF
25
25
26 $ try() {
26 $ try() {
27 > hg debugrevspec --debug "$@"
27 > hg debugrevspec --debug "$@"
28 > }
28 > }
29
29
30 $ log() {
30 $ log() {
31 > hg log --template '{rev}\n' -r "$1"
31 > hg log --template '{rev}\n' -r "$1"
32 > }
32 > }
33
33
34 $ hg init repo
34 $ hg init repo
35 $ cd repo
35 $ cd repo
36
36
37 $ echo a > a
37 $ echo a > a
38 $ hg branch a
38 $ hg branch a
39 marked working directory as branch a
39 marked working directory as branch a
40 (branches are permanent and global, did you want a bookmark?)
40 (branches are permanent and global, did you want a bookmark?)
41 $ hg ci -Aqm0
41 $ hg ci -Aqm0
42
42
43 $ echo b > b
43 $ echo b > b
44 $ hg branch b
44 $ hg branch b
45 marked working directory as branch b
45 marked working directory as branch b
46 $ hg ci -Aqm1
46 $ hg ci -Aqm1
47
47
48 $ rm a
48 $ rm a
49 $ hg branch a-b-c-
49 $ hg branch a-b-c-
50 marked working directory as branch a-b-c-
50 marked working directory as branch a-b-c-
51 $ hg ci -Aqm2 -u Bob
51 $ hg ci -Aqm2 -u Bob
52
52
53 $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n'
53 $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n'
54 2
54 2
55 $ hg log -r "extra('branch')" --template '{rev}\n'
55 $ hg log -r "extra('branch')" --template '{rev}\n'
56 0
56 0
57 1
57 1
58 2
58 2
59 $ hg log -r "extra('branch', 're:a')" --template '{rev} {branch}\n'
59 $ hg log -r "extra('branch', 're:a')" --template '{rev} {branch}\n'
60 0 a
60 0 a
61 2 a-b-c-
61 2 a-b-c-
62
62
63 $ hg co 1
63 $ hg co 1
64 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
64 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 $ hg branch +a+b+c+
65 $ hg branch +a+b+c+
66 marked working directory as branch +a+b+c+
66 marked working directory as branch +a+b+c+
67 $ hg ci -Aqm3
67 $ hg ci -Aqm3
68
68
69 $ hg co 2 # interleave
69 $ hg co 2 # interleave
70 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
70 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
71 $ echo bb > b
71 $ echo bb > b
72 $ hg branch -- -a-b-c-
72 $ hg branch -- -a-b-c-
73 marked working directory as branch -a-b-c-
73 marked working directory as branch -a-b-c-
74 $ hg ci -Aqm4 -d "May 12 2005"
74 $ hg ci -Aqm4 -d "May 12 2005"
75
75
76 $ hg co 3
76 $ hg co 3
77 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
77 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 $ hg branch !a/b/c/
78 $ hg branch !a/b/c/
79 marked working directory as branch !a/b/c/
79 marked working directory as branch !a/b/c/
80 $ hg ci -Aqm"5 bug"
80 $ hg ci -Aqm"5 bug"
81
81
82 $ hg merge 4
82 $ hg merge 4
83 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
84 (branch merge, don't forget to commit)
84 (branch merge, don't forget to commit)
85 $ hg branch _a_b_c_
85 $ hg branch _a_b_c_
86 marked working directory as branch _a_b_c_
86 marked working directory as branch _a_b_c_
87 $ hg ci -Aqm"6 issue619"
87 $ hg ci -Aqm"6 issue619"
88
88
89 $ hg branch .a.b.c.
89 $ hg branch .a.b.c.
90 marked working directory as branch .a.b.c.
90 marked working directory as branch .a.b.c.
91 $ hg ci -Aqm7
91 $ hg ci -Aqm7
92
92
93 $ hg branch all
93 $ hg branch all
94 marked working directory as branch all
94 marked working directory as branch all
95
95
96 $ hg co 4
96 $ hg co 4
97 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 $ hg branch Γ©
98 $ hg branch Γ©
99 marked working directory as branch \xc3\xa9 (esc)
99 marked working directory as branch \xc3\xa9 (esc)
100 $ hg ci -Aqm9
100 $ hg ci -Aqm9
101
101
102 $ hg tag -r6 1.0
102 $ hg tag -r6 1.0
103 $ hg bookmark -r6 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
103 $ hg bookmark -r6 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
104
104
105 $ hg clone --quiet -U -r 7 . ../remote1
105 $ hg clone --quiet -U -r 7 . ../remote1
106 $ hg clone --quiet -U -r 8 . ../remote2
106 $ hg clone --quiet -U -r 8 . ../remote2
107 $ echo "[paths]" >> .hg/hgrc
107 $ echo "[paths]" >> .hg/hgrc
108 $ echo "default = ../remote1" >> .hg/hgrc
108 $ echo "default = ../remote1" >> .hg/hgrc
109
109
110 trivial
110 trivial
111
111
112 $ try 0:1
112 $ try 0:1
113 (range
113 (range
114 ('symbol', '0')
114 ('symbol', '0')
115 ('symbol', '1'))
115 ('symbol', '1'))
116 * set:
116 * set:
117 <spanset+ 0:1>
117 <spanset+ 0:1>
118 0
118 0
119 1
119 1
120 $ try --optimize :
120 $ try --optimize :
121 (rangeall
121 (rangeall
122 None)
122 None)
123 * optimized:
123 * optimized:
124 (range
124 (range
125 ('string', '0')
125 ('string', '0')
126 ('string', 'tip'))
126 ('string', 'tip'))
127 * set:
127 * set:
128 <spanset+ 0:9>
128 <spanset+ 0:9>
129 0
129 0
130 1
130 1
131 2
131 2
132 3
132 3
133 4
133 4
134 5
134 5
135 6
135 6
136 7
136 7
137 8
137 8
138 9
138 9
139 $ try 3::6
139 $ try 3::6
140 (dagrange
140 (dagrange
141 ('symbol', '3')
141 ('symbol', '3')
142 ('symbol', '6'))
142 ('symbol', '6'))
143 * set:
143 * set:
144 <baseset+ [3, 5, 6]>
144 <baseset+ [3, 5, 6]>
145 3
145 3
146 5
146 5
147 6
147 6
148 $ try '0|1|2'
148 $ try '0|1|2'
149 (or
149 (or
150 ('symbol', '0')
150 ('symbol', '0')
151 ('symbol', '1')
151 ('symbol', '1')
152 ('symbol', '2'))
152 ('symbol', '2'))
153 * set:
153 * set:
154 <baseset [0, 1, 2]>
154 <baseset [0, 1, 2]>
155 0
155 0
156 1
156 1
157 2
157 2
158
158
159 names that should work without quoting
159 names that should work without quoting
160
160
161 $ try a
161 $ try a
162 ('symbol', 'a')
162 ('symbol', 'a')
163 * set:
163 * set:
164 <baseset [0]>
164 <baseset [0]>
165 0
165 0
166 $ try b-a
166 $ try b-a
167 (minus
167 (minus
168 ('symbol', 'b')
168 ('symbol', 'b')
169 ('symbol', 'a'))
169 ('symbol', 'a'))
170 * set:
170 * set:
171 <filteredset
171 <filteredset
172 <baseset [1]>>
172 <baseset [1]>>
173 1
173 1
174 $ try _a_b_c_
174 $ try _a_b_c_
175 ('symbol', '_a_b_c_')
175 ('symbol', '_a_b_c_')
176 * set:
176 * set:
177 <baseset [6]>
177 <baseset [6]>
178 6
178 6
179 $ try _a_b_c_-a
179 $ try _a_b_c_-a
180 (minus
180 (minus
181 ('symbol', '_a_b_c_')
181 ('symbol', '_a_b_c_')
182 ('symbol', 'a'))
182 ('symbol', 'a'))
183 * set:
183 * set:
184 <filteredset
184 <filteredset
185 <baseset [6]>>
185 <baseset [6]>>
186 6
186 6
187 $ try .a.b.c.
187 $ try .a.b.c.
188 ('symbol', '.a.b.c.')
188 ('symbol', '.a.b.c.')
189 * set:
189 * set:
190 <baseset [7]>
190 <baseset [7]>
191 7
191 7
192 $ try .a.b.c.-a
192 $ try .a.b.c.-a
193 (minus
193 (minus
194 ('symbol', '.a.b.c.')
194 ('symbol', '.a.b.c.')
195 ('symbol', 'a'))
195 ('symbol', 'a'))
196 * set:
196 * set:
197 <filteredset
197 <filteredset
198 <baseset [7]>>
198 <baseset [7]>>
199 7
199 7
200
200
201 names that should be caught by fallback mechanism
201 names that should be caught by fallback mechanism
202
202
203 $ try -- '-a-b-c-'
203 $ try -- '-a-b-c-'
204 ('symbol', '-a-b-c-')
204 ('symbol', '-a-b-c-')
205 * set:
205 * set:
206 <baseset [4]>
206 <baseset [4]>
207 4
207 4
208 $ log -a-b-c-
208 $ log -a-b-c-
209 4
209 4
210 $ try '+a+b+c+'
210 $ try '+a+b+c+'
211 ('symbol', '+a+b+c+')
211 ('symbol', '+a+b+c+')
212 * set:
212 * set:
213 <baseset [3]>
213 <baseset [3]>
214 3
214 3
215 $ try '+a+b+c+:'
215 $ try '+a+b+c+:'
216 (rangepost
216 (rangepost
217 ('symbol', '+a+b+c+'))
217 ('symbol', '+a+b+c+'))
218 * set:
218 * set:
219 <spanset+ 3:9>
219 <spanset+ 3:9>
220 3
220 3
221 4
221 4
222 5
222 5
223 6
223 6
224 7
224 7
225 8
225 8
226 9
226 9
227 $ try ':+a+b+c+'
227 $ try ':+a+b+c+'
228 (rangepre
228 (rangepre
229 ('symbol', '+a+b+c+'))
229 ('symbol', '+a+b+c+'))
230 * set:
230 * set:
231 <spanset+ 0:3>
231 <spanset+ 0:3>
232 0
232 0
233 1
233 1
234 2
234 2
235 3
235 3
236 $ try -- '-a-b-c-:+a+b+c+'
236 $ try -- '-a-b-c-:+a+b+c+'
237 (range
237 (range
238 ('symbol', '-a-b-c-')
238 ('symbol', '-a-b-c-')
239 ('symbol', '+a+b+c+'))
239 ('symbol', '+a+b+c+'))
240 * set:
240 * set:
241 <spanset- 3:4>
241 <spanset- 3:4>
242 4
242 4
243 3
243 3
244 $ log '-a-b-c-:+a+b+c+'
244 $ log '-a-b-c-:+a+b+c+'
245 4
245 4
246 3
246 3
247
247
248 $ try -- -a-b-c--a # complains
248 $ try -- -a-b-c--a # complains
249 (minus
249 (minus
250 (minus
250 (minus
251 (minus
251 (minus
252 (negate
252 (negate
253 ('symbol', 'a'))
253 ('symbol', 'a'))
254 ('symbol', 'b'))
254 ('symbol', 'b'))
255 ('symbol', 'c'))
255 ('symbol', 'c'))
256 (negate
256 (negate
257 ('symbol', 'a')))
257 ('symbol', 'a')))
258 abort: unknown revision '-a'!
258 abort: unknown revision '-a'!
259 [255]
259 [255]
260 $ try Γ©
260 $ try Γ©
261 ('symbol', '\xc3\xa9')
261 ('symbol', '\xc3\xa9')
262 * set:
262 * set:
263 <baseset [9]>
263 <baseset [9]>
264 9
264 9
265
265
266 no quoting needed
266 no quoting needed
267
267
268 $ log ::a-b-c-
268 $ log ::a-b-c-
269 0
269 0
270 1
270 1
271 2
271 2
272
272
273 quoting needed
273 quoting needed
274
274
275 $ try '"-a-b-c-"-a'
275 $ try '"-a-b-c-"-a'
276 (minus
276 (minus
277 ('string', '-a-b-c-')
277 ('string', '-a-b-c-')
278 ('symbol', 'a'))
278 ('symbol', 'a'))
279 * set:
279 * set:
280 <filteredset
280 <filteredset
281 <baseset [4]>>
281 <baseset [4]>>
282 4
282 4
283
283
284 $ log '1 or 2'
284 $ log '1 or 2'
285 1
285 1
286 2
286 2
287 $ log '1|2'
287 $ log '1|2'
288 1
288 1
289 2
289 2
290 $ log '1 and 2'
290 $ log '1 and 2'
291 $ log '1&2'
291 $ log '1&2'
292 $ try '1&2|3' # precedence - and is higher
292 $ try '1&2|3' # precedence - and is higher
293 (or
293 (or
294 (and
294 (and
295 ('symbol', '1')
295 ('symbol', '1')
296 ('symbol', '2'))
296 ('symbol', '2'))
297 ('symbol', '3'))
297 ('symbol', '3'))
298 * set:
298 * set:
299 <addset
299 <addset
300 <baseset []>,
300 <baseset []>,
301 <baseset [3]>>
301 <baseset [3]>>
302 3
302 3
303 $ try '1|2&3'
303 $ try '1|2&3'
304 (or
304 (or
305 ('symbol', '1')
305 ('symbol', '1')
306 (and
306 (and
307 ('symbol', '2')
307 ('symbol', '2')
308 ('symbol', '3')))
308 ('symbol', '3')))
309 * set:
309 * set:
310 <addset
310 <addset
311 <baseset [1]>,
311 <baseset [1]>,
312 <baseset []>>
312 <baseset []>>
313 1
313 1
314 $ try '1&2&3' # associativity
314 $ try '1&2&3' # associativity
315 (and
315 (and
316 (and
316 (and
317 ('symbol', '1')
317 ('symbol', '1')
318 ('symbol', '2'))
318 ('symbol', '2'))
319 ('symbol', '3'))
319 ('symbol', '3'))
320 * set:
320 * set:
321 <baseset []>
321 <baseset []>
322 $ try '1|(2|3)'
322 $ try '1|(2|3)'
323 (or
323 (or
324 ('symbol', '1')
324 ('symbol', '1')
325 (group
325 (group
326 (or
326 (or
327 ('symbol', '2')
327 ('symbol', '2')
328 ('symbol', '3'))))
328 ('symbol', '3'))))
329 * set:
329 * set:
330 <addset
330 <addset
331 <baseset [1]>,
331 <baseset [1]>,
332 <baseset [2, 3]>>
332 <baseset [2, 3]>>
333 1
333 1
334 2
334 2
335 3
335 3
336 $ log '1.0' # tag
336 $ log '1.0' # tag
337 6
337 6
338 $ log 'a' # branch
338 $ log 'a' # branch
339 0
339 0
340 $ log '2785f51ee'
340 $ log '2785f51ee'
341 0
341 0
342 $ log 'date(2005)'
342 $ log 'date(2005)'
343 4
343 4
344 $ log 'date(this is a test)'
344 $ log 'date(this is a test)'
345 hg: parse error at 10: unexpected token: symbol
345 hg: parse error at 10: unexpected token: symbol
346 [255]
346 [255]
347 $ log 'date()'
347 $ log 'date()'
348 hg: parse error: date requires a string
348 hg: parse error: date requires a string
349 [255]
349 [255]
350 $ log 'date'
350 $ log 'date'
351 abort: unknown revision 'date'!
351 abort: unknown revision 'date'!
352 [255]
352 [255]
353 $ log 'date('
353 $ log 'date('
354 hg: parse error at 5: not a prefix: end
354 hg: parse error at 5: not a prefix: end
355 [255]
355 [255]
356 $ log 'date("\xy")'
356 $ log 'date("\xy")'
357 hg: parse error: invalid \x escape
357 hg: parse error: invalid \x escape
358 [255]
358 [255]
359 $ log 'date(tip)'
359 $ log 'date(tip)'
360 abort: invalid date: 'tip'
360 abort: invalid date: 'tip'
361 [255]
361 [255]
362 $ log '0:date'
362 $ log '0:date'
363 abort: unknown revision 'date'!
363 abort: unknown revision 'date'!
364 [255]
364 [255]
365 $ log '::"date"'
365 $ log '::"date"'
366 abort: unknown revision 'date'!
366 abort: unknown revision 'date'!
367 [255]
367 [255]
368 $ hg book date -r 4
368 $ hg book date -r 4
369 $ log '0:date'
369 $ log '0:date'
370 0
370 0
371 1
371 1
372 2
372 2
373 3
373 3
374 4
374 4
375 $ log '::date'
375 $ log '::date'
376 0
376 0
377 1
377 1
378 2
378 2
379 4
379 4
380 $ log '::"date"'
380 $ log '::"date"'
381 0
381 0
382 1
382 1
383 2
383 2
384 4
384 4
385 $ log 'date(2005) and 1::'
385 $ log 'date(2005) and 1::'
386 4
386 4
387 $ hg book -d date
387 $ hg book -d date
388
388
389 keyword arguments
389 keyword arguments
390
390
391 $ log 'extra(branch, value=a)'
391 $ log 'extra(branch, value=a)'
392 0
392 0
393
393
394 $ log 'extra(branch, a, b)'
394 $ log 'extra(branch, a, b)'
395 hg: parse error: extra takes at most 2 arguments
395 hg: parse error: extra takes at most 2 arguments
396 [255]
396 [255]
397 $ log 'extra(a, label=b)'
397 $ log 'extra(a, label=b)'
398 hg: parse error: extra got multiple values for keyword argument 'label'
398 hg: parse error: extra got multiple values for keyword argument 'label'
399 [255]
399 [255]
400 $ log 'extra(label=branch, default)'
400 $ log 'extra(label=branch, default)'
401 hg: parse error: extra got an invalid argument
401 hg: parse error: extra got an invalid argument
402 [255]
402 [255]
403 $ log 'extra(branch, foo+bar=baz)'
403 $ log 'extra(branch, foo+bar=baz)'
404 hg: parse error: extra got an invalid argument
404 hg: parse error: extra got an invalid argument
405 [255]
405 [255]
406 $ log 'extra(unknown=branch)'
406 $ log 'extra(unknown=branch)'
407 hg: parse error: extra got an unexpected keyword argument 'unknown'
407 hg: parse error: extra got an unexpected keyword argument 'unknown'
408 [255]
408 [255]
409
409
410 $ try 'foo=bar|baz'
410 $ try 'foo=bar|baz'
411 (keyvalue
411 (keyvalue
412 ('symbol', 'foo')
412 ('symbol', 'foo')
413 (or
413 (or
414 ('symbol', 'bar')
414 ('symbol', 'bar')
415 ('symbol', 'baz')))
415 ('symbol', 'baz')))
416 hg: parse error: can't use a key-value pair in this context
416 hg: parse error: can't use a key-value pair in this context
417 [255]
417 [255]
418
418
419 Test that symbols only get parsed as functions if there's an opening
419 Test that symbols only get parsed as functions if there's an opening
420 parenthesis.
420 parenthesis.
421
421
422 $ hg book only -r 9
422 $ hg book only -r 9
423 $ log 'only(only)' # Outer "only" is a function, inner "only" is the bookmark
423 $ log 'only(only)' # Outer "only" is a function, inner "only" is the bookmark
424 8
424 8
425 9
425 9
426
426
427 ancestor can accept 0 or more arguments
427 ancestor can accept 0 or more arguments
428
428
429 $ log 'ancestor()'
429 $ log 'ancestor()'
430 $ log 'ancestor(1)'
430 $ log 'ancestor(1)'
431 1
431 1
432 $ log 'ancestor(4,5)'
432 $ log 'ancestor(4,5)'
433 1
433 1
434 $ log 'ancestor(4,5) and 4'
434 $ log 'ancestor(4,5) and 4'
435 $ log 'ancestor(0,0,1,3)'
435 $ log 'ancestor(0,0,1,3)'
436 0
436 0
437 $ log 'ancestor(3,1,5,3,5,1)'
437 $ log 'ancestor(3,1,5,3,5,1)'
438 1
438 1
439 $ log 'ancestor(0,1,3,5)'
439 $ log 'ancestor(0,1,3,5)'
440 0
440 0
441 $ log 'ancestor(1,2,3,4,5)'
441 $ log 'ancestor(1,2,3,4,5)'
442 1
442 1
443
443
444 test ancestors
444 test ancestors
445
445
446 $ log 'ancestors(5)'
446 $ log 'ancestors(5)'
447 0
447 0
448 1
448 1
449 3
449 3
450 5
450 5
451 $ log 'ancestor(ancestors(5))'
451 $ log 'ancestor(ancestors(5))'
452 0
452 0
453 $ log '::r3232()'
453 $ log '::r3232()'
454 0
454 0
455 1
455 1
456 2
456 2
457 3
457 3
458
458
459 $ log 'author(bob)'
459 $ log 'author(bob)'
460 2
460 2
461 $ log 'author("re:bob|test")'
461 $ log 'author("re:bob|test")'
462 0
462 0
463 1
463 1
464 2
464 2
465 3
465 3
466 4
466 4
467 5
467 5
468 6
468 6
469 7
469 7
470 8
470 8
471 9
471 9
472 $ log 'branch(Γ©)'
472 $ log 'branch(Γ©)'
473 8
473 8
474 9
474 9
475 $ log 'branch(a)'
475 $ log 'branch(a)'
476 0
476 0
477 $ hg log -r 'branch("re:a")' --template '{rev} {branch}\n'
477 $ hg log -r 'branch("re:a")' --template '{rev} {branch}\n'
478 0 a
478 0 a
479 2 a-b-c-
479 2 a-b-c-
480 3 +a+b+c+
480 3 +a+b+c+
481 4 -a-b-c-
481 4 -a-b-c-
482 5 !a/b/c/
482 5 !a/b/c/
483 6 _a_b_c_
483 6 _a_b_c_
484 7 .a.b.c.
484 7 .a.b.c.
485 $ log 'children(ancestor(4,5))'
485 $ log 'children(ancestor(4,5))'
486 2
486 2
487 3
487 3
488 $ log 'closed()'
488 $ log 'closed()'
489 $ log 'contains(a)'
489 $ log 'contains(a)'
490 0
490 0
491 1
491 1
492 3
492 3
493 5
493 5
494 $ log 'contains("../repo/a")'
494 $ log 'contains("../repo/a")'
495 0
495 0
496 1
496 1
497 3
497 3
498 5
498 5
499 $ log 'desc(B)'
499 $ log 'desc(B)'
500 5
500 5
501 $ log 'descendants(2 or 3)'
501 $ log 'descendants(2 or 3)'
502 2
502 2
503 3
503 3
504 4
504 4
505 5
505 5
506 6
506 6
507 7
507 7
508 8
508 8
509 9
509 9
510 $ log 'file("b*")'
510 $ log 'file("b*")'
511 1
511 1
512 4
512 4
513 $ log 'filelog("b")'
513 $ log 'filelog("b")'
514 1
514 1
515 4
515 4
516 $ log 'filelog("../repo/b")'
516 $ log 'filelog("../repo/b")'
517 1
517 1
518 4
518 4
519 $ log 'follow()'
519 $ log 'follow()'
520 0
520 0
521 1
521 1
522 2
522 2
523 4
523 4
524 8
524 8
525 9
525 9
526 $ log 'grep("issue\d+")'
526 $ log 'grep("issue\d+")'
527 6
527 6
528 $ try 'grep("(")' # invalid regular expression
528 $ try 'grep("(")' # invalid regular expression
529 (func
529 (func
530 ('symbol', 'grep')
530 ('symbol', 'grep')
531 ('string', '('))
531 ('string', '('))
532 hg: parse error: invalid match pattern: unbalanced parenthesis
532 hg: parse error: invalid match pattern: unbalanced parenthesis
533 [255]
533 [255]
534 $ try 'grep("\bissue\d+")'
534 $ try 'grep("\bissue\d+")'
535 (func
535 (func
536 ('symbol', 'grep')
536 ('symbol', 'grep')
537 ('string', '\x08issue\\d+'))
537 ('string', '\x08issue\\d+'))
538 * set:
538 * set:
539 <filteredset
539 <filteredset
540 <fullreposet+ 0:9>>
540 <fullreposet+ 0:9>>
541 $ try 'grep(r"\bissue\d+")'
541 $ try 'grep(r"\bissue\d+")'
542 (func
542 (func
543 ('symbol', 'grep')
543 ('symbol', 'grep')
544 ('string', '\\bissue\\d+'))
544 ('string', '\\bissue\\d+'))
545 * set:
545 * set:
546 <filteredset
546 <filteredset
547 <fullreposet+ 0:9>>
547 <fullreposet+ 0:9>>
548 6
548 6
549 $ try 'grep(r"\")'
549 $ try 'grep(r"\")'
550 hg: parse error at 7: unterminated string
550 hg: parse error at 7: unterminated string
551 [255]
551 [255]
552 $ log 'head()'
552 $ log 'head()'
553 0
553 0
554 1
554 1
555 2
555 2
556 3
556 3
557 4
557 4
558 5
558 5
559 6
559 6
560 7
560 7
561 9
561 9
562 $ log 'heads(6::)'
562 $ log 'heads(6::)'
563 7
563 7
564 $ log 'keyword(issue)'
564 $ log 'keyword(issue)'
565 6
565 6
566 $ log 'keyword("test a")'
566 $ log 'keyword("test a")'
567 $ log 'limit(head(), 1)'
567 $ log 'limit(head(), 1)'
568 0
568 0
569 $ log 'limit(author("re:bob|test"), 3, 5)'
569 $ log 'limit(author("re:bob|test"), 3, 5)'
570 5
570 5
571 6
571 6
572 7
572 7
573 $ log 'limit(author("re:bob|test"), offset=6)'
573 $ log 'limit(author("re:bob|test"), offset=6)'
574 6
574 6
575 $ log 'limit(author("re:bob|test"), offset=10)'
575 $ log 'limit(author("re:bob|test"), offset=10)'
576 $ log 'limit(all(), 1, -1)'
576 $ log 'limit(all(), 1, -1)'
577 hg: parse error: negative offset
577 hg: parse error: negative offset
578 [255]
578 [255]
579 $ log 'matching(6)'
579 $ log 'matching(6)'
580 6
580 6
581 $ log 'matching(6:7, "phase parents user date branch summary files description substate")'
581 $ log 'matching(6:7, "phase parents user date branch summary files description substate")'
582 6
582 6
583 7
583 7
584
584
585 Testing min and max
585 Testing min and max
586
586
587 max: simple
587 max: simple
588
588
589 $ log 'max(contains(a))'
589 $ log 'max(contains(a))'
590 5
590 5
591
591
592 max: simple on unordered set)
592 max: simple on unordered set)
593
593
594 $ log 'max((4+0+2+5+7) and contains(a))'
594 $ log 'max((4+0+2+5+7) and contains(a))'
595 5
595 5
596
596
597 max: no result
597 max: no result
598
598
599 $ log 'max(contains(stringthatdoesnotappearanywhere))'
599 $ log 'max(contains(stringthatdoesnotappearanywhere))'
600
600
601 max: no result on unordered set
601 max: no result on unordered set
602
602
603 $ log 'max((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))'
603 $ log 'max((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))'
604
604
605 min: simple
605 min: simple
606
606
607 $ log 'min(contains(a))'
607 $ log 'min(contains(a))'
608 0
608 0
609
609
610 min: simple on unordered set
610 min: simple on unordered set
611
611
612 $ log 'min((4+0+2+5+7) and contains(a))'
612 $ log 'min((4+0+2+5+7) and contains(a))'
613 0
613 0
614
614
615 min: empty
615 min: empty
616
616
617 $ log 'min(contains(stringthatdoesnotappearanywhere))'
617 $ log 'min(contains(stringthatdoesnotappearanywhere))'
618
618
619 min: empty on unordered set
619 min: empty on unordered set
620
620
621 $ log 'min((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))'
621 $ log 'min((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))'
622
622
623
623
624 $ log 'merge()'
624 $ log 'merge()'
625 6
625 6
626 $ log 'branchpoint()'
626 $ log 'branchpoint()'
627 1
627 1
628 4
628 4
629 $ log 'modifies(b)'
629 $ log 'modifies(b)'
630 4
630 4
631 $ log 'modifies("path:b")'
631 $ log 'modifies("path:b")'
632 4
632 4
633 $ log 'modifies("*")'
633 $ log 'modifies("*")'
634 4
634 4
635 6
635 6
636 $ log 'modifies("set:modified()")'
636 $ log 'modifies("set:modified()")'
637 4
637 4
638 $ log 'id(5)'
638 $ log 'id(5)'
639 2
639 2
640 $ log 'only(9)'
640 $ log 'only(9)'
641 8
641 8
642 9
642 9
643 $ log 'only(8)'
643 $ log 'only(8)'
644 8
644 8
645 $ log 'only(9, 5)'
645 $ log 'only(9, 5)'
646 2
646 2
647 4
647 4
648 8
648 8
649 9
649 9
650 $ log 'only(7 + 9, 5 + 2)'
650 $ log 'only(7 + 9, 5 + 2)'
651 4
651 4
652 6
652 6
653 7
653 7
654 8
654 8
655 9
655 9
656
656
657 Test empty set input
657 Test empty set input
658 $ log 'only(p2())'
658 $ log 'only(p2())'
659 $ log 'only(p1(), p2())'
659 $ log 'only(p1(), p2())'
660 0
660 0
661 1
661 1
662 2
662 2
663 4
663 4
664 8
664 8
665 9
665 9
666
666
667 Test '%' operator
667 Test '%' operator
668
668
669 $ log '9%'
669 $ log '9%'
670 8
670 8
671 9
671 9
672 $ log '9%5'
672 $ log '9%5'
673 2
673 2
674 4
674 4
675 8
675 8
676 9
676 9
677 $ log '(7 + 9)%(5 + 2)'
677 $ log '(7 + 9)%(5 + 2)'
678 4
678 4
679 6
679 6
680 7
680 7
681 8
681 8
682 9
682 9
683
683
684 Test opreand of '%' is optimized recursively (issue4670)
684 Test opreand of '%' is optimized recursively (issue4670)
685
685
686 $ try --optimize '8:9-8%'
686 $ try --optimize '8:9-8%'
687 (onlypost
687 (onlypost
688 (minus
688 (minus
689 (range
689 (range
690 ('symbol', '8')
690 ('symbol', '8')
691 ('symbol', '9'))
691 ('symbol', '9'))
692 ('symbol', '8')))
692 ('symbol', '8')))
693 * optimized:
693 * optimized:
694 (func
694 (func
695 ('symbol', 'only')
695 ('symbol', 'only')
696 (and
696 (and
697 (range
697 (range
698 ('symbol', '8')
698 ('symbol', '8')
699 ('symbol', '9'))
699 ('symbol', '9'))
700 (not
700 (not
701 ('symbol', '8'))))
701 ('symbol', '8'))))
702 * set:
702 * set:
703 <baseset+ [8, 9]>
703 <baseset+ [8, 9]>
704 8
704 8
705 9
705 9
706 $ try --optimize '(9)%(5)'
706 $ try --optimize '(9)%(5)'
707 (only
707 (only
708 (group
708 (group
709 ('symbol', '9'))
709 ('symbol', '9'))
710 (group
710 (group
711 ('symbol', '5')))
711 ('symbol', '5')))
712 * optimized:
712 * optimized:
713 (func
713 (func
714 ('symbol', 'only')
714 ('symbol', 'only')
715 (list
715 (list
716 ('symbol', '9')
716 ('symbol', '9')
717 ('symbol', '5')))
717 ('symbol', '5')))
718 * set:
718 * set:
719 <baseset+ [8, 9, 2, 4]>
719 <baseset+ [8, 9, 2, 4]>
720 2
720 2
721 4
721 4
722 8
722 8
723 9
723 9
724
724
725 Test the order of operations
725 Test the order of operations
726
726
727 $ log '7 + 9%5 + 2'
727 $ log '7 + 9%5 + 2'
728 7
728 7
729 2
729 2
730 4
730 4
731 8
731 8
732 9
732 9
733
733
734 Test explicit numeric revision
734 Test explicit numeric revision
735 $ log 'rev(-2)'
735 $ log 'rev(-2)'
736 $ log 'rev(-1)'
736 $ log 'rev(-1)'
737 -1
737 -1
738 $ log 'rev(0)'
738 $ log 'rev(0)'
739 0
739 0
740 $ log 'rev(9)'
740 $ log 'rev(9)'
741 9
741 9
742 $ log 'rev(10)'
742 $ log 'rev(10)'
743 $ log 'rev(tip)'
743 $ log 'rev(tip)'
744 hg: parse error: rev expects a number
744 hg: parse error: rev expects a number
745 [255]
745 [255]
746
746
747 Test hexadecimal revision
747 Test hexadecimal revision
748 $ log 'id(2)'
748 $ log 'id(2)'
749 abort: 00changelog.i@2: ambiguous identifier!
749 abort: 00changelog.i@2: ambiguous identifier!
750 [255]
750 [255]
751 $ log 'id(23268)'
751 $ log 'id(23268)'
752 4
752 4
753 $ log 'id(2785f51eece)'
753 $ log 'id(2785f51eece)'
754 0
754 0
755 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532c)'
755 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532c)'
756 8
756 8
757 $ log 'id(d5d0dcbdc4a)'
757 $ log 'id(d5d0dcbdc4a)'
758 $ log 'id(d5d0dcbdc4w)'
758 $ log 'id(d5d0dcbdc4w)'
759 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532d)'
759 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532d)'
760 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532q)'
760 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532q)'
761 $ log 'id(1.0)'
761 $ log 'id(1.0)'
762 $ log 'id(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)'
762 $ log 'id(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)'
763
763
764 Test null revision
764 Test null revision
765 $ log '(null)'
765 $ log '(null)'
766 -1
766 -1
767 $ log '(null:0)'
767 $ log '(null:0)'
768 -1
768 -1
769 0
769 0
770 $ log '(0:null)'
770 $ log '(0:null)'
771 0
771 0
772 -1
772 -1
773 $ log 'null::0'
773 $ log 'null::0'
774 -1
774 -1
775 0
775 0
776 $ log 'null:tip - 0:'
776 $ log 'null:tip - 0:'
777 -1
777 -1
778 $ log 'null: and null::' | head -1
778 $ log 'null: and null::' | head -1
779 -1
779 -1
780 $ log 'null: or 0:' | head -2
780 $ log 'null: or 0:' | head -2
781 -1
781 -1
782 0
782 0
783 $ log 'ancestors(null)'
783 $ log 'ancestors(null)'
784 -1
784 -1
785 $ log 'reverse(null:)' | tail -2
785 $ log 'reverse(null:)' | tail -2
786 0
786 0
787 -1
787 -1
788 BROKEN: should be '-1'
788 BROKEN: should be '-1'
789 $ log 'first(null:)'
789 $ log 'first(null:)'
790 BROKEN: should be '-1'
790 BROKEN: should be '-1'
791 $ log 'min(null:)'
791 $ log 'min(null:)'
792 $ log 'tip:null and all()' | tail -2
792 $ log 'tip:null and all()' | tail -2
793 1
793 1
794 0
794 0
795
795
796 Test working-directory revision
796 Test working-directory revision
797 $ hg debugrevspec 'wdir()'
797 $ hg debugrevspec 'wdir()'
798 2147483647
798 2147483647
799 $ hg debugrevspec 'tip or wdir()'
799 $ hg debugrevspec 'tip or wdir()'
800 9
800 9
801 2147483647
801 2147483647
802 $ hg debugrevspec '0:tip and wdir()'
802 $ hg debugrevspec '0:tip and wdir()'
803 $ log '0:wdir()' | tail -3
803 $ log '0:wdir()' | tail -3
804 8
804 8
805 9
805 9
806 2147483647
806 2147483647
807 $ log 'wdir():0' | head -3
807 $ log 'wdir():0' | head -3
808 2147483647
808 2147483647
809 9
809 9
810 8
810 8
811 $ log 'wdir():wdir()'
811 $ log 'wdir():wdir()'
812 2147483647
812 2147483647
813 $ log '(all() + wdir()) & min(. + wdir())'
813 $ log '(all() + wdir()) & min(. + wdir())'
814 9
814 9
815 $ log '(all() + wdir()) & max(. + wdir())'
815 $ log '(all() + wdir()) & max(. + wdir())'
816 2147483647
816 2147483647
817 $ log '(all() + wdir()) & first(wdir() + .)'
817 $ log '(all() + wdir()) & first(wdir() + .)'
818 2147483647
818 2147483647
819 $ log '(all() + wdir()) & last(. + wdir())'
819 $ log '(all() + wdir()) & last(. + wdir())'
820 2147483647
820 2147483647
821
821
822 $ log 'outgoing()'
822 $ log 'outgoing()'
823 8
823 8
824 9
824 9
825 $ log 'outgoing("../remote1")'
825 $ log 'outgoing("../remote1")'
826 8
826 8
827 9
827 9
828 $ log 'outgoing("../remote2")'
828 $ log 'outgoing("../remote2")'
829 3
829 3
830 5
830 5
831 6
831 6
832 7
832 7
833 9
833 9
834 $ log 'p1(merge())'
834 $ log 'p1(merge())'
835 5
835 5
836 $ log 'p2(merge())'
836 $ log 'p2(merge())'
837 4
837 4
838 $ log 'parents(merge())'
838 $ log 'parents(merge())'
839 4
839 4
840 5
840 5
841 $ log 'p1(branchpoint())'
841 $ log 'p1(branchpoint())'
842 0
842 0
843 2
843 2
844 $ log 'p2(branchpoint())'
844 $ log 'p2(branchpoint())'
845 $ log 'parents(branchpoint())'
845 $ log 'parents(branchpoint())'
846 0
846 0
847 2
847 2
848 $ log 'removes(a)'
848 $ log 'removes(a)'
849 2
849 2
850 6
850 6
851 $ log 'roots(all())'
851 $ log 'roots(all())'
852 0
852 0
853 $ log 'reverse(2 or 3 or 4 or 5)'
853 $ log 'reverse(2 or 3 or 4 or 5)'
854 5
854 5
855 4
855 4
856 3
856 3
857 2
857 2
858 $ log 'reverse(all())'
858 $ log 'reverse(all())'
859 9
859 9
860 8
860 8
861 7
861 7
862 6
862 6
863 5
863 5
864 4
864 4
865 3
865 3
866 2
866 2
867 1
867 1
868 0
868 0
869 $ log 'reverse(all()) & filelog(b)'
869 $ log 'reverse(all()) & filelog(b)'
870 4
870 4
871 1
871 1
872 $ log 'rev(5)'
872 $ log 'rev(5)'
873 5
873 5
874 $ log 'sort(limit(reverse(all()), 3))'
874 $ log 'sort(limit(reverse(all()), 3))'
875 7
875 7
876 8
876 8
877 9
877 9
878 $ log 'sort(2 or 3 or 4 or 5, date)'
878 $ log 'sort(2 or 3 or 4 or 5, date)'
879 2
879 2
880 3
880 3
881 5
881 5
882 4
882 4
883 $ log 'tagged()'
883 $ log 'tagged()'
884 6
884 6
885 $ log 'tag()'
885 $ log 'tag()'
886 6
886 6
887 $ log 'tag(1.0)'
887 $ log 'tag(1.0)'
888 6
888 6
889 $ log 'tag(tip)'
889 $ log 'tag(tip)'
890 9
890 9
891
891
892 test sort revset
892 test sort revset
893 --------------------------------------------
893 --------------------------------------------
894
894
895 test when adding two unordered revsets
895 test when adding two unordered revsets
896
896
897 $ log 'sort(keyword(issue) or modifies(b))'
897 $ log 'sort(keyword(issue) or modifies(b))'
898 4
898 4
899 6
899 6
900
900
901 test when sorting a reversed collection in the same way it is
901 test when sorting a reversed collection in the same way it is
902
902
903 $ log 'sort(reverse(all()), -rev)'
903 $ log 'sort(reverse(all()), -rev)'
904 9
904 9
905 8
905 8
906 7
906 7
907 6
907 6
908 5
908 5
909 4
909 4
910 3
910 3
911 2
911 2
912 1
912 1
913 0
913 0
914
914
915 test when sorting a reversed collection
915 test when sorting a reversed collection
916
916
917 $ log 'sort(reverse(all()), rev)'
917 $ log 'sort(reverse(all()), rev)'
918 0
918 0
919 1
919 1
920 2
920 2
921 3
921 3
922 4
922 4
923 5
923 5
924 6
924 6
925 7
925 7
926 8
926 8
927 9
927 9
928
928
929
929
930 test sorting two sorted collections in different orders
930 test sorting two sorted collections in different orders
931
931
932 $ log 'sort(outgoing() or reverse(removes(a)), rev)'
932 $ log 'sort(outgoing() or reverse(removes(a)), rev)'
933 2
933 2
934 6
934 6
935 8
935 8
936 9
936 9
937
937
938 test sorting two sorted collections in different orders backwards
938 test sorting two sorted collections in different orders backwards
939
939
940 $ log 'sort(outgoing() or reverse(removes(a)), -rev)'
940 $ log 'sort(outgoing() or reverse(removes(a)), -rev)'
941 9
941 9
942 8
942 8
943 6
943 6
944 2
944 2
945
945
946 test subtracting something from an addset
946 test subtracting something from an addset
947
947
948 $ log '(outgoing() or removes(a)) - removes(a)'
948 $ log '(outgoing() or removes(a)) - removes(a)'
949 8
949 8
950 9
950 9
951
951
952 test intersecting something with an addset
952 test intersecting something with an addset
953
953
954 $ log 'parents(outgoing() or removes(a))'
954 $ log 'parents(outgoing() or removes(a))'
955 1
955 1
956 4
956 4
957 5
957 5
958 8
958 8
959
959
960 test that `or` operation combines elements in the right order:
960 test that `or` operation combines elements in the right order:
961
961
962 $ log '3:4 or 2:5'
962 $ log '3:4 or 2:5'
963 3
963 3
964 4
964 4
965 2
965 2
966 5
966 5
967 $ log '3:4 or 5:2'
967 $ log '3:4 or 5:2'
968 3
968 3
969 4
969 4
970 5
970 5
971 2
971 2
972 $ log 'sort(3:4 or 2:5)'
972 $ log 'sort(3:4 or 2:5)'
973 2
973 2
974 3
974 3
975 4
975 4
976 5
976 5
977 $ log 'sort(3:4 or 5:2)'
977 $ log 'sort(3:4 or 5:2)'
978 2
978 2
979 3
979 3
980 4
980 4
981 5
981 5
982
982
983 test that more than one `-r`s are combined in the right order and deduplicated:
983 test that more than one `-r`s are combined in the right order and deduplicated:
984
984
985 $ hg log -T '{rev}\n' -r 3 -r 3 -r 4 -r 5:2 -r 'ancestors(4)'
985 $ hg log -T '{rev}\n' -r 3 -r 3 -r 4 -r 5:2 -r 'ancestors(4)'
986 3
986 3
987 4
987 4
988 5
988 5
989 2
989 2
990 0
990 0
991 1
991 1
992
992
993 test that `or` operation skips duplicated revisions from right-hand side
993 test that `or` operation skips duplicated revisions from right-hand side
994
994
995 $ try 'reverse(1::5) or ancestors(4)'
995 $ try 'reverse(1::5) or ancestors(4)'
996 (or
996 (or
997 (func
997 (func
998 ('symbol', 'reverse')
998 ('symbol', 'reverse')
999 (dagrange
999 (dagrange
1000 ('symbol', '1')
1000 ('symbol', '1')
1001 ('symbol', '5')))
1001 ('symbol', '5')))
1002 (func
1002 (func
1003 ('symbol', 'ancestors')
1003 ('symbol', 'ancestors')
1004 ('symbol', '4')))
1004 ('symbol', '4')))
1005 * set:
1005 * set:
1006 <addset
1006 <addset
1007 <baseset- [1, 3, 5]>,
1007 <baseset- [1, 3, 5]>,
1008 <generatorset+>>
1008 <generatorset+>>
1009 5
1009 5
1010 3
1010 3
1011 1
1011 1
1012 0
1012 0
1013 2
1013 2
1014 4
1014 4
1015 $ try 'sort(ancestors(4) or reverse(1::5))'
1015 $ try 'sort(ancestors(4) or reverse(1::5))'
1016 (func
1016 (func
1017 ('symbol', 'sort')
1017 ('symbol', 'sort')
1018 (or
1018 (or
1019 (func
1019 (func
1020 ('symbol', 'ancestors')
1020 ('symbol', 'ancestors')
1021 ('symbol', '4'))
1021 ('symbol', '4'))
1022 (func
1022 (func
1023 ('symbol', 'reverse')
1023 ('symbol', 'reverse')
1024 (dagrange
1024 (dagrange
1025 ('symbol', '1')
1025 ('symbol', '1')
1026 ('symbol', '5')))))
1026 ('symbol', '5')))))
1027 * set:
1027 * set:
1028 <addset+
1028 <addset+
1029 <generatorset+>,
1029 <generatorset+>,
1030 <baseset- [1, 3, 5]>>
1030 <baseset- [1, 3, 5]>>
1031 0
1031 0
1032 1
1032 1
1033 2
1033 2
1034 3
1034 3
1035 4
1035 4
1036 5
1036 5
1037
1037
1038 test optimization of trivial `or` operation
1038 test optimization of trivial `or` operation
1039
1039
1040 $ try --optimize '0|(1)|"2"|-2|tip|null'
1040 $ try --optimize '0|(1)|"2"|-2|tip|null'
1041 (or
1041 (or
1042 ('symbol', '0')
1042 ('symbol', '0')
1043 (group
1043 (group
1044 ('symbol', '1'))
1044 ('symbol', '1'))
1045 ('string', '2')
1045 ('string', '2')
1046 (negate
1046 (negate
1047 ('symbol', '2'))
1047 ('symbol', '2'))
1048 ('symbol', 'tip')
1048 ('symbol', 'tip')
1049 ('symbol', 'null'))
1049 ('symbol', 'null'))
1050 * optimized:
1050 * optimized:
1051 (func
1051 (func
1052 ('symbol', '_list')
1052 ('symbol', '_list')
1053 ('string', '0\x001\x002\x00-2\x00tip\x00null'))
1053 ('string', '0\x001\x002\x00-2\x00tip\x00null'))
1054 * set:
1054 * set:
1055 <baseset [0, 1, 2, 8, 9, -1]>
1055 <baseset [0, 1, 2, 8, 9, -1]>
1056 0
1056 0
1057 1
1057 1
1058 2
1058 2
1059 8
1059 8
1060 9
1060 9
1061 -1
1061 -1
1062
1062
1063 $ try --optimize '0|1|2:3'
1063 $ try --optimize '0|1|2:3'
1064 (or
1064 (or
1065 ('symbol', '0')
1065 ('symbol', '0')
1066 ('symbol', '1')
1066 ('symbol', '1')
1067 (range
1067 (range
1068 ('symbol', '2')
1068 ('symbol', '2')
1069 ('symbol', '3')))
1069 ('symbol', '3')))
1070 * optimized:
1070 * optimized:
1071 (or
1071 (or
1072 (func
1072 (func
1073 ('symbol', '_list')
1073 ('symbol', '_list')
1074 ('string', '0\x001'))
1074 ('string', '0\x001'))
1075 (range
1075 (range
1076 ('symbol', '2')
1076 ('symbol', '2')
1077 ('symbol', '3')))
1077 ('symbol', '3')))
1078 * set:
1078 * set:
1079 <addset
1079 <addset
1080 <baseset [0, 1]>,
1080 <baseset [0, 1]>,
1081 <spanset+ 2:3>>
1081 <spanset+ 2:3>>
1082 0
1082 0
1083 1
1083 1
1084 2
1084 2
1085 3
1085 3
1086
1086
1087 $ try --optimize '0:1|2|3:4|5|6'
1087 $ try --optimize '0:1|2|3:4|5|6'
1088 (or
1088 (or
1089 (range
1089 (range
1090 ('symbol', '0')
1090 ('symbol', '0')
1091 ('symbol', '1'))
1091 ('symbol', '1'))
1092 ('symbol', '2')
1092 ('symbol', '2')
1093 (range
1093 (range
1094 ('symbol', '3')
1094 ('symbol', '3')
1095 ('symbol', '4'))
1095 ('symbol', '4'))
1096 ('symbol', '5')
1096 ('symbol', '5')
1097 ('symbol', '6'))
1097 ('symbol', '6'))
1098 * optimized:
1098 * optimized:
1099 (or
1099 (or
1100 (range
1100 (range
1101 ('symbol', '0')
1101 ('symbol', '0')
1102 ('symbol', '1'))
1102 ('symbol', '1'))
1103 ('symbol', '2')
1103 ('symbol', '2')
1104 (range
1104 (range
1105 ('symbol', '3')
1105 ('symbol', '3')
1106 ('symbol', '4'))
1106 ('symbol', '4'))
1107 (func
1107 (func
1108 ('symbol', '_list')
1108 ('symbol', '_list')
1109 ('string', '5\x006')))
1109 ('string', '5\x006')))
1110 * set:
1110 * set:
1111 <addset
1111 <addset
1112 <addset
1112 <addset
1113 <spanset+ 0:1>,
1113 <spanset+ 0:1>,
1114 <baseset [2]>>,
1114 <baseset [2]>>,
1115 <addset
1115 <addset
1116 <spanset+ 3:4>,
1116 <spanset+ 3:4>,
1117 <baseset [5, 6]>>>
1117 <baseset [5, 6]>>>
1118 0
1118 0
1119 1
1119 1
1120 2
1120 2
1121 3
1121 3
1122 4
1122 4
1123 5
1123 5
1124 6
1124 6
1125
1125
1126 test that `_list` should be narrowed by provided `subset`
1126 test that `_list` should be narrowed by provided `subset`
1127
1127
1128 $ log '0:2 and (null|1|2|3)'
1128 $ log '0:2 and (null|1|2|3)'
1129 1
1129 1
1130 2
1130 2
1131
1131
1132 test that `_list` should remove duplicates
1132 test that `_list` should remove duplicates
1133
1133
1134 $ log '0|1|2|1|2|-1|tip'
1134 $ log '0|1|2|1|2|-1|tip'
1135 0
1135 0
1136 1
1136 1
1137 2
1137 2
1138 9
1138 9
1139
1139
1140 test unknown revision in `_list`
1140 test unknown revision in `_list`
1141
1141
1142 $ log '0|unknown'
1142 $ log '0|unknown'
1143 abort: unknown revision 'unknown'!
1143 abort: unknown revision 'unknown'!
1144 [255]
1144 [255]
1145
1145
1146 test integer range in `_list`
1146 test integer range in `_list`
1147
1147
1148 $ log '-1|-10'
1148 $ log '-1|-10'
1149 9
1149 9
1150 0
1150 0
1151
1151
1152 $ log '-10|-11'
1152 $ log '-10|-11'
1153 abort: unknown revision '-11'!
1153 abort: unknown revision '-11'!
1154 [255]
1154 [255]
1155
1155
1156 $ log '9|10'
1156 $ log '9|10'
1157 abort: unknown revision '10'!
1157 abort: unknown revision '10'!
1158 [255]
1158 [255]
1159
1159
1160 test '0000' != '0' in `_list`
1160 test '0000' != '0' in `_list`
1161
1161
1162 $ log '0|0000'
1162 $ log '0|0000'
1163 0
1163 0
1164 -1
1164 -1
1165
1165
1166 test ',' in `_list`
1166 test ',' in `_list`
1167 $ log '0,1'
1167 $ log '0,1'
1168 hg: parse error: can't use a list in this context
1168 hg: parse error: can't use a list in this context
1169 (see hg help "revsets.x or y")
1169 (see hg help "revsets.x or y")
1170 [255]
1170 [255]
1171
1171
1172 test that chained `or` operations make balanced addsets
1172 test that chained `or` operations make balanced addsets
1173
1173
1174 $ try '0:1|1:2|2:3|3:4|4:5'
1174 $ try '0:1|1:2|2:3|3:4|4:5'
1175 (or
1175 (or
1176 (range
1176 (range
1177 ('symbol', '0')
1177 ('symbol', '0')
1178 ('symbol', '1'))
1178 ('symbol', '1'))
1179 (range
1179 (range
1180 ('symbol', '1')
1180 ('symbol', '1')
1181 ('symbol', '2'))
1181 ('symbol', '2'))
1182 (range
1182 (range
1183 ('symbol', '2')
1183 ('symbol', '2')
1184 ('symbol', '3'))
1184 ('symbol', '3'))
1185 (range
1185 (range
1186 ('symbol', '3')
1186 ('symbol', '3')
1187 ('symbol', '4'))
1187 ('symbol', '4'))
1188 (range
1188 (range
1189 ('symbol', '4')
1189 ('symbol', '4')
1190 ('symbol', '5')))
1190 ('symbol', '5')))
1191 * set:
1191 * set:
1192 <addset
1192 <addset
1193 <addset
1193 <addset
1194 <spanset+ 0:1>,
1194 <spanset+ 0:1>,
1195 <spanset+ 1:2>>,
1195 <spanset+ 1:2>>,
1196 <addset
1196 <addset
1197 <spanset+ 2:3>,
1197 <spanset+ 2:3>,
1198 <addset
1198 <addset
1199 <spanset+ 3:4>,
1199 <spanset+ 3:4>,
1200 <spanset+ 4:5>>>>
1200 <spanset+ 4:5>>>>
1201 0
1201 0
1202 1
1202 1
1203 2
1203 2
1204 3
1204 3
1205 4
1205 4
1206 5
1206 5
1207
1207
1208 no crash by empty group "()" while optimizing `or` operations
1208 no crash by empty group "()" while optimizing `or` operations
1209
1209
1210 $ try --optimize '0|()'
1210 $ try --optimize '0|()'
1211 (or
1211 (or
1212 ('symbol', '0')
1212 ('symbol', '0')
1213 (group
1213 (group
1214 None))
1214 None))
1215 * optimized:
1215 * optimized:
1216 (or
1216 (or
1217 ('symbol', '0')
1217 ('symbol', '0')
1218 None)
1218 None)
1219 hg: parse error: missing argument
1219 hg: parse error: missing argument
1220 [255]
1220 [255]
1221
1221
1222 test that chained `or` operations never eat up stack (issue4624)
1222 test that chained `or` operations never eat up stack (issue4624)
1223 (uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
1223 (uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
1224
1224
1225 $ hg log -T '{rev}\n' -r "`python -c "print '|'.join(['0:1'] * 500)"`"
1225 $ hg log -T '{rev}\n' -r "`python -c "print '|'.join(['0:1'] * 500)"`"
1226 0
1226 0
1227 1
1227 1
1228
1228
1229 test that repeated `-r` options never eat up stack (issue4565)
1229 test that repeated `-r` options never eat up stack (issue4565)
1230 (uses `-r 0::1` to avoid possible optimization at old-style parser)
1230 (uses `-r 0::1` to avoid possible optimization at old-style parser)
1231
1231
1232 $ hg log -T '{rev}\n' `python -c "for i in xrange(500): print '-r 0::1 ',"`
1232 $ hg log -T '{rev}\n' `python -c "for i in xrange(500): print '-r 0::1 ',"`
1233 0
1233 0
1234 1
1234 1
1235
1235
1236 check that conversion to only works
1236 check that conversion to only works
1237 $ try --optimize '::3 - ::1'
1237 $ try --optimize '::3 - ::1'
1238 (minus
1238 (minus
1239 (dagrangepre
1239 (dagrangepre
1240 ('symbol', '3'))
1240 ('symbol', '3'))
1241 (dagrangepre
1241 (dagrangepre
1242 ('symbol', '1')))
1242 ('symbol', '1')))
1243 * optimized:
1243 * optimized:
1244 (func
1244 (func
1245 ('symbol', 'only')
1245 ('symbol', 'only')
1246 (list
1246 (list
1247 ('symbol', '3')
1247 ('symbol', '3')
1248 ('symbol', '1')))
1248 ('symbol', '1')))
1249 * set:
1249 * set:
1250 <baseset+ [3]>
1250 <baseset+ [3]>
1251 3
1251 3
1252 $ try --optimize 'ancestors(1) - ancestors(3)'
1252 $ try --optimize 'ancestors(1) - ancestors(3)'
1253 (minus
1253 (minus
1254 (func
1254 (func
1255 ('symbol', 'ancestors')
1255 ('symbol', 'ancestors')
1256 ('symbol', '1'))
1256 ('symbol', '1'))
1257 (func
1257 (func
1258 ('symbol', 'ancestors')
1258 ('symbol', 'ancestors')
1259 ('symbol', '3')))
1259 ('symbol', '3')))
1260 * optimized:
1260 * optimized:
1261 (func
1261 (func
1262 ('symbol', 'only')
1262 ('symbol', 'only')
1263 (list
1263 (list
1264 ('symbol', '1')
1264 ('symbol', '1')
1265 ('symbol', '3')))
1265 ('symbol', '3')))
1266 * set:
1266 * set:
1267 <baseset+ []>
1267 <baseset+ []>
1268 $ try --optimize 'not ::2 and ::6'
1268 $ try --optimize 'not ::2 and ::6'
1269 (and
1269 (and
1270 (not
1270 (not
1271 (dagrangepre
1271 (dagrangepre
1272 ('symbol', '2')))
1272 ('symbol', '2')))
1273 (dagrangepre
1273 (dagrangepre
1274 ('symbol', '6')))
1274 ('symbol', '6')))
1275 * optimized:
1275 * optimized:
1276 (func
1276 (func
1277 ('symbol', 'only')
1277 ('symbol', 'only')
1278 (list
1278 (list
1279 ('symbol', '6')
1279 ('symbol', '6')
1280 ('symbol', '2')))
1280 ('symbol', '2')))
1281 * set:
1281 * set:
1282 <baseset+ [3, 4, 5, 6]>
1282 <baseset+ [3, 4, 5, 6]>
1283 3
1283 3
1284 4
1284 4
1285 5
1285 5
1286 6
1286 6
1287 $ try --optimize 'ancestors(6) and not ancestors(4)'
1287 $ try --optimize 'ancestors(6) and not ancestors(4)'
1288 (and
1288 (and
1289 (func
1289 (func
1290 ('symbol', 'ancestors')
1290 ('symbol', 'ancestors')
1291 ('symbol', '6'))
1291 ('symbol', '6'))
1292 (not
1292 (not
1293 (func
1293 (func
1294 ('symbol', 'ancestors')
1294 ('symbol', 'ancestors')
1295 ('symbol', '4'))))
1295 ('symbol', '4'))))
1296 * optimized:
1296 * optimized:
1297 (func
1297 (func
1298 ('symbol', 'only')
1298 ('symbol', 'only')
1299 (list
1299 (list
1300 ('symbol', '6')
1300 ('symbol', '6')
1301 ('symbol', '4')))
1301 ('symbol', '4')))
1302 * set:
1302 * set:
1303 <baseset+ [3, 5, 6]>
1303 <baseset+ [3, 5, 6]>
1304 3
1304 3
1305 5
1305 5
1306 6
1306 6
1307
1307
1308 no crash by empty group "()" while optimizing to "only()"
1308 no crash by empty group "()" while optimizing to "only()"
1309
1309
1310 $ try --optimize '::1 and ()'
1310 $ try --optimize '::1 and ()'
1311 (and
1311 (and
1312 (dagrangepre
1312 (dagrangepre
1313 ('symbol', '1'))
1313 ('symbol', '1'))
1314 (group
1314 (group
1315 None))
1315 None))
1316 * optimized:
1316 * optimized:
1317 (and
1317 (and
1318 None
1318 None
1319 (func
1319 (func
1320 ('symbol', 'ancestors')
1320 ('symbol', 'ancestors')
1321 ('symbol', '1')))
1321 ('symbol', '1')))
1322 hg: parse error: missing argument
1322 hg: parse error: missing argument
1323 [255]
1323 [255]
1324
1324
1325 we can use patterns when searching for tags
1325 we can use patterns when searching for tags
1326
1326
1327 $ log 'tag("1..*")'
1327 $ log 'tag("1..*")'
1328 abort: tag '1..*' does not exist!
1328 abort: tag '1..*' does not exist!
1329 [255]
1329 [255]
1330 $ log 'tag("re:1..*")'
1330 $ log 'tag("re:1..*")'
1331 6
1331 6
1332 $ log 'tag("re:[0-9].[0-9]")'
1332 $ log 'tag("re:[0-9].[0-9]")'
1333 6
1333 6
1334 $ log 'tag("literal:1.0")'
1334 $ log 'tag("literal:1.0")'
1335 6
1335 6
1336 $ log 'tag("re:0..*")'
1336 $ log 'tag("re:0..*")'
1337
1337
1338 $ log 'tag(unknown)'
1338 $ log 'tag(unknown)'
1339 abort: tag 'unknown' does not exist!
1339 abort: tag 'unknown' does not exist!
1340 [255]
1340 [255]
1341 $ log 'tag("re:unknown")'
1341 $ log 'tag("re:unknown")'
1342 $ log 'present(tag("unknown"))'
1342 $ log 'present(tag("unknown"))'
1343 $ log 'present(tag("re:unknown"))'
1343 $ log 'present(tag("re:unknown"))'
1344 $ log 'branch(unknown)'
1344 $ log 'branch(unknown)'
1345 abort: unknown revision 'unknown'!
1345 abort: unknown revision 'unknown'!
1346 [255]
1346 [255]
1347 $ log 'branch("literal:unknown")'
1347 $ log 'branch("literal:unknown")'
1348 abort: branch 'unknown' does not exist!
1348 abort: branch 'unknown' does not exist!
1349 [255]
1349 [255]
1350 $ log 'branch("re:unknown")'
1350 $ log 'branch("re:unknown")'
1351 $ log 'present(branch("unknown"))'
1351 $ log 'present(branch("unknown"))'
1352 $ log 'present(branch("re:unknown"))'
1352 $ log 'present(branch("re:unknown"))'
1353 $ log 'user(bob)'
1353 $ log 'user(bob)'
1354 2
1354 2
1355
1355
1356 $ log '4::8'
1356 $ log '4::8'
1357 4
1357 4
1358 8
1358 8
1359 $ log '4:8'
1359 $ log '4:8'
1360 4
1360 4
1361 5
1361 5
1362 6
1362 6
1363 7
1363 7
1364 8
1364 8
1365
1365
1366 $ log 'sort(!merge() & (modifies(b) | user(bob) | keyword(bug) | keyword(issue) & 1::9), "-date")'
1366 $ log 'sort(!merge() & (modifies(b) | user(bob) | keyword(bug) | keyword(issue) & 1::9), "-date")'
1367 4
1367 4
1368 2
1368 2
1369 5
1369 5
1370
1370
1371 $ log 'not 0 and 0:2'
1371 $ log 'not 0 and 0:2'
1372 1
1372 1
1373 2
1373 2
1374 $ log 'not 1 and 0:2'
1374 $ log 'not 1 and 0:2'
1375 0
1375 0
1376 2
1376 2
1377 $ log 'not 2 and 0:2'
1377 $ log 'not 2 and 0:2'
1378 0
1378 0
1379 1
1379 1
1380 $ log '(1 and 2)::'
1380 $ log '(1 and 2)::'
1381 $ log '(1 and 2):'
1381 $ log '(1 and 2):'
1382 $ log '(1 and 2):3'
1382 $ log '(1 and 2):3'
1383 $ log 'sort(head(), -rev)'
1383 $ log 'sort(head(), -rev)'
1384 9
1384 9
1385 7
1385 7
1386 6
1386 6
1387 5
1387 5
1388 4
1388 4
1389 3
1389 3
1390 2
1390 2
1391 1
1391 1
1392 0
1392 0
1393 $ log '4::8 - 8'
1393 $ log '4::8 - 8'
1394 4
1394 4
1395 $ log 'matching(1 or 2 or 3) and (2 or 3 or 1)'
1395 $ log 'matching(1 or 2 or 3) and (2 or 3 or 1)'
1396 2
1396 2
1397 3
1397 3
1398 1
1398 1
1399
1399
1400 $ log 'named("unknown")'
1400 $ log 'named("unknown")'
1401 abort: namespace 'unknown' does not exist!
1401 abort: namespace 'unknown' does not exist!
1402 [255]
1402 [255]
1403 $ log 'named("re:unknown")'
1403 $ log 'named("re:unknown")'
1404 abort: no namespace exists that match 'unknown'!
1404 abort: no namespace exists that match 'unknown'!
1405 [255]
1405 [255]
1406 $ log 'present(named("unknown"))'
1406 $ log 'present(named("unknown"))'
1407 $ log 'present(named("re:unknown"))'
1407 $ log 'present(named("re:unknown"))'
1408
1408
1409 $ log 'tag()'
1409 $ log 'tag()'
1410 6
1410 6
1411 $ log 'named("tags")'
1411 $ log 'named("tags")'
1412 6
1412 6
1413
1413
1414 issue2437
1414 issue2437
1415
1415
1416 $ log '3 and p1(5)'
1416 $ log '3 and p1(5)'
1417 3
1417 3
1418 $ log '4 and p2(6)'
1418 $ log '4 and p2(6)'
1419 4
1419 4
1420 $ log '1 and parents(:2)'
1420 $ log '1 and parents(:2)'
1421 1
1421 1
1422 $ log '2 and children(1:)'
1422 $ log '2 and children(1:)'
1423 2
1423 2
1424 $ log 'roots(all()) or roots(all())'
1424 $ log 'roots(all()) or roots(all())'
1425 0
1425 0
1426 $ hg debugrevspec 'roots(all()) or roots(all())'
1426 $ hg debugrevspec 'roots(all()) or roots(all())'
1427 0
1427 0
1428 $ log 'heads(branch(Γ©)) or heads(branch(Γ©))'
1428 $ log 'heads(branch(Γ©)) or heads(branch(Γ©))'
1429 9
1429 9
1430 $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(Γ©)))'
1430 $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(Γ©)))'
1431 4
1431 4
1432
1432
1433 issue2654: report a parse error if the revset was not completely parsed
1433 issue2654: report a parse error if the revset was not completely parsed
1434
1434
1435 $ log '1 OR 2'
1435 $ log '1 OR 2'
1436 hg: parse error at 2: invalid token
1436 hg: parse error at 2: invalid token
1437 [255]
1437 [255]
1438
1438
1439 or operator should preserve ordering:
1439 or operator should preserve ordering:
1440 $ log 'reverse(2::4) or tip'
1440 $ log 'reverse(2::4) or tip'
1441 4
1441 4
1442 2
1442 2
1443 9
1443 9
1444
1444
1445 parentrevspec
1445 parentrevspec
1446
1446
1447 $ log 'merge()^0'
1447 $ log 'merge()^0'
1448 6
1448 6
1449 $ log 'merge()^'
1449 $ log 'merge()^'
1450 5
1450 5
1451 $ log 'merge()^1'
1451 $ log 'merge()^1'
1452 5
1452 5
1453 $ log 'merge()^2'
1453 $ log 'merge()^2'
1454 4
1454 4
1455 $ log 'merge()^^'
1455 $ log 'merge()^^'
1456 3
1456 3
1457 $ log 'merge()^1^'
1457 $ log 'merge()^1^'
1458 3
1458 3
1459 $ log 'merge()^^^'
1459 $ log 'merge()^^^'
1460 1
1460 1
1461
1461
1462 $ log 'merge()~0'
1462 $ log 'merge()~0'
1463 6
1463 6
1464 $ log 'merge()~1'
1464 $ log 'merge()~1'
1465 5
1465 5
1466 $ log 'merge()~2'
1466 $ log 'merge()~2'
1467 3
1467 3
1468 $ log 'merge()~2^1'
1468 $ log 'merge()~2^1'
1469 1
1469 1
1470 $ log 'merge()~3'
1470 $ log 'merge()~3'
1471 1
1471 1
1472
1472
1473 $ log '(-3:tip)^'
1473 $ log '(-3:tip)^'
1474 4
1474 4
1475 6
1475 6
1476 8
1476 8
1477
1477
1478 $ log 'tip^foo'
1478 $ log 'tip^foo'
1479 hg: parse error: ^ expects a number 0, 1, or 2
1479 hg: parse error: ^ expects a number 0, 1, or 2
1480 [255]
1480 [255]
1481
1481
1482 Bogus function gets suggestions
1482 Bogus function gets suggestions
1483 $ log 'add()'
1483 $ log 'add()'
1484 hg: parse error: unknown identifier: add
1484 hg: parse error: unknown identifier: add
1485 (did you mean 'adds'?)
1485 (did you mean 'adds'?)
1486 [255]
1486 [255]
1487 $ log 'added()'
1487 $ log 'added()'
1488 hg: parse error: unknown identifier: added
1488 hg: parse error: unknown identifier: added
1489 (did you mean 'adds'?)
1489 (did you mean 'adds'?)
1490 [255]
1490 [255]
1491 $ log 'remo()'
1491 $ log 'remo()'
1492 hg: parse error: unknown identifier: remo
1492 hg: parse error: unknown identifier: remo
1493 (did you mean one of remote, removes?)
1493 (did you mean one of remote, removes?)
1494 [255]
1494 [255]
1495 $ log 'babar()'
1495 $ log 'babar()'
1496 hg: parse error: unknown identifier: babar
1496 hg: parse error: unknown identifier: babar
1497 [255]
1497 [255]
1498
1498
1499 Bogus function with a similar internal name doesn't suggest the internal name
1499 Bogus function with a similar internal name doesn't suggest the internal name
1500 $ log 'matches()'
1500 $ log 'matches()'
1501 hg: parse error: unknown identifier: matches
1501 hg: parse error: unknown identifier: matches
1502 (did you mean 'matching'?)
1502 (did you mean 'matching'?)
1503 [255]
1503 [255]
1504
1504
1505 Undocumented functions aren't suggested as similar either
1505 Undocumented functions aren't suggested as similar either
1506 $ log 'wdir2()'
1506 $ log 'wdir2()'
1507 hg: parse error: unknown identifier: wdir2
1507 hg: parse error: unknown identifier: wdir2
1508 [255]
1508 [255]
1509
1509
1510 multiple revspecs
1510 multiple revspecs
1511
1511
1512 $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n'
1512 $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n'
1513 8
1513 8
1514 9
1514 9
1515 4
1515 4
1516 5
1516 5
1517 6
1517 6
1518 7
1518 7
1519
1519
1520 test usage in revpair (with "+")
1520 test usage in revpair (with "+")
1521
1521
1522 (real pair)
1522 (real pair)
1523
1523
1524 $ hg diff -r 'tip^^' -r 'tip'
1524 $ hg diff -r 'tip^^' -r 'tip'
1525 diff -r 2326846efdab -r 24286f4ae135 .hgtags
1525 diff -r 2326846efdab -r 24286f4ae135 .hgtags
1526 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1526 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1527 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
1527 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
1528 @@ -0,0 +1,1 @@
1528 @@ -0,0 +1,1 @@
1529 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1529 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1530 $ hg diff -r 'tip^^::tip'
1530 $ hg diff -r 'tip^^::tip'
1531 diff -r 2326846efdab -r 24286f4ae135 .hgtags
1531 diff -r 2326846efdab -r 24286f4ae135 .hgtags
1532 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1532 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1533 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
1533 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
1534 @@ -0,0 +1,1 @@
1534 @@ -0,0 +1,1 @@
1535 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1535 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1536
1536
1537 (single rev)
1537 (single rev)
1538
1538
1539 $ hg diff -r 'tip^' -r 'tip^'
1539 $ hg diff -r 'tip^' -r 'tip^'
1540 $ hg diff -r 'tip^:tip^'
1540 $ hg diff -r 'tip^:tip^'
1541
1541
1542 (single rev that does not look like a range)
1542 (single rev that does not look like a range)
1543
1543
1544 $ hg diff -r 'tip^::tip^ or tip^'
1544 $ hg diff -r 'tip^::tip^ or tip^'
1545 diff -r d5d0dcbdc4d9 .hgtags
1545 diff -r d5d0dcbdc4d9 .hgtags
1546 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1546 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1547 +++ b/.hgtags * (glob)
1547 +++ b/.hgtags * (glob)
1548 @@ -0,0 +1,1 @@
1548 @@ -0,0 +1,1 @@
1549 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1549 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1550 $ hg diff -r 'tip^ or tip^'
1550 $ hg diff -r 'tip^ or tip^'
1551 diff -r d5d0dcbdc4d9 .hgtags
1551 diff -r d5d0dcbdc4d9 .hgtags
1552 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1552 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1553 +++ b/.hgtags * (glob)
1553 +++ b/.hgtags * (glob)
1554 @@ -0,0 +1,1 @@
1554 @@ -0,0 +1,1 @@
1555 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1555 +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
1556
1556
1557 (no rev)
1557 (no rev)
1558
1558
1559 $ hg diff -r 'author("babar") or author("celeste")'
1559 $ hg diff -r 'author("babar") or author("celeste")'
1560 abort: empty revision range
1560 abort: empty revision range
1561 [255]
1561 [255]
1562
1562
1563 aliases:
1563 aliases:
1564
1564
1565 $ echo '[revsetalias]' >> .hg/hgrc
1565 $ echo '[revsetalias]' >> .hg/hgrc
1566 $ echo 'm = merge()' >> .hg/hgrc
1566 $ echo 'm = merge()' >> .hg/hgrc
1567 (revset aliases can override builtin revsets)
1567 (revset aliases can override builtin revsets)
1568 $ echo 'p2($1) = p1($1)' >> .hg/hgrc
1568 $ echo 'p2($1) = p1($1)' >> .hg/hgrc
1569 $ echo 'sincem = descendants(m)' >> .hg/hgrc
1569 $ echo 'sincem = descendants(m)' >> .hg/hgrc
1570 $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc
1570 $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc
1571 $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
1571 $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
1572 $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
1572 $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
1573
1573
1574 $ try m
1574 $ try m
1575 ('symbol', 'm')
1575 ('symbol', 'm')
1576 (func
1576 (func
1577 ('symbol', 'merge')
1577 ('symbol', 'merge')
1578 None)
1578 None)
1579 * set:
1579 * set:
1580 <filteredset
1580 <filteredset
1581 <fullreposet+ 0:9>>
1581 <fullreposet+ 0:9>>
1582 6
1582 6
1583
1583
1584 $ HGPLAIN=1
1584 $ HGPLAIN=1
1585 $ export HGPLAIN
1585 $ export HGPLAIN
1586 $ try m
1586 $ try m
1587 ('symbol', 'm')
1587 ('symbol', 'm')
1588 abort: unknown revision 'm'!
1588 abort: unknown revision 'm'!
1589 [255]
1589 [255]
1590
1590
1591 $ HGPLAINEXCEPT=revsetalias
1591 $ HGPLAINEXCEPT=revsetalias
1592 $ export HGPLAINEXCEPT
1592 $ export HGPLAINEXCEPT
1593 $ try m
1593 $ try m
1594 ('symbol', 'm')
1594 ('symbol', 'm')
1595 (func
1595 (func
1596 ('symbol', 'merge')
1596 ('symbol', 'merge')
1597 None)
1597 None)
1598 * set:
1598 * set:
1599 <filteredset
1599 <filteredset
1600 <fullreposet+ 0:9>>
1600 <fullreposet+ 0:9>>
1601 6
1601 6
1602
1602
1603 $ unset HGPLAIN
1603 $ unset HGPLAIN
1604 $ unset HGPLAINEXCEPT
1604 $ unset HGPLAINEXCEPT
1605
1605
1606 $ try 'p2(.)'
1606 $ try 'p2(.)'
1607 (func
1607 (func
1608 ('symbol', 'p2')
1608 ('symbol', 'p2')
1609 ('symbol', '.'))
1609 ('symbol', '.'))
1610 (func
1610 (func
1611 ('symbol', 'p1')
1611 ('symbol', 'p1')
1612 ('symbol', '.'))
1612 ('symbol', '.'))
1613 * set:
1613 * set:
1614 <baseset+ [8]>
1614 <baseset+ [8]>
1615 8
1615 8
1616
1616
1617 $ HGPLAIN=1
1617 $ HGPLAIN=1
1618 $ export HGPLAIN
1618 $ export HGPLAIN
1619 $ try 'p2(.)'
1619 $ try 'p2(.)'
1620 (func
1620 (func
1621 ('symbol', 'p2')
1621 ('symbol', 'p2')
1622 ('symbol', '.'))
1622 ('symbol', '.'))
1623 * set:
1623 * set:
1624 <baseset+ []>
1624 <baseset+ []>
1625
1625
1626 $ HGPLAINEXCEPT=revsetalias
1626 $ HGPLAINEXCEPT=revsetalias
1627 $ export HGPLAINEXCEPT
1627 $ export HGPLAINEXCEPT
1628 $ try 'p2(.)'
1628 $ try 'p2(.)'
1629 (func
1629 (func
1630 ('symbol', 'p2')
1630 ('symbol', 'p2')
1631 ('symbol', '.'))
1631 ('symbol', '.'))
1632 (func
1632 (func
1633 ('symbol', 'p1')
1633 ('symbol', 'p1')
1634 ('symbol', '.'))
1634 ('symbol', '.'))
1635 * set:
1635 * set:
1636 <baseset+ [8]>
1636 <baseset+ [8]>
1637 8
1637 8
1638
1638
1639 $ unset HGPLAIN
1639 $ unset HGPLAIN
1640 $ unset HGPLAINEXCEPT
1640 $ unset HGPLAINEXCEPT
1641
1641
1642 test alias recursion
1642 test alias recursion
1643
1643
1644 $ try sincem
1644 $ try sincem
1645 ('symbol', 'sincem')
1645 ('symbol', 'sincem')
1646 (func
1646 (func
1647 ('symbol', 'descendants')
1647 ('symbol', 'descendants')
1648 (func
1648 (func
1649 ('symbol', 'merge')
1649 ('symbol', 'merge')
1650 None))
1650 None))
1651 * set:
1651 * set:
1652 <addset+
1652 <addset+
1653 <filteredset
1653 <filteredset
1654 <fullreposet+ 0:9>>,
1654 <fullreposet+ 0:9>>,
1655 <generatorset+>>
1655 <generatorset+>>
1656 6
1656 6
1657 7
1657 7
1658
1658
1659 test infinite recursion
1659 test infinite recursion
1660
1660
1661 $ echo 'recurse1 = recurse2' >> .hg/hgrc
1661 $ echo 'recurse1 = recurse2' >> .hg/hgrc
1662 $ echo 'recurse2 = recurse1' >> .hg/hgrc
1662 $ echo 'recurse2 = recurse1' >> .hg/hgrc
1663 $ try recurse1
1663 $ try recurse1
1664 ('symbol', 'recurse1')
1664 ('symbol', 'recurse1')
1665 hg: parse error: infinite expansion of revset alias "recurse1" detected
1665 hg: parse error: infinite expansion of revset alias "recurse1" detected
1666 [255]
1666 [255]
1667
1667
1668 $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc
1668 $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc
1669 $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc
1669 $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc
1670 $ try "level2(level1(1, 2), 3)"
1670 $ try "level2(level1(1, 2), 3)"
1671 (func
1671 (func
1672 ('symbol', 'level2')
1672 ('symbol', 'level2')
1673 (list
1673 (list
1674 (func
1674 (func
1675 ('symbol', 'level1')
1675 ('symbol', 'level1')
1676 (list
1676 (list
1677 ('symbol', '1')
1677 ('symbol', '1')
1678 ('symbol', '2')))
1678 ('symbol', '2')))
1679 ('symbol', '3')))
1679 ('symbol', '3')))
1680 (or
1680 (or
1681 ('symbol', '3')
1681 ('symbol', '3')
1682 (or
1682 (or
1683 ('symbol', '1')
1683 ('symbol', '1')
1684 ('symbol', '2')))
1684 ('symbol', '2')))
1685 * set:
1685 * set:
1686 <addset
1686 <addset
1687 <baseset [3]>,
1687 <baseset [3]>,
1688 <baseset [1, 2]>>
1688 <baseset [1, 2]>>
1689 3
1689 3
1690 1
1690 1
1691 2
1691 2
1692
1692
1693 test nesting and variable passing
1693 test nesting and variable passing
1694
1694
1695 $ echo 'nested($1) = nested2($1)' >> .hg/hgrc
1695 $ echo 'nested($1) = nested2($1)' >> .hg/hgrc
1696 $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc
1696 $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc
1697 $ echo 'nested3($1) = max($1)' >> .hg/hgrc
1697 $ echo 'nested3($1) = max($1)' >> .hg/hgrc
1698 $ try 'nested(2:5)'
1698 $ try 'nested(2:5)'
1699 (func
1699 (func
1700 ('symbol', 'nested')
1700 ('symbol', 'nested')
1701 (range
1701 (range
1702 ('symbol', '2')
1702 ('symbol', '2')
1703 ('symbol', '5')))
1703 ('symbol', '5')))
1704 (func
1704 (func
1705 ('symbol', 'max')
1705 ('symbol', 'max')
1706 (range
1706 (range
1707 ('symbol', '2')
1707 ('symbol', '2')
1708 ('symbol', '5')))
1708 ('symbol', '5')))
1709 * set:
1709 * set:
1710 <baseset [5]>
1710 <baseset [5]>
1711 5
1711 5
1712
1712
1713 test chained `or` operations are flattened at parsing phase
1713 test chained `or` operations are flattened at parsing phase
1714
1714
1715 $ echo 'chainedorops($1, $2, $3) = $1|$2|$3' >> .hg/hgrc
1715 $ echo 'chainedorops($1, $2, $3) = $1|$2|$3' >> .hg/hgrc
1716 $ try 'chainedorops(0:1, 1:2, 2:3)'
1716 $ try 'chainedorops(0:1, 1:2, 2:3)'
1717 (func
1717 (func
1718 ('symbol', 'chainedorops')
1718 ('symbol', 'chainedorops')
1719 (list
1719 (list
1720 (list
1720 (list
1721 (range
1721 (range
1722 ('symbol', '0')
1722 ('symbol', '0')
1723 ('symbol', '1'))
1723 ('symbol', '1'))
1724 (range
1724 (range
1725 ('symbol', '1')
1725 ('symbol', '1')
1726 ('symbol', '2')))
1726 ('symbol', '2')))
1727 (range
1727 (range
1728 ('symbol', '2')
1728 ('symbol', '2')
1729 ('symbol', '3'))))
1729 ('symbol', '3'))))
1730 (or
1730 (or
1731 (range
1731 (range
1732 ('symbol', '0')
1732 ('symbol', '0')
1733 ('symbol', '1'))
1733 ('symbol', '1'))
1734 (range
1734 (range
1735 ('symbol', '1')
1735 ('symbol', '1')
1736 ('symbol', '2'))
1736 ('symbol', '2'))
1737 (range
1737 (range
1738 ('symbol', '2')
1738 ('symbol', '2')
1739 ('symbol', '3')))
1739 ('symbol', '3')))
1740 * set:
1740 * set:
1741 <addset
1741 <addset
1742 <spanset+ 0:1>,
1742 <spanset+ 0:1>,
1743 <addset
1743 <addset
1744 <spanset+ 1:2>,
1744 <spanset+ 1:2>,
1745 <spanset+ 2:3>>>
1745 <spanset+ 2:3>>>
1746 0
1746 0
1747 1
1747 1
1748 2
1748 2
1749 3
1749 3
1750
1750
1751 test variable isolation, variable placeholders are rewritten as string
1751 test variable isolation, variable placeholders are rewritten as string
1752 then parsed and matched again as string. Check they do not leak too
1752 then parsed and matched again as string. Check they do not leak too
1753 far away.
1753 far away.
1754
1754
1755 $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc
1755 $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc
1756 $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc
1756 $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc
1757 $ try 'callinjection(2:5)'
1757 $ try 'callinjection(2:5)'
1758 (func
1758 (func
1759 ('symbol', 'callinjection')
1759 ('symbol', 'callinjection')
1760 (range
1760 (range
1761 ('symbol', '2')
1761 ('symbol', '2')
1762 ('symbol', '5')))
1762 ('symbol', '5')))
1763 (func
1763 (func
1764 ('symbol', 'descendants')
1764 ('symbol', 'descendants')
1765 (func
1765 (func
1766 ('symbol', 'max')
1766 ('symbol', 'max')
1767 ('string', '$1')))
1767 ('string', '$1')))
1768 abort: unknown revision '$1'!
1768 abort: unknown revision '$1'!
1769 [255]
1769 [255]
1770
1770
1771 $ echo 'injectparamasstring2 = max(_aliasarg("$1"))' >> .hg/hgrc
1771 $ echo 'injectparamasstring2 = max(_aliasarg("$1"))' >> .hg/hgrc
1772 $ echo 'callinjection2($1) = descendants(injectparamasstring2)' >> .hg/hgrc
1772 $ echo 'callinjection2($1) = descendants(injectparamasstring2)' >> .hg/hgrc
1773 $ try 'callinjection2(2:5)'
1773 $ try 'callinjection2(2:5)'
1774 (func
1774 (func
1775 ('symbol', 'callinjection2')
1775 ('symbol', 'callinjection2')
1776 (range
1776 (range
1777 ('symbol', '2')
1777 ('symbol', '2')
1778 ('symbol', '5')))
1778 ('symbol', '5')))
1779 abort: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
1779 abort: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
1780 [255]
1780 [255]
1781 $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
1781 $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
1782 ('symbol', 'tip')
1782 ('symbol', 'tip')
1783 warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
1783 warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
1784 warning: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
1784 warning: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
1785 * set:
1785 * set:
1786 <baseset [9]>
1786 <baseset [9]>
1787 9
1787 9
1788 >>> data = file('.hg/hgrc', 'rb').read()
1788 >>> data = file('.hg/hgrc', 'rb').read()
1789 >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', ''))
1789 >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', ''))
1790
1790
1791 $ try 'tip'
1791 $ try 'tip'
1792 ('symbol', 'tip')
1792 ('symbol', 'tip')
1793 * set:
1793 * set:
1794 <baseset [9]>
1794 <baseset [9]>
1795 9
1795 9
1796
1796
1797 $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
1797 $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
1798 ('symbol', 'tip')
1798 ('symbol', 'tip')
1799 warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
1799 warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
1800 * set:
1800 * set:
1801 <baseset [9]>
1801 <baseset [9]>
1802 9
1802 9
1803 $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc
1803 $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc
1804 $ try 'strictreplacing("foo", tip)'
1804 $ try 'strictreplacing("foo", tip)'
1805 (func
1805 (func
1806 ('symbol', 'strictreplacing')
1806 ('symbol', 'strictreplacing')
1807 (list
1807 (list
1808 ('string', 'foo')
1808 ('string', 'foo')
1809 ('symbol', 'tip')))
1809 ('symbol', 'tip')))
1810 (or
1810 (or
1811 ('symbol', 'tip')
1811 ('symbol', 'tip')
1812 (func
1812 (func
1813 ('symbol', 'desc')
1813 ('symbol', 'desc')
1814 ('string', '$1')))
1814 ('string', '$1')))
1815 * set:
1815 * set:
1816 <addset
1816 <addset
1817 <baseset [9]>,
1817 <baseset [9]>,
1818 <filteredset
1818 <filteredset
1819 <fullreposet+ 0:9>>>
1819 <fullreposet+ 0:9>>>
1820 9
1820 9
1821
1821
1822 $ try 'd(2:5)'
1822 $ try 'd(2:5)'
1823 (func
1823 (func
1824 ('symbol', 'd')
1824 ('symbol', 'd')
1825 (range
1825 (range
1826 ('symbol', '2')
1826 ('symbol', '2')
1827 ('symbol', '5')))
1827 ('symbol', '5')))
1828 (func
1828 (func
1829 ('symbol', 'reverse')
1829 ('symbol', 'reverse')
1830 (func
1830 (func
1831 ('symbol', 'sort')
1831 ('symbol', 'sort')
1832 (list
1832 (list
1833 (range
1833 (range
1834 ('symbol', '2')
1834 ('symbol', '2')
1835 ('symbol', '5'))
1835 ('symbol', '5'))
1836 ('symbol', 'date'))))
1836 ('symbol', 'date'))))
1837 * set:
1837 * set:
1838 <baseset [4, 5, 3, 2]>
1838 <baseset [4, 5, 3, 2]>
1839 4
1839 4
1840 5
1840 5
1841 3
1841 3
1842 2
1842 2
1843 $ try 'rs(2 or 3, date)'
1843 $ try 'rs(2 or 3, date)'
1844 (func
1844 (func
1845 ('symbol', 'rs')
1845 ('symbol', 'rs')
1846 (list
1846 (list
1847 (or
1847 (or
1848 ('symbol', '2')
1848 ('symbol', '2')
1849 ('symbol', '3'))
1849 ('symbol', '3'))
1850 ('symbol', 'date')))
1850 ('symbol', 'date')))
1851 (func
1851 (func
1852 ('symbol', 'reverse')
1852 ('symbol', 'reverse')
1853 (func
1853 (func
1854 ('symbol', 'sort')
1854 ('symbol', 'sort')
1855 (list
1855 (list
1856 (or
1856 (or
1857 ('symbol', '2')
1857 ('symbol', '2')
1858 ('symbol', '3'))
1858 ('symbol', '3'))
1859 ('symbol', 'date'))))
1859 ('symbol', 'date'))))
1860 * set:
1860 * set:
1861 <baseset [3, 2]>
1861 <baseset [3, 2]>
1862 3
1862 3
1863 2
1863 2
1864 $ try 'rs()'
1864 $ try 'rs()'
1865 (func
1865 (func
1866 ('symbol', 'rs')
1866 ('symbol', 'rs')
1867 None)
1867 None)
1868 hg: parse error: invalid number of arguments: 0
1868 hg: parse error: invalid number of arguments: 0
1869 [255]
1869 [255]
1870 $ try 'rs(2)'
1870 $ try 'rs(2)'
1871 (func
1871 (func
1872 ('symbol', 'rs')
1872 ('symbol', 'rs')
1873 ('symbol', '2'))
1873 ('symbol', '2'))
1874 hg: parse error: invalid number of arguments: 1
1874 hg: parse error: invalid number of arguments: 1
1875 [255]
1875 [255]
1876 $ try 'rs(2, data, 7)'
1876 $ try 'rs(2, data, 7)'
1877 (func
1877 (func
1878 ('symbol', 'rs')
1878 ('symbol', 'rs')
1879 (list
1879 (list
1880 (list
1880 (list
1881 ('symbol', '2')
1881 ('symbol', '2')
1882 ('symbol', 'data'))
1882 ('symbol', 'data'))
1883 ('symbol', '7')))
1883 ('symbol', '7')))
1884 hg: parse error: invalid number of arguments: 3
1884 hg: parse error: invalid number of arguments: 3
1885 [255]
1885 [255]
1886 $ try 'rs4(2 or 3, x, x, date)'
1886 $ try 'rs4(2 or 3, x, x, date)'
1887 (func
1887 (func
1888 ('symbol', 'rs4')
1888 ('symbol', 'rs4')
1889 (list
1889 (list
1890 (list
1890 (list
1891 (list
1891 (list
1892 (or
1892 (or
1893 ('symbol', '2')
1893 ('symbol', '2')
1894 ('symbol', '3'))
1894 ('symbol', '3'))
1895 ('symbol', 'x'))
1895 ('symbol', 'x'))
1896 ('symbol', 'x'))
1896 ('symbol', 'x'))
1897 ('symbol', 'date')))
1897 ('symbol', 'date')))
1898 (func
1898 (func
1899 ('symbol', 'reverse')
1899 ('symbol', 'reverse')
1900 (func
1900 (func
1901 ('symbol', 'sort')
1901 ('symbol', 'sort')
1902 (list
1902 (list
1903 (or
1903 (or
1904 ('symbol', '2')
1904 ('symbol', '2')
1905 ('symbol', '3'))
1905 ('symbol', '3'))
1906 ('symbol', 'date'))))
1906 ('symbol', 'date'))))
1907 * set:
1907 * set:
1908 <baseset [3, 2]>
1908 <baseset [3, 2]>
1909 3
1909 3
1910 2
1910 2
1911
1911
1912 issue4553: check that revset aliases override existing hash prefix
1912 issue4553: check that revset aliases override existing hash prefix
1913
1913
1914 $ hg log -qr e
1914 $ hg log -qr e
1915 6:e0cc66ef77e8
1915 6:e0cc66ef77e8
1916
1916
1917 $ hg log -qr e --config revsetalias.e="all()"
1917 $ hg log -qr e --config revsetalias.e="all()"
1918 0:2785f51eece5
1918 0:2785f51eece5
1919 1:d75937da8da0
1919 1:d75937da8da0
1920 2:5ed5505e9f1c
1920 2:5ed5505e9f1c
1921 3:8528aa5637f2
1921 3:8528aa5637f2
1922 4:2326846efdab
1922 4:2326846efdab
1923 5:904fa392b941
1923 5:904fa392b941
1924 6:e0cc66ef77e8
1924 6:e0cc66ef77e8
1925 7:013af1973af4
1925 7:013af1973af4
1926 8:d5d0dcbdc4d9
1926 8:d5d0dcbdc4d9
1927 9:24286f4ae135
1927 9:24286f4ae135
1928
1928
1929 $ hg log -qr e: --config revsetalias.e="0"
1929 $ hg log -qr e: --config revsetalias.e="0"
1930 0:2785f51eece5
1930 0:2785f51eece5
1931 1:d75937da8da0
1931 1:d75937da8da0
1932 2:5ed5505e9f1c
1932 2:5ed5505e9f1c
1933 3:8528aa5637f2
1933 3:8528aa5637f2
1934 4:2326846efdab
1934 4:2326846efdab
1935 5:904fa392b941
1935 5:904fa392b941
1936 6:e0cc66ef77e8
1936 6:e0cc66ef77e8
1937 7:013af1973af4
1937 7:013af1973af4
1938 8:d5d0dcbdc4d9
1938 8:d5d0dcbdc4d9
1939 9:24286f4ae135
1939 9:24286f4ae135
1940
1940
1941 $ hg log -qr :e --config revsetalias.e="9"
1941 $ hg log -qr :e --config revsetalias.e="9"
1942 0:2785f51eece5
1942 0:2785f51eece5
1943 1:d75937da8da0
1943 1:d75937da8da0
1944 2:5ed5505e9f1c
1944 2:5ed5505e9f1c
1945 3:8528aa5637f2
1945 3:8528aa5637f2
1946 4:2326846efdab
1946 4:2326846efdab
1947 5:904fa392b941
1947 5:904fa392b941
1948 6:e0cc66ef77e8
1948 6:e0cc66ef77e8
1949 7:013af1973af4
1949 7:013af1973af4
1950 8:d5d0dcbdc4d9
1950 8:d5d0dcbdc4d9
1951 9:24286f4ae135
1951 9:24286f4ae135
1952
1952
1953 $ hg log -qr e:
1953 $ hg log -qr e:
1954 6:e0cc66ef77e8
1954 6:e0cc66ef77e8
1955 7:013af1973af4
1955 7:013af1973af4
1956 8:d5d0dcbdc4d9
1956 8:d5d0dcbdc4d9
1957 9:24286f4ae135
1957 9:24286f4ae135
1958
1958
1959 $ hg log -qr :e
1959 $ hg log -qr :e
1960 0:2785f51eece5
1960 0:2785f51eece5
1961 1:d75937da8da0
1961 1:d75937da8da0
1962 2:5ed5505e9f1c
1962 2:5ed5505e9f1c
1963 3:8528aa5637f2
1963 3:8528aa5637f2
1964 4:2326846efdab
1964 4:2326846efdab
1965 5:904fa392b941
1965 5:904fa392b941
1966 6:e0cc66ef77e8
1966 6:e0cc66ef77e8
1967
1967
1968 issue2549 - correct optimizations
1968 issue2549 - correct optimizations
1969
1969
1970 $ log 'limit(1 or 2 or 3, 2) and not 2'
1970 $ log 'limit(1 or 2 or 3, 2) and not 2'
1971 1
1971 1
1972 $ log 'max(1 or 2) and not 2'
1972 $ log 'max(1 or 2) and not 2'
1973 $ log 'min(1 or 2) and not 1'
1973 $ log 'min(1 or 2) and not 1'
1974 $ log 'last(1 or 2, 1) and not 2'
1974 $ log 'last(1 or 2, 1) and not 2'
1975
1975
1976 issue4289 - ordering of built-ins
1976 issue4289 - ordering of built-ins
1977 $ hg log -M -q -r 3:2
1977 $ hg log -M -q -r 3:2
1978 3:8528aa5637f2
1978 3:8528aa5637f2
1979 2:5ed5505e9f1c
1979 2:5ed5505e9f1c
1980
1980
1981 test revsets started with 40-chars hash (issue3669)
1981 test revsets started with 40-chars hash (issue3669)
1982
1982
1983 $ ISSUE3669_TIP=`hg tip --template '{node}'`
1983 $ ISSUE3669_TIP=`hg tip --template '{node}'`
1984 $ hg log -r "${ISSUE3669_TIP}" --template '{rev}\n'
1984 $ hg log -r "${ISSUE3669_TIP}" --template '{rev}\n'
1985 9
1985 9
1986 $ hg log -r "${ISSUE3669_TIP}^" --template '{rev}\n'
1986 $ hg log -r "${ISSUE3669_TIP}^" --template '{rev}\n'
1987 8
1987 8
1988
1988
1989 test or-ed indirect predicates (issue3775)
1989 test or-ed indirect predicates (issue3775)
1990
1990
1991 $ log '6 or 6^1' | sort
1991 $ log '6 or 6^1' | sort
1992 5
1992 5
1993 6
1993 6
1994 $ log '6^1 or 6' | sort
1994 $ log '6^1 or 6' | sort
1995 5
1995 5
1996 6
1996 6
1997 $ log '4 or 4~1' | sort
1997 $ log '4 or 4~1' | sort
1998 2
1998 2
1999 4
1999 4
2000 $ log '4~1 or 4' | sort
2000 $ log '4~1 or 4' | sort
2001 2
2001 2
2002 4
2002 4
2003 $ log '(0 or 2):(4 or 6) or 0 or 6' | sort
2003 $ log '(0 or 2):(4 or 6) or 0 or 6' | sort
2004 0
2004 0
2005 1
2005 1
2006 2
2006 2
2007 3
2007 3
2008 4
2008 4
2009 5
2009 5
2010 6
2010 6
2011 $ log '0 or 6 or (0 or 2):(4 or 6)' | sort
2011 $ log '0 or 6 or (0 or 2):(4 or 6)' | sort
2012 0
2012 0
2013 1
2013 1
2014 2
2014 2
2015 3
2015 3
2016 4
2016 4
2017 5
2017 5
2018 6
2018 6
2019
2019
2020 tests for 'remote()' predicate:
2020 tests for 'remote()' predicate:
2021 #. (csets in remote) (id) (remote)
2021 #. (csets in remote) (id) (remote)
2022 1. less than local current branch "default"
2022 1. less than local current branch "default"
2023 2. same with local specified "default"
2023 2. same with local specified "default"
2024 3. more than local specified specified
2024 3. more than local specified specified
2025
2025
2026 $ hg clone --quiet -U . ../remote3
2026 $ hg clone --quiet -U . ../remote3
2027 $ cd ../remote3
2027 $ cd ../remote3
2028 $ hg update -q 7
2028 $ hg update -q 7
2029 $ echo r > r
2029 $ echo r > r
2030 $ hg ci -Aqm 10
2030 $ hg ci -Aqm 10
2031 $ log 'remote()'
2031 $ log 'remote()'
2032 7
2032 7
2033 $ log 'remote("a-b-c-")'
2033 $ log 'remote("a-b-c-")'
2034 2
2034 2
2035 $ cd ../repo
2035 $ cd ../repo
2036 $ log 'remote(".a.b.c.", "../remote3")'
2036 $ log 'remote(".a.b.c.", "../remote3")'
2037
2037
2038 tests for concatenation of strings/symbols by "##"
2038 tests for concatenation of strings/symbols by "##"
2039
2039
2040 $ try "278 ## '5f5' ## 1ee ## 'ce5'"
2040 $ try "278 ## '5f5' ## 1ee ## 'ce5'"
2041 (_concat
2041 (_concat
2042 (_concat
2042 (_concat
2043 (_concat
2043 (_concat
2044 ('symbol', '278')
2044 ('symbol', '278')
2045 ('string', '5f5'))
2045 ('string', '5f5'))
2046 ('symbol', '1ee'))
2046 ('symbol', '1ee'))
2047 ('string', 'ce5'))
2047 ('string', 'ce5'))
2048 ('string', '2785f51eece5')
2048 ('string', '2785f51eece5')
2049 * set:
2049 * set:
2050 <baseset [0]>
2050 <baseset [0]>
2051 0
2051 0
2052
2052
2053 $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
2053 $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
2054 $ try "cat4(278, '5f5', 1ee, 'ce5')"
2054 $ try "cat4(278, '5f5', 1ee, 'ce5')"
2055 (func
2055 (func
2056 ('symbol', 'cat4')
2056 ('symbol', 'cat4')
2057 (list
2057 (list
2058 (list
2058 (list
2059 (list
2059 (list
2060 ('symbol', '278')
2060 ('symbol', '278')
2061 ('string', '5f5'))
2061 ('string', '5f5'))
2062 ('symbol', '1ee'))
2062 ('symbol', '1ee'))
2063 ('string', 'ce5')))
2063 ('string', 'ce5')))
2064 (_concat
2064 (_concat
2065 (_concat
2065 (_concat
2066 (_concat
2066 (_concat
2067 ('symbol', '278')
2067 ('symbol', '278')
2068 ('string', '5f5'))
2068 ('string', '5f5'))
2069 ('symbol', '1ee'))
2069 ('symbol', '1ee'))
2070 ('string', 'ce5'))
2070 ('string', 'ce5'))
2071 ('string', '2785f51eece5')
2071 ('string', '2785f51eece5')
2072 * set:
2072 * set:
2073 <baseset [0]>
2073 <baseset [0]>
2074 0
2074 0
2075
2075
2076 (check concatenation in alias nesting)
2076 (check concatenation in alias nesting)
2077
2077
2078 $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc
2078 $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc
2079 $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc
2079 $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc
2080 $ log "cat2x2(278, '5f5', 1ee, 'ce5')"
2080 $ log "cat2x2(278, '5f5', 1ee, 'ce5')"
2081 0
2081 0
2082
2082
2083 (check operator priority)
2083 (check operator priority)
2084
2084
2085 $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
2085 $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
2086 $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
2086 $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
2087 0
2087 0
2088 4
2088 4
2089
2089
2090 $ cd ..
2090 $ cd ..
2091
2091
2092 prepare repository that has "default" branches of multiple roots
2092 prepare repository that has "default" branches of multiple roots
2093
2093
2094 $ hg init namedbranch
2094 $ hg init namedbranch
2095 $ cd namedbranch
2095 $ cd namedbranch
2096
2096
2097 $ echo default0 >> a
2097 $ echo default0 >> a
2098 $ hg ci -Aqm0
2098 $ hg ci -Aqm0
2099 $ echo default1 >> a
2099 $ echo default1 >> a
2100 $ hg ci -m1
2100 $ hg ci -m1
2101
2101
2102 $ hg branch -q stable
2102 $ hg branch -q stable
2103 $ echo stable2 >> a
2103 $ echo stable2 >> a
2104 $ hg ci -m2
2104 $ hg ci -m2
2105 $ echo stable3 >> a
2105 $ echo stable3 >> a
2106 $ hg ci -m3
2106 $ hg ci -m3
2107
2107
2108 $ hg update -q null
2108 $ hg update -q null
2109 $ echo default4 >> a
2109 $ echo default4 >> a
2110 $ hg ci -Aqm4
2110 $ hg ci -Aqm4
2111 $ echo default5 >> a
2111 $ echo default5 >> a
2112 $ hg ci -m5
2112 $ hg ci -m5
2113
2113
2114 "null" revision belongs to "default" branch (issue4683)
2114 "null" revision belongs to "default" branch (issue4683)
2115
2115
2116 $ log 'branch(null)'
2116 $ log 'branch(null)'
2117 0
2117 0
2118 1
2118 1
2119 4
2119 4
2120 5
2120 5
2121
2121
2122 "null" revision belongs to "default" branch, but it shouldn't appear in set
2122 "null" revision belongs to "default" branch, but it shouldn't appear in set
2123 unless explicitly specified (issue4682)
2123 unless explicitly specified (issue4682)
2124
2124
2125 $ log 'children(branch(default))'
2125 $ log 'children(branch(default))'
2126 1
2126 1
2127 2
2127 2
2128 5
2128 5
2129
2129
2130 $ cd ..
2130 $ cd ..
2131
2131
2132 test author/desc/keyword in problematic encoding
2132 test author/desc/keyword in problematic encoding
2133 # unicode: cp932:
2133 # unicode: cp932:
2134 # u30A2 0x83 0x41(= 'A')
2134 # u30A2 0x83 0x41(= 'A')
2135 # u30C2 0x83 0x61(= 'a')
2135 # u30C2 0x83 0x61(= 'a')
2136
2136
2137 $ hg init problematicencoding
2137 $ hg init problematicencoding
2138 $ cd problematicencoding
2138 $ cd problematicencoding
2139
2139
2140 $ python > setup.sh <<EOF
2140 $ python > setup.sh <<EOF
2141 > print u'''
2141 > print u'''
2142 > echo a > text
2142 > echo a > text
2143 > hg add text
2143 > hg add text
2144 > hg --encoding utf-8 commit -u '\u30A2' -m none
2144 > hg --encoding utf-8 commit -u '\u30A2' -m none
2145 > echo b > text
2145 > echo b > text
2146 > hg --encoding utf-8 commit -u '\u30C2' -m none
2146 > hg --encoding utf-8 commit -u '\u30C2' -m none
2147 > echo c > text
2147 > echo c > text
2148 > hg --encoding utf-8 commit -u none -m '\u30A2'
2148 > hg --encoding utf-8 commit -u none -m '\u30A2'
2149 > echo d > text
2149 > echo d > text
2150 > hg --encoding utf-8 commit -u none -m '\u30C2'
2150 > hg --encoding utf-8 commit -u none -m '\u30C2'
2151 > '''.encode('utf-8')
2151 > '''.encode('utf-8')
2152 > EOF
2152 > EOF
2153 $ sh < setup.sh
2153 $ sh < setup.sh
2154
2154
2155 test in problematic encoding
2155 test in problematic encoding
2156 $ python > test.sh <<EOF
2156 $ python > test.sh <<EOF
2157 > print u'''
2157 > print u'''
2158 > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
2158 > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
2159 > echo ====
2159 > echo ====
2160 > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
2160 > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
2161 > echo ====
2161 > echo ====
2162 > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30A2)'
2162 > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30A2)'
2163 > echo ====
2163 > echo ====
2164 > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30C2)'
2164 > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30C2)'
2165 > echo ====
2165 > echo ====
2166 > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30A2)'
2166 > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30A2)'
2167 > echo ====
2167 > echo ====
2168 > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30C2)'
2168 > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30C2)'
2169 > '''.encode('cp932')
2169 > '''.encode('cp932')
2170 > EOF
2170 > EOF
2171 $ sh < test.sh
2171 $ sh < test.sh
2172 0
2172 0
2173 ====
2173 ====
2174 1
2174 1
2175 ====
2175 ====
2176 2
2176 2
2177 ====
2177 ====
2178 3
2178 3
2179 ====
2179 ====
2180 0
2180 0
2181 2
2181 2
2182 ====
2182 ====
2183 1
2183 1
2184 3
2184 3
2185
2185
2186 test error message of bad revset
2186 test error message of bad revset
2187 $ hg log -r 'foo\\'
2187 $ hg log -r 'foo\\'
2188 hg: parse error at 3: syntax error in revset 'foo\\'
2188 hg: parse error at 3: syntax error in revset 'foo\\'
2189 [255]
2189 [255]
2190
2190
2191 $ cd ..
2191 $ cd ..
2192
2193 Test registrar.delayregistrar via revset.extpredicate
2194
2195 'extpredicate' decorator shouldn't register any functions until
2196 'setup()' on it.
2197
2198 $ cd repo
2199
2200 $ cat <<EOF > $TESTTMP/custompredicate.py
2201 > from mercurial import revset
2202 >
2203 > revsetpredicate = revset.extpredicate()
2204 >
2205 > @revsetpredicate('custom1()')
2206 > def custom1(repo, subset, x):
2207 > return revset.baseset([1])
2208 > @revsetpredicate('custom2()')
2209 > def custom2(repo, subset, x):
2210 > return revset.baseset([2])
2211 >
2212 > def uisetup(ui):
2213 > if ui.configbool('custompredicate', 'enabled'):
2214 > revsetpredicate.setup()
2215 > EOF
2216 $ cat <<EOF > .hg/hgrc
2217 > [extensions]
2218 > custompredicate = $TESTTMP/custompredicate.py
2219 > EOF
2220
2221 $ hg debugrevspec "custom1()"
2222 hg: parse error: unknown identifier: custom1
2223 [255]
2224 $ hg debugrevspec "custom2()"
2225 hg: parse error: unknown identifier: custom2
2226 [255]
2227 $ hg debugrevspec "custom1() or custom2()" --config custompredicate.enabled=true
2228 1
2229 2
2230
2231 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now