rebase: make summary template configurable, with default to shared template...
Martin von Zweigbergk
r46355:96fcc37a default
@@ -1,2269 +1,2269 @@
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
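# A note on usage: the features described in the docstring above are only
# available once the extension is enabled. A minimal sketch of the usual way
# to do that, following the standard mechanism described in
# `hg help extensions`, is to add the following to an hgrc file:
#
#     [extensions]
#     rebase =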
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 nullrev,
24 nullrev,
25 short,
25 short,
26 )
26 )
27 from mercurial.pycompat import open
27 from mercurial.pycompat import open
28 from mercurial import (
28 from mercurial import (
29 bookmarks,
29 bookmarks,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 destutil,
33 destutil,
34 dirstateguard,
34 dirstateguard,
35 error,
35 error,
36 extensions,
36 extensions,
37 formatter,
38 merge as mergemod,
37 merge as mergemod,
39 mergestate as mergestatemod,
38 mergestate as mergestatemod,
40 mergeutil,
39 mergeutil,
41 node as nodemod,
40 node as nodemod,
42 obsolete,
41 obsolete,
43 obsutil,
42 obsutil,
44 patch,
43 patch,
45 phases,
44 phases,
46 pycompat,
45 pycompat,
47 registrar,
46 registrar,
48 repair,
47 repair,
49 revset,
48 revset,
50 revsetlang,
49 revsetlang,
51 rewriteutil,
50 rewriteutil,
52 scmutil,
51 scmutil,
53 smartset,
52 smartset,
54 state as statemod,
53 state as statemod,
55 templatekw,
56 util,
54 util,
57 )
55 )
58
56
59 # The following constants are used throughout the rebase module. The ordering of
57 # The following constants are used throughout the rebase module. The ordering of
60 # their values must be maintained.
58 # their values must be maintained.
61
59
62 # Indicates that a revision needs to be rebased
60 # Indicates that a revision needs to be rebased
63 revtodo = -1
61 revtodo = -1
64 revtodostr = b'-1'
62 revtodostr = b'-1'
65
63
66 # legacy revstates no longer needed in current code
64 # legacy revstates no longer needed in current code
67 # -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
65 # -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
68 legacystates = {b'-2', b'-3', b'-4', b'-5'}
66 legacystates = {b'-2', b'-3', b'-4', b'-5'}
69
67
70 cmdtable = {}
68 cmdtable = {}
71 command = registrar.command(cmdtable)
69 command = registrar.command(cmdtable)
72 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
70 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
73 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
71 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
74 # be specifying the version(s) of Mercurial they are tested with, or
72 # be specifying the version(s) of Mercurial they are tested with, or
75 # leave the attribute unspecified.
73 # leave the attribute unspecified.
76 testedwith = b'ships-with-hg-core'
74 testedwith = b'ships-with-hg-core'
77
75
78
76
79 def _nothingtorebase():
77 def _nothingtorebase():
80 return 1
78 return 1
81
79
82
80
83 def _savegraft(ctx, extra):
81 def _savegraft(ctx, extra):
84 s = ctx.extra().get(b'source', None)
82 s = ctx.extra().get(b'source', None)
85 if s is not None:
83 if s is not None:
86 extra[b'source'] = s
84 extra[b'source'] = s
87 s = ctx.extra().get(b'intermediate-source', None)
85 s = ctx.extra().get(b'intermediate-source', None)
88 if s is not None:
86 if s is not None:
89 extra[b'intermediate-source'] = s
87 extra[b'intermediate-source'] = s
90
88
91
89
92 def _savebranch(ctx, extra):
90 def _savebranch(ctx, extra):
93 extra[b'branch'] = ctx.branch()
91 extra[b'branch'] = ctx.branch()
94
92
95
93
96 def _destrebase(repo, sourceset, destspace=None):
94 def _destrebase(repo, sourceset, destspace=None):
97 """small wrapper around destmerge to pass the right extra args
95 """small wrapper around destmerge to pass the right extra args
98
96
99 Please wrap destutil.destmerge instead."""
97 Please wrap destutil.destmerge instead."""
100 return destutil.destmerge(
98 return destutil.destmerge(
101 repo,
99 repo,
102 action=b'rebase',
100 action=b'rebase',
103 sourceset=sourceset,
101 sourceset=sourceset,
104 onheadcheck=False,
102 onheadcheck=False,
105 destspace=destspace,
103 destspace=destspace,
106 )
104 )
107
105
108
106
109 revsetpredicate = registrar.revsetpredicate()
107 revsetpredicate = registrar.revsetpredicate()
110
108
111
109
112 @revsetpredicate(b'_destrebase')
110 @revsetpredicate(b'_destrebase')
113 def _revsetdestrebase(repo, subset, x):
111 def _revsetdestrebase(repo, subset, x):
114 # ``_rebasedefaultdest()``
112 # ``_rebasedefaultdest()``
115
113
116 # default destination for rebase.
114 # default destination for rebase.
117 # # XXX: Currently private because I expect the signature to change.
115 # # XXX: Currently private because I expect the signature to change.
118 # # XXX: - bailing out in case of ambiguity vs returning all data.
116 # # XXX: - bailing out in case of ambiguity vs returning all data.
119 # i18n: "_rebasedefaultdest" is a keyword
117 # i18n: "_rebasedefaultdest" is a keyword
120 sourceset = None
118 sourceset = None
121 if x is not None:
119 if x is not None:
122 sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
120 sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
123 return subset & smartset.baseset([_destrebase(repo, sourceset)])
121 return subset & smartset.baseset([_destrebase(repo, sourceset)])
124
122
125
123
126 @revsetpredicate(b'_destautoorphanrebase')
124 @revsetpredicate(b'_destautoorphanrebase')
127 def _revsetdestautoorphanrebase(repo, subset, x):
125 def _revsetdestautoorphanrebase(repo, subset, x):
128 # ``_destautoorphanrebase()``
126 # ``_destautoorphanrebase()``
129
127
130 # automatic rebase destination for a single orphan revision.
128 # automatic rebase destination for a single orphan revision.
131 unfi = repo.unfiltered()
129 unfi = repo.unfiltered()
132 obsoleted = unfi.revs(b'obsolete()')
130 obsoleted = unfi.revs(b'obsolete()')
133
131
134 src = revset.getset(repo, subset, x).first()
132 src = revset.getset(repo, subset, x).first()
135
133
136 # Empty src or already obsoleted - Do not return a destination
134 # Empty src or already obsoleted - Do not return a destination
137 if not src or src in obsoleted:
135 if not src or src in obsoleted:
138 return smartset.baseset()
136 return smartset.baseset()
139 dests = destutil.orphanpossibledestination(repo, src)
137 dests = destutil.orphanpossibledestination(repo, src)
140 if len(dests) > 1:
138 if len(dests) > 1:
141 raise error.Abort(
139 raise error.Abort(
142 _(b"ambiguous automatic rebase: %r could end up on any of %r")
140 _(b"ambiguous automatic rebase: %r could end up on any of %r")
143 % (src, dests)
141 % (src, dests)
144 )
142 )
145 # We have zero or one destination, so we can just return here.
143 # We have zero or one destination, so we can just return here.
146 return smartset.baseset(dests)
144 return smartset.baseset(dests)
147
145
148
146
149 def _ctxdesc(ctx):
147 def _ctxdesc(ctx):
150 """short description for a context"""
148 """short description for a context"""
151 labels_spec = b'join(filter(namespaces % "{ifeq(namespace, "branches", "", join(names, " "))}"), " ")'
149 labels_spec = b'join(filter(namespaces % "{ifeq(namespace, "branches", "", join(names, " "))}"), " ")'
152 spec = b'{rev}:{node|short} "{desc|firstline}"{if(%s, " ({%s})")}' % (
150 spec = b'{rev}:{node|short} "{desc|firstline}"{if(%s, " ({%s})")}' % (
153 labels_spec,
151 labels_spec,
154 labels_spec,
152 labels_spec,
155 )
153 )
156 return cmdutil.rendertemplate(ctx, spec)
154 return cmdutil.format_changeset_summary(
155 ctx.repo().ui, ctx, command=b'rebase', default_spec=spec
156 )
157
157
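# The call to cmdutil.format_changeset_summary() above is what the commit
# message refers to: rebase's one-line changeset summary is now rendered
# through a shared, user-configurable template, with the spec built here
# serving only as the default. With that default, output looks roughly like
# ``123:0123456789ab "first line of description" (bookmark)``; the revision
# number, hash, and bookmark in that sample are invented for illustration.
# If memory serves, the shared template is configured under the
# [command-templates] section (oneline-summary, plus a per-command
# oneline-summary.rebase override); treat those key names as an assumption
# rather than something stated in this changeset.
#
# A minimal sketch of how another command could reuse the same helper,
# mirroring the call above (``mycmd`` is a hypothetical command name, not
# part of this file):
#
#     def _summarize(ctx):
#         spec = b'{rev}:{node|short} "{desc|firstline}"'
#         return cmdutil.format_changeset_summary(
#             ctx.repo().ui, ctx, command=b'mycmd', default_spec=spec
#         )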
158
158
159 class rebaseruntime(object):
159 class rebaseruntime(object):
160 """This class is a container for rebase runtime state"""
160 """This class is a container for rebase runtime state"""
161
161
162 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
162 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
163 if opts is None:
163 if opts is None:
164 opts = {}
164 opts = {}
165
165
166 # prepared: whether we have rebasestate prepared or not. Currently it
166 # prepared: whether we have rebasestate prepared or not. Currently it
167 # decides whether "self.repo" is unfiltered or not.
167 # decides whether "self.repo" is unfiltered or not.
168 # The rebasestate has explicit hash to hash instructions not depending
168 # The rebasestate has explicit hash to hash instructions not depending
169 # on visibility. If rebasestate exists (in-memory or on-disk), use
169 # on visibility. If rebasestate exists (in-memory or on-disk), use
170 # unfiltered repo to avoid visibility issues.
170 # unfiltered repo to avoid visibility issues.
171 # Before knowing rebasestate (i.e. when starting a new rebase (not
171 # Before knowing rebasestate (i.e. when starting a new rebase (not
172 # --continue or --abort)), the original repo should be used so
172 # --continue or --abort)), the original repo should be used so
173 # visibility-dependent revsets are correct.
173 # visibility-dependent revsets are correct.
174 self.prepared = False
174 self.prepared = False
175 self.resume = False
175 self.resume = False
176 self._repo = repo
176 self._repo = repo
177
177
178 self.ui = ui
178 self.ui = ui
179 self.opts = opts
179 self.opts = opts
180 self.originalwd = None
180 self.originalwd = None
181 self.external = nullrev
181 self.external = nullrev
182 # Mapping between the old revision id and either what is the new rebased
182 # Mapping between the old revision id and either what is the new rebased
183 # revision or what needs to be done with the old revision. The state
183 # revision or what needs to be done with the old revision. The state
184 # dict will be what contains most of the rebase progress state.
184 # dict will be what contains most of the rebase progress state.
185 self.state = {}
185 self.state = {}
186 self.activebookmark = None
186 self.activebookmark = None
187 self.destmap = {}
187 self.destmap = {}
188 self.skipped = set()
188 self.skipped = set()
189
189
190 self.collapsef = opts.get(b'collapse', False)
190 self.collapsef = opts.get(b'collapse', False)
191 self.collapsemsg = cmdutil.logmessage(ui, opts)
191 self.collapsemsg = cmdutil.logmessage(ui, opts)
192 self.date = opts.get(b'date', None)
192 self.date = opts.get(b'date', None)
193
193
194 e = opts.get(b'extrafn') # internal, used by e.g. hgsubversion
194 e = opts.get(b'extrafn') # internal, used by e.g. hgsubversion
195 self.extrafns = [_savegraft]
195 self.extrafns = [_savegraft]
196 if e:
196 if e:
197 self.extrafns = [e]
197 self.extrafns = [e]
198
198
199 self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
199 self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
200 self.keepf = opts.get(b'keep', False)
200 self.keepf = opts.get(b'keep', False)
201 self.keepbranchesf = opts.get(b'keepbranches', False)
201 self.keepbranchesf = opts.get(b'keepbranches', False)
202 self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
202 self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
203 repo.ui, b'rebase'
203 repo.ui, b'rebase'
204 )
204 )
205 self.obsoletenotrebased = {}
205 self.obsoletenotrebased = {}
206 self.obsoletewithoutsuccessorindestination = set()
206 self.obsoletewithoutsuccessorindestination = set()
207 self.inmemory = inmemory
207 self.inmemory = inmemory
208 self.dryrun = dryrun
208 self.dryrun = dryrun
209 self.stateobj = statemod.cmdstate(repo, b'rebasestate')
209 self.stateobj = statemod.cmdstate(repo, b'rebasestate')
210
210
211 @property
211 @property
212 def repo(self):
212 def repo(self):
213 if self.prepared:
213 if self.prepared:
214 return self._repo.unfiltered()
214 return self._repo.unfiltered()
215 else:
215 else:
216 return self._repo
216 return self._repo
217
217
218 def storestatus(self, tr=None):
218 def storestatus(self, tr=None):
219 """Store the current status to allow recovery"""
219 """Store the current status to allow recovery"""
220 if tr:
220 if tr:
221 tr.addfilegenerator(
221 tr.addfilegenerator(
222 b'rebasestate',
222 b'rebasestate',
223 (b'rebasestate',),
223 (b'rebasestate',),
224 self._writestatus,
224 self._writestatus,
225 location=b'plain',
225 location=b'plain',
226 )
226 )
227 else:
227 else:
228 with self.repo.vfs(b"rebasestate", b"w") as f:
228 with self.repo.vfs(b"rebasestate", b"w") as f:
229 self._writestatus(f)
229 self._writestatus(f)
230
230
231 def _writestatus(self, f):
231 def _writestatus(self, f):
232 repo = self.repo
232 repo = self.repo
233 assert repo.filtername is None
233 assert repo.filtername is None
234 f.write(repo[self.originalwd].hex() + b'\n')
234 f.write(repo[self.originalwd].hex() + b'\n')
235 # was "dest". we now write dest per src root below.
235 # was "dest". we now write dest per src root below.
236 f.write(b'\n')
236 f.write(b'\n')
237 f.write(repo[self.external].hex() + b'\n')
237 f.write(repo[self.external].hex() + b'\n')
238 f.write(b'%d\n' % int(self.collapsef))
238 f.write(b'%d\n' % int(self.collapsef))
239 f.write(b'%d\n' % int(self.keepf))
239 f.write(b'%d\n' % int(self.keepf))
240 f.write(b'%d\n' % int(self.keepbranchesf))
240 f.write(b'%d\n' % int(self.keepbranchesf))
241 f.write(b'%s\n' % (self.activebookmark or b''))
241 f.write(b'%s\n' % (self.activebookmark or b''))
242 destmap = self.destmap
242 destmap = self.destmap
243 for d, v in pycompat.iteritems(self.state):
243 for d, v in pycompat.iteritems(self.state):
244 oldrev = repo[d].hex()
244 oldrev = repo[d].hex()
245 if v >= 0:
245 if v >= 0:
246 newrev = repo[v].hex()
246 newrev = repo[v].hex()
247 else:
247 else:
248 newrev = b"%d" % v
248 newrev = b"%d" % v
249 destnode = repo[destmap[d]].hex()
249 destnode = repo[destmap[d]].hex()
250 f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
250 f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
251 repo.ui.debug(b'rebase status stored\n')
251 repo.ui.debug(b'rebase status stored\n')
252
252
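# For orientation, the .hg/rebasestate file written above is plain text and
# its layout follows directly from the sequence of writes in _writestatus():
#
#   line 1      hex node of the original working-directory parent
#   line 2      empty (legacy "dest" field, kept for older clients)
#   line 3      hex node of the external parent
#   lines 4-6   collapse, keep and keepbranches flags, each 0 or 1
#   line 7      active bookmark name, or empty
#   rest        one "oldnode:newnode:destnode" entry per revision, where
#               newnode may be the literal "-1" (revtodo) while pending
#
# A made-up example with one pending revision (placeholders stand in for the
# full 40-character hex nodes the real file stores):
#
#     <40-hex node of originalwd>
#     <empty line>
#     <40-hex node of external parent>
#     0
#     0
#     0
#     my-bookmark
#     <40-hex oldnode>:-1:<40-hex destnode>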
253 def restorestatus(self):
253 def restorestatus(self):
254 """Restore a previously stored status"""
254 """Restore a previously stored status"""
255 if not self.stateobj.exists():
255 if not self.stateobj.exists():
256 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
256 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
257
257
258 data = self._read()
258 data = self._read()
259 self.repo.ui.debug(b'rebase status resumed\n')
259 self.repo.ui.debug(b'rebase status resumed\n')
260
260
261 self.originalwd = data[b'originalwd']
261 self.originalwd = data[b'originalwd']
262 self.destmap = data[b'destmap']
262 self.destmap = data[b'destmap']
263 self.state = data[b'state']
263 self.state = data[b'state']
264 self.skipped = data[b'skipped']
264 self.skipped = data[b'skipped']
265 self.collapsef = data[b'collapse']
265 self.collapsef = data[b'collapse']
266 self.keepf = data[b'keep']
266 self.keepf = data[b'keep']
267 self.keepbranchesf = data[b'keepbranches']
267 self.keepbranchesf = data[b'keepbranches']
268 self.external = data[b'external']
268 self.external = data[b'external']
269 self.activebookmark = data[b'activebookmark']
269 self.activebookmark = data[b'activebookmark']
270
270
271 def _read(self):
271 def _read(self):
272 self.prepared = True
272 self.prepared = True
273 repo = self.repo
273 repo = self.repo
274 assert repo.filtername is None
274 assert repo.filtername is None
275 data = {
275 data = {
276 b'keepbranches': None,
276 b'keepbranches': None,
277 b'collapse': None,
277 b'collapse': None,
278 b'activebookmark': None,
278 b'activebookmark': None,
279 b'external': nullrev,
279 b'external': nullrev,
280 b'keep': None,
280 b'keep': None,
281 b'originalwd': None,
281 b'originalwd': None,
282 }
282 }
283 legacydest = None
283 legacydest = None
284 state = {}
284 state = {}
285 destmap = {}
285 destmap = {}
286
286
287 if True:
287 if True:
288 f = repo.vfs(b"rebasestate")
288 f = repo.vfs(b"rebasestate")
289 for i, l in enumerate(f.read().splitlines()):
289 for i, l in enumerate(f.read().splitlines()):
290 if i == 0:
290 if i == 0:
291 data[b'originalwd'] = repo[l].rev()
291 data[b'originalwd'] = repo[l].rev()
292 elif i == 1:
292 elif i == 1:
293 # this line should be empty in newer versions, but legacy
293 # this line should be empty in newer versions, but legacy
294 # clients may still use it
294 # clients may still use it
295 if l:
295 if l:
296 legacydest = repo[l].rev()
296 legacydest = repo[l].rev()
297 elif i == 2:
297 elif i == 2:
298 data[b'external'] = repo[l].rev()
298 data[b'external'] = repo[l].rev()
299 elif i == 3:
299 elif i == 3:
300 data[b'collapse'] = bool(int(l))
300 data[b'collapse'] = bool(int(l))
301 elif i == 4:
301 elif i == 4:
302 data[b'keep'] = bool(int(l))
302 data[b'keep'] = bool(int(l))
303 elif i == 5:
303 elif i == 5:
304 data[b'keepbranches'] = bool(int(l))
304 data[b'keepbranches'] = bool(int(l))
305 elif i == 6 and not (len(l) == 81 and b':' in l):
305 elif i == 6 and not (len(l) == 81 and b':' in l):
306 # line 6 is a recent addition, so for backwards
306 # line 6 is a recent addition, so for backwards
307 # compatibility check that the line doesn't look like the
307 # compatibility check that the line doesn't look like the
308 # oldrev:newrev lines
308 # oldrev:newrev lines
309 data[b'activebookmark'] = l
309 data[b'activebookmark'] = l
310 else:
310 else:
311 args = l.split(b':')
311 args = l.split(b':')
312 oldrev = repo[args[0]].rev()
312 oldrev = repo[args[0]].rev()
313 newrev = args[1]
313 newrev = args[1]
314 if newrev in legacystates:
314 if newrev in legacystates:
315 continue
315 continue
316 if len(args) > 2:
316 if len(args) > 2:
317 destrev = repo[args[2]].rev()
317 destrev = repo[args[2]].rev()
318 else:
318 else:
319 destrev = legacydest
319 destrev = legacydest
320 destmap[oldrev] = destrev
320 destmap[oldrev] = destrev
321 if newrev == revtodostr:
321 if newrev == revtodostr:
322 state[oldrev] = revtodo
322 state[oldrev] = revtodo
323 # Legacy compat special case
323 # Legacy compat special case
324 else:
324 else:
325 state[oldrev] = repo[newrev].rev()
325 state[oldrev] = repo[newrev].rev()
326
326
327 if data[b'keepbranches'] is None:
327 if data[b'keepbranches'] is None:
328 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
328 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
329
329
330 data[b'destmap'] = destmap
330 data[b'destmap'] = destmap
331 data[b'state'] = state
331 data[b'state'] = state
332 skipped = set()
332 skipped = set()
333 # recompute the set of skipped revs
333 # recompute the set of skipped revs
334 if not data[b'collapse']:
334 if not data[b'collapse']:
335 seen = set(destmap.values())
335 seen = set(destmap.values())
336 for old, new in sorted(state.items()):
336 for old, new in sorted(state.items()):
337 if new != revtodo and new in seen:
337 if new != revtodo and new in seen:
338 skipped.add(old)
338 skipped.add(old)
339 seen.add(new)
339 seen.add(new)
340 data[b'skipped'] = skipped
340 data[b'skipped'] = skipped
341 repo.ui.debug(
341 repo.ui.debug(
342 b'computed skipped revs: %s\n'
342 b'computed skipped revs: %s\n'
343 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
343 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
344 )
344 )
345
345
346 return data
346 return data
347
347
348 def _handleskippingobsolete(self, obsoleterevs, destmap):
348 def _handleskippingobsolete(self, obsoleterevs, destmap):
349 """Compute structures necessary for skipping obsolete revisions
349 """Compute structures necessary for skipping obsolete revisions
350
350
351 obsoleterevs: iterable of all obsolete revisions in rebaseset
351 obsoleterevs: iterable of all obsolete revisions in rebaseset
352 destmap: {srcrev: destrev} destination revisions
352 destmap: {srcrev: destrev} destination revisions
353 """
353 """
354 self.obsoletenotrebased = {}
354 self.obsoletenotrebased = {}
355 if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
355 if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
356 return
356 return
357 obsoleteset = set(obsoleterevs)
357 obsoleteset = set(obsoleterevs)
358 (
358 (
359 self.obsoletenotrebased,
359 self.obsoletenotrebased,
360 self.obsoletewithoutsuccessorindestination,
360 self.obsoletewithoutsuccessorindestination,
361 obsoleteextinctsuccessors,
361 obsoleteextinctsuccessors,
362 ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
362 ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
363 skippedset = set(self.obsoletenotrebased)
363 skippedset = set(self.obsoletenotrebased)
364 skippedset.update(self.obsoletewithoutsuccessorindestination)
364 skippedset.update(self.obsoletewithoutsuccessorindestination)
365 skippedset.update(obsoleteextinctsuccessors)
365 skippedset.update(obsoleteextinctsuccessors)
366 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
366 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
367
367
368 def _prepareabortorcontinue(
368 def _prepareabortorcontinue(
369 self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
369 self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
370 ):
370 ):
371 self.resume = True
371 self.resume = True
372 try:
372 try:
373 self.restorestatus()
373 self.restorestatus()
374 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
374 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
375 except error.RepoLookupError:
375 except error.RepoLookupError:
376 if isabort:
376 if isabort:
377 clearstatus(self.repo)
377 clearstatus(self.repo)
378 clearcollapsemsg(self.repo)
378 clearcollapsemsg(self.repo)
379 self.repo.ui.warn(
379 self.repo.ui.warn(
380 _(
380 _(
381 b'rebase aborted (no revision is removed,'
381 b'rebase aborted (no revision is removed,'
382 b' only broken state is cleared)\n'
382 b' only broken state is cleared)\n'
383 )
383 )
384 )
384 )
385 return 0
385 return 0
386 else:
386 else:
387 msg = _(b'cannot continue inconsistent rebase')
387 msg = _(b'cannot continue inconsistent rebase')
388 hint = _(b'use "hg rebase --abort" to clear broken state')
388 hint = _(b'use "hg rebase --abort" to clear broken state')
389 raise error.Abort(msg, hint=hint)
389 raise error.Abort(msg, hint=hint)
390
390
391 if isabort:
391 if isabort:
392 backup = backup and self.backupf
392 backup = backup and self.backupf
393 return self._abort(
393 return self._abort(
394 backup=backup,
394 backup=backup,
395 suppwarns=suppwarns,
395 suppwarns=suppwarns,
396 dryrun=dryrun,
396 dryrun=dryrun,
397 confirm=confirm,
397 confirm=confirm,
398 )
398 )
399
399
400 def _preparenewrebase(self, destmap):
400 def _preparenewrebase(self, destmap):
401 if not destmap:
401 if not destmap:
402 return _nothingtorebase()
402 return _nothingtorebase()
403
403
404 rebaseset = destmap.keys()
404 rebaseset = destmap.keys()
405 if not self.keepf:
405 if not self.keepf:
406 try:
406 try:
407 rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
407 rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
408 except error.Abort as e:
408 except error.Abort as e:
409 if e.hint is None:
409 if e.hint is None:
410 e.hint = _(b'use --keep to keep original changesets')
410 e.hint = _(b'use --keep to keep original changesets')
411 raise e
411 raise e
412
412
413 result = buildstate(self.repo, destmap, self.collapsef)
413 result = buildstate(self.repo, destmap, self.collapsef)
414
414
415 if not result:
415 if not result:
416 # Empty state built, nothing to rebase
416 # Empty state built, nothing to rebase
417 self.ui.status(_(b'nothing to rebase\n'))
417 self.ui.status(_(b'nothing to rebase\n'))
418 return _nothingtorebase()
418 return _nothingtorebase()
419
419
420 (self.originalwd, self.destmap, self.state) = result
420 (self.originalwd, self.destmap, self.state) = result
421 if self.collapsef:
421 if self.collapsef:
422 dests = set(self.destmap.values())
422 dests = set(self.destmap.values())
423 if len(dests) != 1:
423 if len(dests) != 1:
424 raise error.Abort(
424 raise error.Abort(
425 _(b'--collapse does not work with multiple destinations')
425 _(b'--collapse does not work with multiple destinations')
426 )
426 )
427 destrev = next(iter(dests))
427 destrev = next(iter(dests))
428 destancestors = self.repo.changelog.ancestors(
428 destancestors = self.repo.changelog.ancestors(
429 [destrev], inclusive=True
429 [destrev], inclusive=True
430 )
430 )
431 self.external = externalparent(self.repo, self.state, destancestors)
431 self.external = externalparent(self.repo, self.state, destancestors)
432
432
433 for destrev in sorted(set(destmap.values())):
433 for destrev in sorted(set(destmap.values())):
434 dest = self.repo[destrev]
434 dest = self.repo[destrev]
435 if dest.closesbranch() and not self.keepbranchesf:
435 if dest.closesbranch() and not self.keepbranchesf:
436 self.ui.status(_(b'reopening closed branch head %s\n') % dest)
436 self.ui.status(_(b'reopening closed branch head %s\n') % dest)
437
437
438 self.prepared = True
438 self.prepared = True
439
439
440 def _assignworkingcopy(self):
440 def _assignworkingcopy(self):
441 if self.inmemory:
441 if self.inmemory:
442 from mercurial.context import overlayworkingctx
442 from mercurial.context import overlayworkingctx
443
443
444 self.wctx = overlayworkingctx(self.repo)
444 self.wctx = overlayworkingctx(self.repo)
445 self.repo.ui.debug(b"rebasing in memory\n")
445 self.repo.ui.debug(b"rebasing in memory\n")
446 else:
446 else:
447 self.wctx = self.repo[None]
447 self.wctx = self.repo[None]
448 self.repo.ui.debug(b"rebasing on disk\n")
448 self.repo.ui.debug(b"rebasing on disk\n")
449 self.repo.ui.log(
449 self.repo.ui.log(
450 b"rebase",
450 b"rebase",
451 b"using in-memory rebase: %r\n",
451 b"using in-memory rebase: %r\n",
452 self.inmemory,
452 self.inmemory,
453 rebase_imm_used=self.inmemory,
453 rebase_imm_used=self.inmemory,
454 )
454 )
455
455
456 def _performrebase(self, tr):
456 def _performrebase(self, tr):
457 self._assignworkingcopy()
457 self._assignworkingcopy()
458 repo, ui = self.repo, self.ui
458 repo, ui = self.repo, self.ui
459 if self.keepbranchesf:
459 if self.keepbranchesf:
460 # insert _savebranch at the start of extrafns so if
460 # insert _savebranch at the start of extrafns so if
461 # there's a user-provided extrafn it can clobber branch if
461 # there's a user-provided extrafn it can clobber branch if
462 # desired
462 # desired
463 self.extrafns.insert(0, _savebranch)
463 self.extrafns.insert(0, _savebranch)
464 if self.collapsef:
464 if self.collapsef:
465 branches = set()
465 branches = set()
466 for rev in self.state:
466 for rev in self.state:
467 branches.add(repo[rev].branch())
467 branches.add(repo[rev].branch())
468 if len(branches) > 1:
468 if len(branches) > 1:
469 raise error.Abort(
469 raise error.Abort(
470 _(b'cannot collapse multiple named branches')
470 _(b'cannot collapse multiple named branches')
471 )
471 )
472
472
473 # Calculate self.obsoletenotrebased
473 # Calculate self.obsoletenotrebased
474 obsrevs = _filterobsoleterevs(self.repo, self.state)
474 obsrevs = _filterobsoleterevs(self.repo, self.state)
475 self._handleskippingobsolete(obsrevs, self.destmap)
475 self._handleskippingobsolete(obsrevs, self.destmap)
476
476
477 # Keep track of the active bookmarks in order to reset them later
477 # Keep track of the active bookmarks in order to reset them later
478 self.activebookmark = self.activebookmark or repo._activebookmark
478 self.activebookmark = self.activebookmark or repo._activebookmark
479 if self.activebookmark:
479 if self.activebookmark:
480 bookmarks.deactivate(repo)
480 bookmarks.deactivate(repo)
481
481
482 # Store the state before we begin so users can run 'hg rebase --abort'
482 # Store the state before we begin so users can run 'hg rebase --abort'
483 # if we fail before the transaction closes.
483 # if we fail before the transaction closes.
484 self.storestatus()
484 self.storestatus()
485 if tr:
485 if tr:
486 # When using single transaction, store state when transaction
486 # When using single transaction, store state when transaction
487 # commits.
487 # commits.
488 self.storestatus(tr)
488 self.storestatus(tr)
489
489
490 cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
490 cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
491 p = repo.ui.makeprogress(
491 p = repo.ui.makeprogress(
492 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
492 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
493 )
493 )
494
494
495 def progress(ctx):
495 def progress(ctx):
496 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
496 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
497
497
498 allowdivergence = self.ui.configbool(
498 allowdivergence = self.ui.configbool(
499 b'experimental', b'evolution.allowdivergence'
499 b'experimental', b'evolution.allowdivergence'
500 )
500 )
501 for subset in sortsource(self.destmap):
501 for subset in sortsource(self.destmap):
502 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
502 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
503 if not allowdivergence:
503 if not allowdivergence:
504 sortedrevs -= self.repo.revs(
504 sortedrevs -= self.repo.revs(
505 b'descendants(%ld) and not %ld',
505 b'descendants(%ld) and not %ld',
506 self.obsoletewithoutsuccessorindestination,
506 self.obsoletewithoutsuccessorindestination,
507 self.obsoletewithoutsuccessorindestination,
507 self.obsoletewithoutsuccessorindestination,
508 )
508 )
509 for rev in sortedrevs:
509 for rev in sortedrevs:
510 self._rebasenode(tr, rev, allowdivergence, progress)
510 self._rebasenode(tr, rev, allowdivergence, progress)
511 p.complete()
511 p.complete()
512 ui.note(_(b'rebase merging completed\n'))
512 ui.note(_(b'rebase merging completed\n'))
513
513
514 def _concludenode(self, rev, editor, commitmsg=None):
514 def _concludenode(self, rev, editor, commitmsg=None):
515 '''Commit the wd changes with parents p1 and p2.
515 '''Commit the wd changes with parents p1 and p2.
516
516
517 Reuse commit info from rev but also store useful information in extra.
517 Reuse commit info from rev but also store useful information in extra.
518 Return node of committed revision.'''
518 Return node of committed revision.'''
519 repo = self.repo
519 repo = self.repo
520 ctx = repo[rev]
520 ctx = repo[rev]
521 if commitmsg is None:
521 if commitmsg is None:
522 commitmsg = ctx.description()
522 commitmsg = ctx.description()
523 date = self.date
523 date = self.date
524 if date is None:
524 if date is None:
525 date = ctx.date()
525 date = ctx.date()
526 extra = {b'rebase_source': ctx.hex()}
526 extra = {b'rebase_source': ctx.hex()}
527 for c in self.extrafns:
527 for c in self.extrafns:
528 c(ctx, extra)
528 c(ctx, extra)
529 destphase = max(ctx.phase(), phases.draft)
529 destphase = max(ctx.phase(), phases.draft)
530 overrides = {
530 overrides = {
531 (b'phases', b'new-commit'): destphase,
531 (b'phases', b'new-commit'): destphase,
532 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
532 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
533 }
533 }
534 with repo.ui.configoverride(overrides, b'rebase'):
534 with repo.ui.configoverride(overrides, b'rebase'):
535 if self.inmemory:
535 if self.inmemory:
536 newnode = commitmemorynode(
536 newnode = commitmemorynode(
537 repo,
537 repo,
538 wctx=self.wctx,
538 wctx=self.wctx,
539 extra=extra,
539 extra=extra,
540 commitmsg=commitmsg,
540 commitmsg=commitmsg,
541 editor=editor,
541 editor=editor,
542 user=ctx.user(),
542 user=ctx.user(),
543 date=date,
543 date=date,
544 )
544 )
545 else:
545 else:
546 newnode = commitnode(
546 newnode = commitnode(
547 repo,
547 repo,
548 extra=extra,
548 extra=extra,
549 commitmsg=commitmsg,
549 commitmsg=commitmsg,
550 editor=editor,
550 editor=editor,
551 user=ctx.user(),
551 user=ctx.user(),
552 date=date,
552 date=date,
553 )
553 )
554
554
555 return newnode
555 return newnode
556
556
557 def _rebasenode(self, tr, rev, allowdivergence, progressfn):
557 def _rebasenode(self, tr, rev, allowdivergence, progressfn):
558 repo, ui, opts = self.repo, self.ui, self.opts
558 repo, ui, opts = self.repo, self.ui, self.opts
559 ctx = repo[rev]
559 ctx = repo[rev]
560 desc = _ctxdesc(ctx)
560 desc = _ctxdesc(ctx)
561 if self.state[rev] == rev:
561 if self.state[rev] == rev:
562 ui.status(_(b'already rebased %s\n') % desc)
562 ui.status(_(b'already rebased %s\n') % desc)
563 elif (
563 elif (
564 not allowdivergence
564 not allowdivergence
565 and rev in self.obsoletewithoutsuccessorindestination
565 and rev in self.obsoletewithoutsuccessorindestination
566 ):
566 ):
567 msg = (
567 msg = (
568 _(
568 _(
569 b'note: not rebasing %s and its descendants as '
569 b'note: not rebasing %s and its descendants as '
570 b'this would cause divergence\n'
570 b'this would cause divergence\n'
571 )
571 )
572 % desc
572 % desc
573 )
573 )
574 repo.ui.status(msg)
574 repo.ui.status(msg)
575 self.skipped.add(rev)
575 self.skipped.add(rev)
576 elif rev in self.obsoletenotrebased:
576 elif rev in self.obsoletenotrebased:
577 succ = self.obsoletenotrebased[rev]
577 succ = self.obsoletenotrebased[rev]
578 if succ is None:
578 if succ is None:
579 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
579 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
580 else:
580 else:
581 succdesc = _ctxdesc(repo[succ])
581 succdesc = _ctxdesc(repo[succ])
582 msg = _(
582 msg = _(
583 b'note: not rebasing %s, already in destination as %s\n'
583 b'note: not rebasing %s, already in destination as %s\n'
584 ) % (desc, succdesc)
584 ) % (desc, succdesc)
585 repo.ui.status(msg)
585 repo.ui.status(msg)
586 # Make clearrebased aware state[rev] is not a true successor
586 # Make clearrebased aware state[rev] is not a true successor
587 self.skipped.add(rev)
587 self.skipped.add(rev)
588 # Record rev as moved to its desired destination in self.state.
588 # Record rev as moved to its desired destination in self.state.
589 # This helps bookmark and working parent movement.
589 # This helps bookmark and working parent movement.
590 dest = max(
590 dest = max(
591 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
591 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
592 )
592 )
593 self.state[rev] = dest
593 self.state[rev] = dest
594 elif self.state[rev] == revtodo:
594 elif self.state[rev] == revtodo:
595 ui.status(_(b'rebasing %s\n') % desc)
595 ui.status(_(b'rebasing %s\n') % desc)
596 progressfn(ctx)
596 progressfn(ctx)
597 p1, p2, base = defineparents(
597 p1, p2, base = defineparents(
598 repo,
598 repo,
599 rev,
599 rev,
600 self.destmap,
600 self.destmap,
601 self.state,
601 self.state,
602 self.skipped,
602 self.skipped,
603 self.obsoletenotrebased,
603 self.obsoletenotrebased,
604 )
604 )
605 if self.resume and self.wctx.p1().rev() == p1:
605 if self.resume and self.wctx.p1().rev() == p1:
606 repo.ui.debug(b'resuming interrupted rebase\n')
606 repo.ui.debug(b'resuming interrupted rebase\n')
607 self.resume = False
607 self.resume = False
608 else:
608 else:
609 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
609 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
610 with ui.configoverride(overrides, b'rebase'):
610 with ui.configoverride(overrides, b'rebase'):
611 try:
611 try:
612 rebasenode(
612 rebasenode(
613 repo,
613 repo,
614 rev,
614 rev,
615 p1,
615 p1,
616 p2,
616 p2,
617 base,
617 base,
618 self.collapsef,
618 self.collapsef,
619 wctx=self.wctx,
619 wctx=self.wctx,
620 )
620 )
621 except error.InMemoryMergeConflictsError:
621 except error.InMemoryMergeConflictsError:
622 if self.dryrun:
622 if self.dryrun:
623 raise error.ConflictResolutionRequired(b'rebase')
623 raise error.ConflictResolutionRequired(b'rebase')
624 if self.collapsef:
624 if self.collapsef:
625 # TODO: Make the overlayworkingctx reflected
625 # TODO: Make the overlayworkingctx reflected
626 # in the working copy here instead of re-raising
626 # in the working copy here instead of re-raising
627 # so the entire rebase operation is retried.
627 # so the entire rebase operation is retried.
628 raise
628 raise
629 ui.status(
629 ui.status(
630 _(
630 _(
631 b"hit merge conflicts; rebasing that "
631 b"hit merge conflicts; rebasing that "
632 b"commit again in the working copy\n"
632 b"commit again in the working copy\n"
633 )
633 )
634 )
634 )
635 cmdutil.bailifchanged(repo)
635 cmdutil.bailifchanged(repo)
636 self.inmemory = False
636 self.inmemory = False
637 self._assignworkingcopy()
637 self._assignworkingcopy()
638 mergemod.update(repo[p1], wc=self.wctx)
638 mergemod.update(repo[p1], wc=self.wctx)
639 rebasenode(
639 rebasenode(
640 repo,
640 repo,
641 rev,
641 rev,
642 p1,
642 p1,
643 p2,
643 p2,
644 base,
644 base,
645 self.collapsef,
645 self.collapsef,
646 wctx=self.wctx,
646 wctx=self.wctx,
647 )
647 )
648 if not self.collapsef:
648 if not self.collapsef:
649 merging = p2 != nullrev
649 merging = p2 != nullrev
650 editform = cmdutil.mergeeditform(merging, b'rebase')
650 editform = cmdutil.mergeeditform(merging, b'rebase')
651 editor = cmdutil.getcommiteditor(
651 editor = cmdutil.getcommiteditor(
652 editform=editform, **pycompat.strkwargs(opts)
652 editform=editform, **pycompat.strkwargs(opts)
653 )
653 )
654 # We need to set parents again here just in case we're continuing
654 # We need to set parents again here just in case we're continuing
655 # a rebase started with an old hg version (before 9c9cfecd4600),
655 # a rebase started with an old hg version (before 9c9cfecd4600),
656 # because those old versions would have left us with two dirstate
656 # because those old versions would have left us with two dirstate
657 # parents, and we don't want to create a merge commit here (unless
657 # parents, and we don't want to create a merge commit here (unless
658 # we're rebasing a merge commit).
658 # we're rebasing a merge commit).
659 self.wctx.setparents(repo[p1].node(), repo[p2].node())
659 self.wctx.setparents(repo[p1].node(), repo[p2].node())
660 newnode = self._concludenode(rev, editor)
660 newnode = self._concludenode(rev, editor)
661 else:
661 else:
662 # Skip commit if we are collapsing
662 # Skip commit if we are collapsing
663 newnode = None
663 newnode = None
664 # Update the state
664 # Update the state
665 if newnode is not None:
665 if newnode is not None:
666 self.state[rev] = repo[newnode].rev()
666 self.state[rev] = repo[newnode].rev()
667 ui.debug(b'rebased as %s\n' % short(newnode))
667 ui.debug(b'rebased as %s\n' % short(newnode))
668 if repo[newnode].isempty():
668 if repo[newnode].isempty():
669 ui.warn(
669 ui.warn(
670 _(
670 _(
671 b'note: created empty successor for %s, its '
671 b'note: created empty successor for %s, its '
672 b'destination already has all its changes\n'
672 b'destination already has all its changes\n'
673 )
673 )
674 % desc
674 % desc
675 )
675 )
676 else:
676 else:
677 if not self.collapsef:
677 if not self.collapsef:
678 ui.warn(
678 ui.warn(
679 _(
679 _(
680 b'note: not rebasing %s, its destination already '
680 b'note: not rebasing %s, its destination already '
681 b'has all its changes\n'
681 b'has all its changes\n'
682 )
682 )
683 % desc
683 % desc
684 )
684 )
685 self.skipped.add(rev)
685 self.skipped.add(rev)
686 self.state[rev] = p1
686 self.state[rev] = p1
687 ui.debug(b'next revision set to %d\n' % p1)
687 ui.debug(b'next revision set to %d\n' % p1)
688 else:
688 else:
689 ui.status(
689 ui.status(
690 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
690 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
691 )
691 )
692 if not tr:
692 if not tr:
693 # When not using single transaction, store state after each
693 # When not using single transaction, store state after each
694 # commit is completely done. On InterventionRequired, we thus
694 # commit is completely done. On InterventionRequired, we thus
695 # won't store the status. Instead, we'll hit the "len(parents) == 2"
695 # won't store the status. Instead, we'll hit the "len(parents) == 2"
696 # case and realize that the commit was in progress.
696 # case and realize that the commit was in progress.
697 self.storestatus()
697 self.storestatus()
698
698
699 def _finishrebase(self):
699 def _finishrebase(self):
700 repo, ui, opts = self.repo, self.ui, self.opts
700 repo, ui, opts = self.repo, self.ui, self.opts
701 fm = ui.formatter(b'rebase', opts)
701 fm = ui.formatter(b'rebase', opts)
702 fm.startitem()
702 fm.startitem()
703 if self.collapsef:
703 if self.collapsef:
704 p1, p2, _base = defineparents(
704 p1, p2, _base = defineparents(
705 repo,
705 repo,
706 min(self.state),
706 min(self.state),
707 self.destmap,
707 self.destmap,
708 self.state,
708 self.state,
709 self.skipped,
709 self.skipped,
710 self.obsoletenotrebased,
710 self.obsoletenotrebased,
711 )
711 )
712 editopt = opts.get(b'edit')
712 editopt = opts.get(b'edit')
713 editform = b'rebase.collapse'
713 editform = b'rebase.collapse'
714 if self.collapsemsg:
714 if self.collapsemsg:
715 commitmsg = self.collapsemsg
715 commitmsg = self.collapsemsg
716 else:
716 else:
717 commitmsg = b'Collapsed revision'
717 commitmsg = b'Collapsed revision'
718 for rebased in sorted(self.state):
718 for rebased in sorted(self.state):
719 if rebased not in self.skipped:
719 if rebased not in self.skipped:
720 commitmsg += b'\n* %s' % repo[rebased].description()
720 commitmsg += b'\n* %s' % repo[rebased].description()
721 editopt = True
721 editopt = True
722 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
722 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
723 revtoreuse = max(self.state)
723 revtoreuse = max(self.state)
724
724
725 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
725 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
726 newnode = self._concludenode(
726 newnode = self._concludenode(
727 revtoreuse, editor, commitmsg=commitmsg
727 revtoreuse, editor, commitmsg=commitmsg
728 )
728 )
729
729
730 if newnode is not None:
730 if newnode is not None:
731 newrev = repo[newnode].rev()
731 newrev = repo[newnode].rev()
732 for oldrev in self.state:
732 for oldrev in self.state:
733 self.state[oldrev] = newrev
733 self.state[oldrev] = newrev
734
734
735 if b'qtip' in repo.tags():
735 if b'qtip' in repo.tags():
736 updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
736 updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
737
737
738 # restore original working directory
738 # restore original working directory
739 # (we do this before stripping)
739 # (we do this before stripping)
740 newwd = self.state.get(self.originalwd, self.originalwd)
740 newwd = self.state.get(self.originalwd, self.originalwd)
741 if newwd < 0:
741 if newwd < 0:
742 # original directory is a parent of rebase set root or ignored
742 # original directory is a parent of rebase set root or ignored
743 newwd = self.originalwd
743 newwd = self.originalwd
744 if newwd not in [c.rev() for c in repo[None].parents()]:
744 if newwd not in [c.rev() for c in repo[None].parents()]:
745 ui.note(_(b"update back to initial working directory parent\n"))
745 ui.note(_(b"update back to initial working directory parent\n"))
746 mergemod.update(repo[newwd])
746 mergemod.update(repo[newwd])
747
747
748 collapsedas = None
748 collapsedas = None
749 if self.collapsef and not self.keepf:
749 if self.collapsef and not self.keepf:
750 collapsedas = newnode
750 collapsedas = newnode
751 clearrebased(
751 clearrebased(
752 ui,
752 ui,
753 repo,
753 repo,
754 self.destmap,
754 self.destmap,
755 self.state,
755 self.state,
756 self.skipped,
756 self.skipped,
757 collapsedas,
757 collapsedas,
758 self.keepf,
758 self.keepf,
759 fm=fm,
759 fm=fm,
760 backup=self.backupf,
760 backup=self.backupf,
761 )
761 )
762
762
763 clearstatus(repo)
763 clearstatus(repo)
764 clearcollapsemsg(repo)
764 clearcollapsemsg(repo)
765
765
766 ui.note(_(b"rebase completed\n"))
766 ui.note(_(b"rebase completed\n"))
767 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
767 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
768 if self.skipped:
768 if self.skipped:
769 skippedlen = len(self.skipped)
769 skippedlen = len(self.skipped)
770 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
770 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
771 fm.end()
771 fm.end()
772
772
773 if (
773 if (
774 self.activebookmark
774 self.activebookmark
775 and self.activebookmark in repo._bookmarks
775 and self.activebookmark in repo._bookmarks
776 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
776 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
777 ):
777 ):
778 bookmarks.activate(repo, self.activebookmark)
778 bookmarks.activate(repo, self.activebookmark)
779
779
780 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
780 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
781 '''Restore the repository to its original state.'''
781 '''Restore the repository to its original state.'''
782
782
783 repo = self.repo
783 repo = self.repo
784 try:
784 try:
785 # If the first commits in the rebased set get skipped during the
785 # If the first commits in the rebased set get skipped during the
786 # rebase, their values within the state mapping will be the dest
786 # rebase, their values within the state mapping will be the dest
787 # rev id. The rebased list must not contain the dest rev
787 # rev id. The rebased list must not contain the dest rev
788 # (issue4896)
788 # (issue4896)
789 rebased = [
789 rebased = [
790 s
790 s
791 for r, s in self.state.items()
791 for r, s in self.state.items()
792 if s >= 0 and s != r and s != self.destmap[r]
792 if s >= 0 and s != r and s != self.destmap[r]
793 ]
793 ]
794 immutable = [d for d in rebased if not repo[d].mutable()]
794 immutable = [d for d in rebased if not repo[d].mutable()]
795 cleanup = True
795 cleanup = True
796 if immutable:
796 if immutable:
797 repo.ui.warn(
797 repo.ui.warn(
798 _(b"warning: can't clean up public changesets %s\n")
798 _(b"warning: can't clean up public changesets %s\n")
799 % b', '.join(bytes(repo[r]) for r in immutable),
799 % b', '.join(bytes(repo[r]) for r in immutable),
800 hint=_(b"see 'hg help phases' for details"),
800 hint=_(b"see 'hg help phases' for details"),
801 )
801 )
802 cleanup = False
802 cleanup = False
803
803
804 descendants = set()
804 descendants = set()
805 if rebased:
805 if rebased:
806 descendants = set(repo.changelog.descendants(rebased))
806 descendants = set(repo.changelog.descendants(rebased))
807 if descendants - set(rebased):
807 if descendants - set(rebased):
808 repo.ui.warn(
808 repo.ui.warn(
809 _(
809 _(
810 b"warning: new changesets detected on "
810 b"warning: new changesets detected on "
811 b"destination branch, can't strip\n"
811 b"destination branch, can't strip\n"
812 )
812 )
813 )
813 )
814 cleanup = False
814 cleanup = False
815
815
816 if cleanup:
816 if cleanup:
817 if rebased:
817 if rebased:
818 strippoints = [
818 strippoints = [
819 c.node() for c in repo.set(b'roots(%ld)', rebased)
819 c.node() for c in repo.set(b'roots(%ld)', rebased)
820 ]
820 ]
821
821
822 updateifonnodes = set(rebased)
822 updateifonnodes = set(rebased)
823 updateifonnodes.update(self.destmap.values())
823 updateifonnodes.update(self.destmap.values())
824
824
825 if not dryrun and not confirm:
825 if not dryrun and not confirm:
826 updateifonnodes.add(self.originalwd)
826 updateifonnodes.add(self.originalwd)
827
827
828 shouldupdate = repo[b'.'].rev() in updateifonnodes
828 shouldupdate = repo[b'.'].rev() in updateifonnodes
829
829
830 # Update away from the rebase if necessary
830 # Update away from the rebase if necessary
831 if shouldupdate:
831 if shouldupdate:
832 mergemod.clean_update(repo[self.originalwd])
832 mergemod.clean_update(repo[self.originalwd])
833
833
834 # Strip from the first rebased revision
834 # Strip from the first rebased revision
835 if rebased:
835 if rebased:
836 repair.strip(repo.ui, repo, strippoints, backup=backup)
836 repair.strip(repo.ui, repo, strippoints, backup=backup)
837
837
838 if self.activebookmark and self.activebookmark in repo._bookmarks:
838 if self.activebookmark and self.activebookmark in repo._bookmarks:
839 bookmarks.activate(repo, self.activebookmark)
839 bookmarks.activate(repo, self.activebookmark)
840
840
841 finally:
841 finally:
842 clearstatus(repo)
842 clearstatus(repo)
843 clearcollapsemsg(repo)
843 clearcollapsemsg(repo)
844 if not suppwarns:
844 if not suppwarns:
845 repo.ui.warn(_(b'rebase aborted\n'))
845 repo.ui.warn(_(b'rebase aborted\n'))
846 return 0
846 return 0
847
847
848
848
849 @command(
849 @command(
850 b'rebase',
850 b'rebase',
851 [
851 [
852 (
852 (
853 b's',
853 b's',
854 b'source',
854 b'source',
855 [],
855 [],
856 _(b'rebase the specified changesets and their descendants'),
856 _(b'rebase the specified changesets and their descendants'),
857 _(b'REV'),
857 _(b'REV'),
858 ),
858 ),
859 (
859 (
860 b'b',
860 b'b',
861 b'base',
861 b'base',
862 [],
862 [],
863 _(b'rebase everything from branching point of specified changeset'),
863 _(b'rebase everything from branching point of specified changeset'),
864 _(b'REV'),
864 _(b'REV'),
865 ),
865 ),
866 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
866 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
867 (
867 (
868 b'd',
868 b'd',
869 b'dest',
869 b'dest',
870 b'',
870 b'',
871 _(b'rebase onto the specified changeset'),
871 _(b'rebase onto the specified changeset'),
872 _(b'REV'),
872 _(b'REV'),
873 ),
873 ),
874 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
874 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
875 (
875 (
876 b'm',
876 b'm',
877 b'message',
877 b'message',
878 b'',
878 b'',
879 _(b'use text as collapse commit message'),
879 _(b'use text as collapse commit message'),
880 _(b'TEXT'),
880 _(b'TEXT'),
881 ),
881 ),
882 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
882 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
883 (
883 (
884 b'l',
884 b'l',
885 b'logfile',
885 b'logfile',
886 b'',
886 b'',
887 _(b'read collapse commit message from file'),
887 _(b'read collapse commit message from file'),
888 _(b'FILE'),
888 _(b'FILE'),
889 ),
889 ),
890 (b'k', b'keep', False, _(b'keep original changesets')),
890 (b'k', b'keep', False, _(b'keep original changesets')),
891 (b'', b'keepbranches', False, _(b'keep original branch names')),
891 (b'', b'keepbranches', False, _(b'keep original branch names')),
892 (b'D', b'detach', False, _(b'(DEPRECATED)')),
892 (b'D', b'detach', False, _(b'(DEPRECATED)')),
893 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
893 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
894 (b't', b'tool', b'', _(b'specify merge tool')),
894 (b't', b'tool', b'', _(b'specify merge tool')),
895 (b'', b'stop', False, _(b'stop interrupted rebase')),
895 (b'', b'stop', False, _(b'stop interrupted rebase')),
896 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
896 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
897 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
897 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
898 (
898 (
899 b'',
899 b'',
900 b'auto-orphans',
900 b'auto-orphans',
901 b'',
901 b'',
902 _(
902 _(
903 b'automatically rebase orphan revisions '
903 b'automatically rebase orphan revisions '
904 b'in the specified revset (EXPERIMENTAL)'
904 b'in the specified revset (EXPERIMENTAL)'
905 ),
905 ),
906 ),
906 ),
907 ]
907 ]
908 + cmdutil.dryrunopts
908 + cmdutil.dryrunopts
909 + cmdutil.formatteropts
909 + cmdutil.formatteropts
910 + cmdutil.confirmopts,
910 + cmdutil.confirmopts,
911 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
911 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
912 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
912 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
913 )
913 )
914 def rebase(ui, repo, **opts):
914 def rebase(ui, repo, **opts):
915 """move changeset (and descendants) to a different branch
915 """move changeset (and descendants) to a different branch
916
916
917 Rebase uses repeated merging to graft changesets from one part of
917 Rebase uses repeated merging to graft changesets from one part of
918 history (the source) onto another (the destination). This can be
918 history (the source) onto another (the destination). This can be
919 useful for linearizing *local* changes relative to a master
919 useful for linearizing *local* changes relative to a master
920 development tree.
920 development tree.
921
921
922 Published commits cannot be rebased (see :hg:`help phases`).
922 Published commits cannot be rebased (see :hg:`help phases`).
923 To copy commits, see :hg:`help graft`.
923 To copy commits, see :hg:`help graft`.
924
924
925 If you don't specify a destination changeset (``-d/--dest``), rebase
925 If you don't specify a destination changeset (``-d/--dest``), rebase
926 will use the same logic as :hg:`merge` to pick a destination. If
926 will use the same logic as :hg:`merge` to pick a destination. If
927 the current branch contains exactly one other head, the other head
927 the current branch contains exactly one other head, the other head
928 is merged with by default. Otherwise, an explicit revision with
928 is merged with by default. Otherwise, an explicit revision with
929 which to merge must be provided. (The destination changeset is not
929 which to merge must be provided. (The destination changeset is not
930 modified by rebasing, but new changesets are added as its
930 modified by rebasing, but new changesets are added as its
931 descendants.)
931 descendants.)
932
932
933 Here are the ways to select changesets:
933 Here are the ways to select changesets:
934
934
935 1. Explicitly select them using ``--rev``.
935 1. Explicitly select them using ``--rev``.
936
936
937 2. Use ``--source`` to select a root changeset and include all of its
937 2. Use ``--source`` to select a root changeset and include all of its
938 descendants.
938 descendants.
939
939
940 3. Use ``--base`` to select a changeset; rebase will find ancestors
940 3. Use ``--base`` to select a changeset; rebase will find ancestors
941 and their descendants which are not also ancestors of the destination.
941 and their descendants which are not also ancestors of the destination.
942
942
943 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
943 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
944 rebase will use ``--base .`` as above.
944 rebase will use ``--base .`` as above.
945
945
946 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
946 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
947 can be used in ``--dest``. Destination would be calculated per source
947 can be used in ``--dest``. Destination would be calculated per source
948 revision with ``SRC`` substituted by that single source revision and
948 revision with ``SRC`` substituted by that single source revision and
949 ``ALLSRC`` substituted by all source revisions.
949 ``ALLSRC`` substituted by all source revisions.
950
950
951 Rebase will destroy original changesets unless you use ``--keep``.
951 Rebase will destroy original changesets unless you use ``--keep``.
952 It will also move your bookmarks (even if you do).
952 It will also move your bookmarks (even if you do).
953
953
954 Some changesets may be dropped if they do not contribute changes
954 Some changesets may be dropped if they do not contribute changes
955 (e.g. merges from the destination branch).
955 (e.g. merges from the destination branch).
956
956
957 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
957 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
958 a named branch with two heads. You will need to explicitly specify source
958 a named branch with two heads. You will need to explicitly specify source
959 and/or destination.
959 and/or destination.
960
960
961 If you need to use a tool to automate merge/conflict decisions, you
961 If you need to use a tool to automate merge/conflict decisions, you
962 can specify one with ``--tool``; see :hg:`help merge-tools`.
962 can specify one with ``--tool``; see :hg:`help merge-tools`.
963 As a caveat: the tool will not be used to mediate when a file was
963 As a caveat: the tool will not be used to mediate when a file was
964 deleted; there is no hook presently available for this.
964 deleted; there is no hook presently available for this.
965
965
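For example, to prefer the built-in three-way merge style (an illustrative
invocation)::

      hg rebase -d default --tool :merge3
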
966 If a rebase is interrupted to manually resolve a conflict, it can be
966 If a rebase is interrupted to manually resolve a conflict, it can be
967 continued with --continue/-c, aborted with --abort/-a, or stopped with
967 continued with --continue/-c, aborted with --abort/-a, or stopped with
968 --stop.
968 --stop.
969
969
970 .. container:: verbose
970 .. container:: verbose
971
971
972 Examples:
972 Examples:
973
973
974 - move "local changes" (current commit back to branching point)
974 - move "local changes" (current commit back to branching point)
975 to the current branch tip after a pull::
975 to the current branch tip after a pull::
976
976
977 hg rebase
977 hg rebase
978
978
979 - move a single changeset to the stable branch::
979 - move a single changeset to the stable branch::
980
980
981 hg rebase -r 5f493448 -d stable
981 hg rebase -r 5f493448 -d stable
982
982
983 - splice a commit and all its descendants onto another part of history::
983 - splice a commit and all its descendants onto another part of history::
984
984
985 hg rebase --source c0c3 --dest 4cf9
985 hg rebase --source c0c3 --dest 4cf9
986
986
987 - rebase everything on a branch marked by a bookmark onto the
987 - rebase everything on a branch marked by a bookmark onto the
988 default branch::
988 default branch::
989
989
990 hg rebase --base myfeature --dest default
990 hg rebase --base myfeature --dest default
991
991
992 - collapse a sequence of changes into a single commit::
992 - collapse a sequence of changes into a single commit::
993
993
994 hg rebase --collapse -r 1520:1525 -d .
994 hg rebase --collapse -r 1520:1525 -d .
995
995
996 - move a named branch while preserving its name::
996 - move a named branch while preserving its name::
997
997
998 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
998 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
999
999
1000 - stabilize orphaned changesets so history looks linear::
1000 - stabilize orphaned changesets so history looks linear::
1001
1001
1002 hg rebase -r 'orphan()-obsolete()'\
1002 hg rebase -r 'orphan()-obsolete()'\
1003 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1003 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1004 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1004 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1005
1005
1006 Configuration Options:
1006 Configuration Options:
1007
1007
1008 You can make rebase require a destination if you set the following config
1008 You can make rebase require a destination if you set the following config
1009 option::
1009 option::
1010
1010
1011 [commands]
1011 [commands]
1012 rebase.requiredest = True
1012 rebase.requiredest = True
1013
1013
1014 By default, rebase will close the transaction after each commit. For
1014 By default, rebase will close the transaction after each commit. For
1015 performance purposes, you can configure rebase to use a single transaction
1015 performance purposes, you can configure rebase to use a single transaction
1016 across the entire rebase. WARNING: This setting introduces a significant
1016 across the entire rebase. WARNING: This setting introduces a significant
1017 risk of losing the work you've done in a rebase if the rebase aborts
1017 risk of losing the work you've done in a rebase if the rebase aborts
1018 unexpectedly::
1018 unexpectedly::
1019
1019
1020 [rebase]
1020 [rebase]
1021 singletransaction = True
1021 singletransaction = True
1022
1022
1023 By default, rebase writes to the working copy, but you can configure it to
1023 By default, rebase writes to the working copy, but you can configure it to
1024 run in-memory for better performance. When the rebase is not moving the
1024 run in-memory for better performance. When the rebase is not moving the
1025 parent(s) of the working copy (AKA the "currently checked out changesets"),
1025 parent(s) of the working copy (AKA the "currently checked out changesets"),
1026 this may also allow it to run even if the working copy is dirty::
1026 this may also allow it to run even if the working copy is dirty::
1027
1027
1028 [rebase]
1028 [rebase]
1029 experimental.inmemory = True
1029 experimental.inmemory = True
1030
1030
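The same behaviour can also be requested for a single run via the global
``--config`` flag, for example::

      hg rebase -d default --config rebase.experimental.inmemory=yes
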
1031 Return Values:
1031 Return Values:
1032
1032
1033 Returns 0 on success, 1 if nothing to rebase or there are
1033 Returns 0 on success, 1 if nothing to rebase or there are
1034 unresolved conflicts.
1034 unresolved conflicts.
1035
1035
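For instance, a wrapper script could react to the non-zero case like this
(illustrative shell snippet)::

      hg rebase -d default || echo "nothing to rebase, or conflicts remain"
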
1036 """
1036 """
1037 opts = pycompat.byteskwargs(opts)
1037 opts = pycompat.byteskwargs(opts)
1038 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1038 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1039 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1039 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1040 if action:
1040 if action:
1041 cmdutil.check_incompatible_arguments(
1041 cmdutil.check_incompatible_arguments(
1042 opts, action, [b'confirm', b'dry_run']
1042 opts, action, [b'confirm', b'dry_run']
1043 )
1043 )
1044 cmdutil.check_incompatible_arguments(
1044 cmdutil.check_incompatible_arguments(
1045 opts, action, [b'rev', b'source', b'base', b'dest']
1045 opts, action, [b'rev', b'source', b'base', b'dest']
1046 )
1046 )
1047 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1047 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1048 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1048 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1049
1049
1050 if action or repo.currenttransaction() is not None:
1050 if action or repo.currenttransaction() is not None:
1051 # in-memory rebase is not compatible with resuming rebases.
1051 # in-memory rebase is not compatible with resuming rebases.
1052 # (Or if it is run within a transaction, since the restart logic can
1052 # (Or if it is run within a transaction, since the restart logic can
1053 # fail the entire transaction.)
1053 # fail the entire transaction.)
1054 inmemory = False
1054 inmemory = False
1055
1055
1056 if opts.get(b'auto_orphans'):
1056 if opts.get(b'auto_orphans'):
1057 disallowed_opts = set(opts) - {b'auto_orphans'}
1057 disallowed_opts = set(opts) - {b'auto_orphans'}
1058 cmdutil.check_incompatible_arguments(
1058 cmdutil.check_incompatible_arguments(
1059 opts, b'auto_orphans', disallowed_opts
1059 opts, b'auto_orphans', disallowed_opts
1060 )
1060 )
1061
1061
1062 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1062 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1063 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1063 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1064 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1064 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
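# Illustrative expansion (hypothetical revset): "hg rebase --auto-orphans '0::'"
# is roughly rewritten to -r '0:: and orphan()' with a per-source destination
# of '_destautoorphanrebase(SRC)' before the normal machinery below runs.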
1065
1065
1066 if opts.get(b'dry_run') or opts.get(b'confirm'):
1066 if opts.get(b'dry_run') or opts.get(b'confirm'):
1067 return _dryrunrebase(ui, repo, action, opts)
1067 return _dryrunrebase(ui, repo, action, opts)
1068 elif action == b'stop':
1068 elif action == b'stop':
1069 rbsrt = rebaseruntime(repo, ui)
1069 rbsrt = rebaseruntime(repo, ui)
1070 with repo.wlock(), repo.lock():
1070 with repo.wlock(), repo.lock():
1071 rbsrt.restorestatus()
1071 rbsrt.restorestatus()
1072 if rbsrt.collapsef:
1072 if rbsrt.collapsef:
1073 raise error.Abort(_(b"cannot stop in --collapse session"))
1073 raise error.Abort(_(b"cannot stop in --collapse session"))
1074 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1074 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1075 if not (rbsrt.keepf or allowunstable):
1075 if not (rbsrt.keepf or allowunstable):
1076 raise error.Abort(
1076 raise error.Abort(
1077 _(
1077 _(
1078 b"cannot remove original changesets with"
1078 b"cannot remove original changesets with"
1079 b" unrebased descendants"
1079 b" unrebased descendants"
1080 ),
1080 ),
1081 hint=_(
1081 hint=_(
1082 b'either enable obsmarkers to allow unstable '
1082 b'either enable obsmarkers to allow unstable '
1083 b'revisions or use --keep to keep original '
1083 b'revisions or use --keep to keep original '
1084 b'changesets'
1084 b'changesets'
1085 ),
1085 ),
1086 )
1086 )
1087 # update to the current working revision
1087 # update to the current working revision
1088 # to clear interrupted merge
1088 # to clear interrupted merge
1089 mergemod.clean_update(repo[rbsrt.originalwd])
1089 mergemod.clean_update(repo[rbsrt.originalwd])
1090 rbsrt._finishrebase()
1090 rbsrt._finishrebase()
1091 return 0
1091 return 0
1092 elif inmemory:
1092 elif inmemory:
1093 try:
1093 try:
1094 # in-memory merge doesn't support conflicts, so if we hit any, abort
1094 # in-memory merge doesn't support conflicts, so if we hit any, abort
1095 # and re-run as an on-disk merge.
1095 # and re-run as an on-disk merge.
1096 overrides = {(b'rebase', b'singletransaction'): True}
1096 overrides = {(b'rebase', b'singletransaction'): True}
1097 with ui.configoverride(overrides, b'rebase'):
1097 with ui.configoverride(overrides, b'rebase'):
1098 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1098 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1099 except error.InMemoryMergeConflictsError:
1099 except error.InMemoryMergeConflictsError:
1100 ui.warn(
1100 ui.warn(
1101 _(
1101 _(
1102 b'hit merge conflicts; re-running rebase without in-memory'
1102 b'hit merge conflicts; re-running rebase without in-memory'
1103 b' merge\n'
1103 b' merge\n'
1104 )
1104 )
1105 )
1105 )
1106 clearstatus(repo)
1106 clearstatus(repo)
1107 clearcollapsemsg(repo)
1107 clearcollapsemsg(repo)
1108 return _dorebase(ui, repo, action, opts, inmemory=False)
1108 return _dorebase(ui, repo, action, opts, inmemory=False)
1109 else:
1109 else:
1110 return _dorebase(ui, repo, action, opts)
1110 return _dorebase(ui, repo, action, opts)
1111
1111
1112
1112
1113 def _dryrunrebase(ui, repo, action, opts):
1113 def _dryrunrebase(ui, repo, action, opts):
1114 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1114 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1115 confirm = opts.get(b'confirm')
1115 confirm = opts.get(b'confirm')
1116 if confirm:
1116 if confirm:
1117 ui.status(_(b'starting in-memory rebase\n'))
1117 ui.status(_(b'starting in-memory rebase\n'))
1118 else:
1118 else:
1119 ui.status(
1119 ui.status(
1120 _(b'starting dry-run rebase; repository will not be changed\n')
1120 _(b'starting dry-run rebase; repository will not be changed\n')
1121 )
1121 )
1122 with repo.wlock(), repo.lock():
1122 with repo.wlock(), repo.lock():
1123 needsabort = True
1123 needsabort = True
1124 try:
1124 try:
1125 overrides = {(b'rebase', b'singletransaction'): True}
1125 overrides = {(b'rebase', b'singletransaction'): True}
1126 with ui.configoverride(overrides, b'rebase'):
1126 with ui.configoverride(overrides, b'rebase'):
1127 _origrebase(
1127 _origrebase(
1128 ui, repo, action, opts, rbsrt,
1128 ui, repo, action, opts, rbsrt,
1129 )
1129 )
1130 except error.ConflictResolutionRequired:
1130 except error.ConflictResolutionRequired:
1131 ui.status(_(b'hit a merge conflict\n'))
1131 ui.status(_(b'hit a merge conflict\n'))
1132 return 1
1132 return 1
1133 except error.Abort:
1133 except error.Abort:
1134 needsabort = False
1134 needsabort = False
1135 raise
1135 raise
1136 else:
1136 else:
1137 if confirm:
1137 if confirm:
1138 ui.status(_(b'rebase completed successfully\n'))
1138 ui.status(_(b'rebase completed successfully\n'))
1139 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1139 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1140 # finish unfinished rebase
1140 # finish unfinished rebase
1141 rbsrt._finishrebase()
1141 rbsrt._finishrebase()
1142 else:
1142 else:
1143 rbsrt._prepareabortorcontinue(
1143 rbsrt._prepareabortorcontinue(
1144 isabort=True,
1144 isabort=True,
1145 backup=False,
1145 backup=False,
1146 suppwarns=True,
1146 suppwarns=True,
1147 confirm=confirm,
1147 confirm=confirm,
1148 )
1148 )
1149 needsabort = False
1149 needsabort = False
1150 else:
1150 else:
1151 ui.status(
1151 ui.status(
1152 _(
1152 _(
1153 b'dry-run rebase completed successfully; run without'
1153 b'dry-run rebase completed successfully; run without'
1154 b' -n/--dry-run to perform this rebase\n'
1154 b' -n/--dry-run to perform this rebase\n'
1155 )
1155 )
1156 )
1156 )
1157 return 0
1157 return 0
1158 finally:
1158 finally:
1159 if needsabort:
1159 if needsabort:
1160 # no need to store backup in case of dryrun
1160 # no need to store backup in case of dryrun
1161 rbsrt._prepareabortorcontinue(
1161 rbsrt._prepareabortorcontinue(
1162 isabort=True,
1162 isabort=True,
1163 backup=False,
1163 backup=False,
1164 suppwarns=True,
1164 suppwarns=True,
1165 dryrun=opts.get(b'dry_run'),
1165 dryrun=opts.get(b'dry_run'),
1166 )
1166 )
1167
1167
1168
1168
1169 def _dorebase(ui, repo, action, opts, inmemory=False):
1169 def _dorebase(ui, repo, action, opts, inmemory=False):
1170 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1170 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1171 return _origrebase(ui, repo, action, opts, rbsrt)
1171 return _origrebase(ui, repo, action, opts, rbsrt)
1172
1172
1173
1173
1174 def _origrebase(ui, repo, action, opts, rbsrt):
1174 def _origrebase(ui, repo, action, opts, rbsrt):
1175 assert action != b'stop'
1175 assert action != b'stop'
1176 with repo.wlock(), repo.lock():
1176 with repo.wlock(), repo.lock():
1177 if opts.get(b'interactive'):
1177 if opts.get(b'interactive'):
1178 try:
1178 try:
1179 if extensions.find(b'histedit'):
1179 if extensions.find(b'histedit'):
1180 enablehistedit = b''
1180 enablehistedit = b''
1181 except KeyError:
1181 except KeyError:
1182 enablehistedit = b" --config extensions.histedit="
1182 enablehistedit = b" --config extensions.histedit="
1183 help = b"hg%s help -e histedit" % enablehistedit
1183 help = b"hg%s help -e histedit" % enablehistedit
1184 msg = (
1184 msg = (
1185 _(
1185 _(
1186 b"interactive history editing is supported by the "
1186 b"interactive history editing is supported by the "
1187 b"'histedit' extension (see \"%s\")"
1187 b"'histedit' extension (see \"%s\")"
1188 )
1188 )
1189 % help
1189 % help
1190 )
1190 )
1191 raise error.Abort(msg)
1191 raise error.Abort(msg)
1192
1192
1193 if rbsrt.collapsemsg and not rbsrt.collapsef:
1193 if rbsrt.collapsemsg and not rbsrt.collapsef:
1194 raise error.Abort(_(b'message can only be specified with collapse'))
1194 raise error.Abort(_(b'message can only be specified with collapse'))
1195
1195
1196 if action:
1196 if action:
1197 if rbsrt.collapsef:
1197 if rbsrt.collapsef:
1198 raise error.Abort(
1198 raise error.Abort(
1199 _(b'cannot use collapse with continue or abort')
1199 _(b'cannot use collapse with continue or abort')
1200 )
1200 )
1201 if action == b'abort' and opts.get(b'tool', False):
1201 if action == b'abort' and opts.get(b'tool', False):
1202 ui.warn(_(b'tool option will be ignored\n'))
1202 ui.warn(_(b'tool option will be ignored\n'))
1203 if action == b'continue':
1203 if action == b'continue':
1204 ms = mergestatemod.mergestate.read(repo)
1204 ms = mergestatemod.mergestate.read(repo)
1205 mergeutil.checkunresolved(ms)
1205 mergeutil.checkunresolved(ms)
1206
1206
1207 retcode = rbsrt._prepareabortorcontinue(
1207 retcode = rbsrt._prepareabortorcontinue(
1208 isabort=(action == b'abort')
1208 isabort=(action == b'abort')
1209 )
1209 )
1210 if retcode is not None:
1210 if retcode is not None:
1211 return retcode
1211 return retcode
1212 else:
1212 else:
1213 # search default destination in this space
1213 # search default destination in this space
1214 # used in the 'hg pull --rebase' case, see issue 5214.
1214 # used in the 'hg pull --rebase' case, see issue 5214.
1215 destspace = opts.get(b'_destspace')
1215 destspace = opts.get(b'_destspace')
1216 destmap = _definedestmap(
1216 destmap = _definedestmap(
1217 ui,
1217 ui,
1218 repo,
1218 repo,
1219 rbsrt.inmemory,
1219 rbsrt.inmemory,
1220 opts.get(b'dest', None),
1220 opts.get(b'dest', None),
1221 opts.get(b'source', []),
1221 opts.get(b'source', []),
1222 opts.get(b'base', []),
1222 opts.get(b'base', []),
1223 opts.get(b'rev', []),
1223 opts.get(b'rev', []),
1224 destspace=destspace,
1224 destspace=destspace,
1225 )
1225 )
1226 retcode = rbsrt._preparenewrebase(destmap)
1226 retcode = rbsrt._preparenewrebase(destmap)
1227 if retcode is not None:
1227 if retcode is not None:
1228 return retcode
1228 return retcode
1229 storecollapsemsg(repo, rbsrt.collapsemsg)
1229 storecollapsemsg(repo, rbsrt.collapsemsg)
1230
1230
1231 tr = None
1231 tr = None
1232
1232
1233 singletr = ui.configbool(b'rebase', b'singletransaction')
1233 singletr = ui.configbool(b'rebase', b'singletransaction')
1234 if singletr:
1234 if singletr:
1235 tr = repo.transaction(b'rebase')
1235 tr = repo.transaction(b'rebase')
1236
1236
1237 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1237 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1238 # one transaction here. Otherwise, transactions are obtained when
1238 # one transaction here. Otherwise, transactions are obtained when
1239 # committing each node, which is slower but allows partial success.
1239 # committing each node, which is slower but allows partial success.
1240 with util.acceptintervention(tr):
1240 with util.acceptintervention(tr):
1241 # Same logic for the dirstate guard, except we don't create one when
1241 # Same logic for the dirstate guard, except we don't create one when
1242 # rebasing in-memory (it's not needed).
1242 # rebasing in-memory (it's not needed).
1243 dsguard = None
1243 dsguard = None
1244 if singletr and not rbsrt.inmemory:
1244 if singletr and not rbsrt.inmemory:
1245 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1245 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1246 with util.acceptintervention(dsguard):
1246 with util.acceptintervention(dsguard):
1247 rbsrt._performrebase(tr)
1247 rbsrt._performrebase(tr)
1248 if not rbsrt.dryrun:
1248 if not rbsrt.dryrun:
1249 rbsrt._finishrebase()
1249 rbsrt._finishrebase()
1250
1250
1251
1251
1252 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1252 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1253 """use revisions argument to define destmap {srcrev: destrev}"""
1253 """use revisions argument to define destmap {srcrev: destrev}"""
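# Illustrative shape of the result (hypothetical revision numbers): a single
# destination yields e.g. {5: 2, 6: 2}, while a ``SRC``-based destination
# spec can map each source differently, e.g. {5: 2, 6: 4}.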
1254 if revf is None:
1254 if revf is None:
1255 revf = []
1255 revf = []
1256
1256
1257 # destspace is here to work around issues with `hg pull --rebase`; see
1257 # destspace is here to work around issues with `hg pull --rebase`; see
1258 # issue5214 for details
1258 # issue5214 for details
1259
1259
1260 cmdutil.checkunfinished(repo)
1260 cmdutil.checkunfinished(repo)
1261 if not inmemory:
1261 if not inmemory:
1262 cmdutil.bailifchanged(repo)
1262 cmdutil.bailifchanged(repo)
1263
1263
1264 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1264 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1265 raise error.Abort(
1265 raise error.Abort(
1266 _(b'you must specify a destination'),
1266 _(b'you must specify a destination'),
1267 hint=_(b'use: hg rebase -d REV'),
1267 hint=_(b'use: hg rebase -d REV'),
1268 )
1268 )
1269
1269
1270 dest = None
1270 dest = None
1271
1271
1272 if revf:
1272 if revf:
1273 rebaseset = scmutil.revrange(repo, revf)
1273 rebaseset = scmutil.revrange(repo, revf)
1274 if not rebaseset:
1274 if not rebaseset:
1275 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1275 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1276 return None
1276 return None
1277 elif srcf:
1277 elif srcf:
1278 src = scmutil.revrange(repo, srcf)
1278 src = scmutil.revrange(repo, srcf)
1279 if not src:
1279 if not src:
1280 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1280 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1281 return None
1281 return None
1282 # `+ (%ld)` to work around `wdir()::` being empty
1282 # `+ (%ld)` to work around `wdir()::` being empty
1283 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1283 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1284 else:
1284 else:
1285 base = scmutil.revrange(repo, basef or [b'.'])
1285 base = scmutil.revrange(repo, basef or [b'.'])
1286 if not base:
1286 if not base:
1287 ui.status(
1287 ui.status(
1288 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1288 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1289 )
1289 )
1290 return None
1290 return None
1291 if destf:
1291 if destf:
1292 # --base does not support multiple destinations
1292 # --base does not support multiple destinations
1293 dest = scmutil.revsingle(repo, destf)
1293 dest = scmutil.revsingle(repo, destf)
1294 else:
1294 else:
1295 dest = repo[_destrebase(repo, base, destspace=destspace)]
1295 dest = repo[_destrebase(repo, base, destspace=destspace)]
1296 destf = bytes(dest)
1296 destf = bytes(dest)
1297
1297
1298 roots = [] # selected children of branching points
1298 roots = [] # selected children of branching points
1299 bpbase = {} # {branchingpoint: [origbase]}
1299 bpbase = {} # {branchingpoint: [origbase]}
1300 for b in base: # group bases by branching points
1300 for b in base: # group bases by branching points
1301 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1301 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1302 bpbase[bp] = bpbase.get(bp, []) + [b]
1302 bpbase[bp] = bpbase.get(bp, []) + [b]
1303 if None in bpbase:
1303 if None in bpbase:
1304 # emulate the old behavior, showing "nothing to rebase" (a better
1304 # emulate the old behavior, showing "nothing to rebase" (a better
1305 # behavior may be to abort with a "cannot find branching point" error)
1305 # behavior may be to abort with a "cannot find branching point" error)
1306 bpbase.clear()
1306 bpbase.clear()
1307 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1307 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1308 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1308 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1309
1309
1310 rebaseset = repo.revs(b'%ld::', roots)
1310 rebaseset = repo.revs(b'%ld::', roots)
1311
1311
1312 if not rebaseset:
1312 if not rebaseset:
1313 # transform to list because smartsets are not comparable to
1313 # transform to list because smartsets are not comparable to
1314 # lists. This should be improved to honor laziness of
1314 # lists. This should be improved to honor laziness of
1315 # smartset.
1315 # smartset.
1316 if list(base) == [dest.rev()]:
1316 if list(base) == [dest.rev()]:
1317 if basef:
1317 if basef:
1318 ui.status(
1318 ui.status(
1319 _(
1319 _(
1320 b'nothing to rebase - %s is both "base"'
1320 b'nothing to rebase - %s is both "base"'
1321 b' and destination\n'
1321 b' and destination\n'
1322 )
1322 )
1323 % dest
1323 % dest
1324 )
1324 )
1325 else:
1325 else:
1326 ui.status(
1326 ui.status(
1327 _(
1327 _(
1328 b'nothing to rebase - working directory '
1328 b'nothing to rebase - working directory '
1329 b'parent is also destination\n'
1329 b'parent is also destination\n'
1330 )
1330 )
1331 )
1331 )
1332 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1332 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1333 if basef:
1333 if basef:
1334 ui.status(
1334 ui.status(
1335 _(
1335 _(
1336 b'nothing to rebase - "base" %s is '
1336 b'nothing to rebase - "base" %s is '
1337 b'already an ancestor of destination '
1337 b'already an ancestor of destination '
1338 b'%s\n'
1338 b'%s\n'
1339 )
1339 )
1340 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1340 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1341 )
1341 )
1342 else:
1342 else:
1343 ui.status(
1343 ui.status(
1344 _(
1344 _(
1345 b'nothing to rebase - working '
1345 b'nothing to rebase - working '
1346 b'directory parent is already an '
1346 b'directory parent is already an '
1347 b'ancestor of destination %s\n'
1347 b'ancestor of destination %s\n'
1348 )
1348 )
1349 % dest
1349 % dest
1350 )
1350 )
1351 else: # can it happen?
1351 else: # can it happen?
1352 ui.status(
1352 ui.status(
1353 _(b'nothing to rebase from %s to %s\n')
1353 _(b'nothing to rebase from %s to %s\n')
1354 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1354 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1355 )
1355 )
1356 return None
1356 return None
1357
1357
1358 if nodemod.wdirrev in rebaseset:
1358 if nodemod.wdirrev in rebaseset:
1359 raise error.Abort(_(b'cannot rebase the working copy'))
1359 raise error.Abort(_(b'cannot rebase the working copy'))
1360 rebasingwcp = repo[b'.'].rev() in rebaseset
1360 rebasingwcp = repo[b'.'].rev() in rebaseset
1361 ui.log(
1361 ui.log(
1362 b"rebase",
1362 b"rebase",
1363 b"rebasing working copy parent: %r\n",
1363 b"rebasing working copy parent: %r\n",
1364 rebasingwcp,
1364 rebasingwcp,
1365 rebase_rebasing_wcp=rebasingwcp,
1365 rebase_rebasing_wcp=rebasingwcp,
1366 )
1366 )
1367 if inmemory and rebasingwcp:
1367 if inmemory and rebasingwcp:
1368 # Check these since we did not before.
1368 # Check these since we did not before.
1369 cmdutil.checkunfinished(repo)
1369 cmdutil.checkunfinished(repo)
1370 cmdutil.bailifchanged(repo)
1370 cmdutil.bailifchanged(repo)
1371
1371
1372 if not destf:
1372 if not destf:
1373 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1373 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1374 destf = bytes(dest)
1374 destf = bytes(dest)
1375
1375
1376 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1376 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1377 alias = {b'ALLSRC': allsrc}
1377 alias = {b'ALLSRC': allsrc}
1378
1378
1379 if dest is None:
1379 if dest is None:
1380 try:
1380 try:
1381 # fast path: try to resolve dest without SRC alias
1381 # fast path: try to resolve dest without SRC alias
1382 dest = scmutil.revsingle(repo, destf, localalias=alias)
1382 dest = scmutil.revsingle(repo, destf, localalias=alias)
1383 except error.RepoLookupError:
1383 except error.RepoLookupError:
1384 # multi-dest path: resolve dest for each SRC separately
1384 # multi-dest path: resolve dest for each SRC separately
1385 destmap = {}
1385 destmap = {}
1386 for r in rebaseset:
1386 for r in rebaseset:
1387 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1387 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1388 # use repo.anyrevs instead of scmutil.revsingle because we
1388 # use repo.anyrevs instead of scmutil.revsingle because we
1389 # don't want to abort if destset is empty.
1389 # don't want to abort if destset is empty.
1390 destset = repo.anyrevs([destf], user=True, localalias=alias)
1390 destset = repo.anyrevs([destf], user=True, localalias=alias)
1391 size = len(destset)
1391 size = len(destset)
1392 if size == 1:
1392 if size == 1:
1393 destmap[r] = destset.first()
1393 destmap[r] = destset.first()
1394 elif size == 0:
1394 elif size == 0:
1395 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1395 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1396 else:
1396 else:
1397 raise error.Abort(
1397 raise error.Abort(
1398 _(b'rebase destination for %s is not unique') % repo[r]
1398 _(b'rebase destination for %s is not unique') % repo[r]
1399 )
1399 )
1400
1400
1401 if dest is not None:
1401 if dest is not None:
1402 # single-dest case: assign dest to each rev in rebaseset
1402 # single-dest case: assign dest to each rev in rebaseset
1403 destrev = dest.rev()
1403 destrev = dest.rev()
1404 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1404 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1405
1405
1406 if not destmap:
1406 if not destmap:
1407 ui.status(_(b'nothing to rebase - empty destination\n'))
1407 ui.status(_(b'nothing to rebase - empty destination\n'))
1408 return None
1408 return None
1409
1409
1410 return destmap
1410 return destmap
1411
1411
1412
1412
1413 def externalparent(repo, state, destancestors):
1413 def externalparent(repo, state, destancestors):
1414 """Return the revision that should be used as the second parent
1414 """Return the revision that should be used as the second parent
1415 when the revisions in state are collapsed on top of destancestors.
1415 when the revisions in state are collapsed on top of destancestors.
1416 Abort if there is more than one parent.
1416 Abort if there is more than one parent.
1417 """
1417 """
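# Worked example (hypothetical revs): when collapsing {2, 3, 4} and rev 3
# merged in rev 9, where 9 is neither being collapsed nor an ancestor of the
# destination, the single external parent 9 is returned so the collapsed
# commit keeps that merge parent.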
1418 parents = set()
1418 parents = set()
1419 source = min(state)
1419 source = min(state)
1420 for rev in state:
1420 for rev in state:
1421 if rev == source:
1421 if rev == source:
1422 continue
1422 continue
1423 for p in repo[rev].parents():
1423 for p in repo[rev].parents():
1424 if p.rev() not in state and p.rev() not in destancestors:
1424 if p.rev() not in state and p.rev() not in destancestors:
1425 parents.add(p.rev())
1425 parents.add(p.rev())
1426 if not parents:
1426 if not parents:
1427 return nullrev
1427 return nullrev
1428 if len(parents) == 1:
1428 if len(parents) == 1:
1429 return parents.pop()
1429 return parents.pop()
1430 raise error.Abort(
1430 raise error.Abort(
1431 _(
1431 _(
1432 b'unable to collapse on top of %d, there is more '
1432 b'unable to collapse on top of %d, there is more '
1433 b'than one external parent: %s'
1433 b'than one external parent: %s'
1434 )
1434 )
1435 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1435 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1436 )
1436 )
1437
1437
1438
1438
1439 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1439 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1440 '''Commit the in-memory changes with the working context's current parents.
1440 '''Commit the in-memory changes with the working context's current parents.
1441 Return the node of the committed revision.'''
1441 Return the node of the committed revision.'''
1442 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1442 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1443 # ``branch`` (used when passing ``--keepbranches``).
1443 # ``branch`` (used when passing ``--keepbranches``).
1444 branch = None
1444 branch = None
1445 if b'branch' in extra:
1445 if b'branch' in extra:
1446 branch = extra[b'branch']
1446 branch = extra[b'branch']
1447
1447
1448 # FIXME: We call _compact() because it's required to correctly detect
1448 # FIXME: We call _compact() because it's required to correctly detect
1449 # changed files. This was added to fix a regression shortly before the 5.5
1449 # changed files. This was added to fix a regression shortly before the 5.5
1450 # release. A proper fix will be done in the default branch.
1450 # release. A proper fix will be done in the default branch.
1451 wctx._compact()
1451 wctx._compact()
1452 memctx = wctx.tomemctx(
1452 memctx = wctx.tomemctx(
1453 commitmsg,
1453 commitmsg,
1454 date=date,
1454 date=date,
1455 extra=extra,
1455 extra=extra,
1456 user=user,
1456 user=user,
1457 branch=branch,
1457 branch=branch,
1458 editor=editor,
1458 editor=editor,
1459 )
1459 )
1460 if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1460 if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1461 return None
1461 return None
1462 commitres = repo.commitctx(memctx)
1462 commitres = repo.commitctx(memctx)
1463 wctx.clean() # Might be reused
1463 wctx.clean() # Might be reused
1464 return commitres
1464 return commitres
1465
1465
1466
1466
1467 def commitnode(repo, editor, extra, user, date, commitmsg):
1467 def commitnode(repo, editor, extra, user, date, commitmsg):
1468 '''Commit the working directory changes with the dirstate's current parents.
1468 '''Commit the working directory changes with the dirstate's current parents.
1469 Return the node of the committed revision.'''
1469 Return the node of the committed revision.'''
1470 dsguard = util.nullcontextmanager()
1470 dsguard = util.nullcontextmanager()
1471 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1471 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1472 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1472 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1473 with dsguard:
1473 with dsguard:
1474 # Commit might fail if unresolved files exist
1474 # Commit might fail if unresolved files exist
1475 newnode = repo.commit(
1475 newnode = repo.commit(
1476 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1476 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1477 )
1477 )
1478
1478
1479 repo.dirstate.setbranch(repo[newnode].branch())
1479 repo.dirstate.setbranch(repo[newnode].branch())
1480 return newnode
1480 return newnode
1481
1481
1482
1482
1483 def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
1483 def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
1484 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1484 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1485 # Merge phase
1485 # Merge phase
1486 # Update to destination and merge it with local
1486 # Update to destination and merge it with local
1487 p1ctx = repo[p1]
1487 p1ctx = repo[p1]
1488 if wctx.isinmemory():
1488 if wctx.isinmemory():
1489 wctx.setbase(p1ctx)
1489 wctx.setbase(p1ctx)
1490 else:
1490 else:
1491 if repo[b'.'].rev() != p1:
1491 if repo[b'.'].rev() != p1:
1492 repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
1492 repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
1493 mergemod.clean_update(p1ctx)
1493 mergemod.clean_update(p1ctx)
1494 else:
1494 else:
1495 repo.ui.debug(b" already in destination\n")
1495 repo.ui.debug(b" already in destination\n")
1496 # This is, alas, necessary to invalidate workingctx's manifest cache,
1496 # This is, alas, necessary to invalidate workingctx's manifest cache,
1497 # as well as other data we litter on it in other places.
1497 # as well as other data we litter on it in other places.
1498 wctx = repo[None]
1498 wctx = repo[None]
1499 repo.dirstate.write(repo.currenttransaction())
1499 repo.dirstate.write(repo.currenttransaction())
1500 ctx = repo[rev]
1500 ctx = repo[rev]
1501 repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
1501 repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
1502 if base is not None:
1502 if base is not None:
1503 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1503 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1504
1504
1505 # See explanation in merge.graft()
1505 # See explanation in merge.graft()
1506 mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
1506 mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
1507 stats = mergemod._update(
1507 stats = mergemod._update(
1508 repo,
1508 repo,
1509 rev,
1509 rev,
1510 branchmerge=True,
1510 branchmerge=True,
1511 force=True,
1511 force=True,
1512 ancestor=base,
1512 ancestor=base,
1513 mergeancestor=mergeancestor,
1513 mergeancestor=mergeancestor,
1514 labels=[b'dest', b'source'],
1514 labels=[b'dest', b'source'],
1515 wc=wctx,
1515 wc=wctx,
1516 )
1516 )
1517 wctx.setparents(p1ctx.node(), repo[p2].node())
1517 wctx.setparents(p1ctx.node(), repo[p2].node())
1518 if collapse:
1518 if collapse:
1519 copies.graftcopies(wctx, ctx, p1ctx)
1519 copies.graftcopies(wctx, ctx, p1ctx)
1520 else:
1520 else:
1521 # If we're not using --collapse, we need to
1521 # If we're not using --collapse, we need to
1522 # duplicate copies between the revision we're
1522 # duplicate copies between the revision we're
1523 # rebasing and its first parent.
1523 # rebasing and its first parent.
1524 copies.graftcopies(wctx, ctx, ctx.p1())
1524 copies.graftcopies(wctx, ctx, ctx.p1())
1525
1525
1526 if stats.unresolvedcount > 0:
1526 if stats.unresolvedcount > 0:
1527 if wctx.isinmemory():
1527 if wctx.isinmemory():
1528 raise error.InMemoryMergeConflictsError()
1528 raise error.InMemoryMergeConflictsError()
1529 else:
1529 else:
1530 raise error.ConflictResolutionRequired(b'rebase')
1530 raise error.ConflictResolutionRequired(b'rebase')
1531
1531
1532
1532
1533 def adjustdest(repo, rev, destmap, state, skipped):
1533 def adjustdest(repo, rev, destmap, state, skipped):
1534 r"""adjust rebase destination given the current rebase state
1534 r"""adjust rebase destination given the current rebase state
1535
1535
1536 rev is what is being rebased. Return a list of two revs, which are the
1536 rev is what is being rebased. Return a list of two revs, which are the
1537 adjusted destinations for rev's p1 and p2, respectively. If a parent is
1537 adjusted destinations for rev's p1 and p2, respectively. If a parent is
1538 nullrev, return dest without adjustment for it.
1538 nullrev, return dest without adjustment for it.
1539
1539
1540 For example, when rebasing B+E to F and C to G, rebase will first move B
1540 For example, when rebasing B+E to F and C to G, rebase will first move B
1541 to B1, and E's destination will be adjusted from F to B1.
1541 to B1, and E's destination will be adjusted from F to B1.
1542
1542
1543 B1 <- written during rebasing B
1543 B1 <- written during rebasing B
1544 |
1544 |
1545 F <- original destination of B, E
1545 F <- original destination of B, E
1546 |
1546 |
1547 | E <- rev, which is being rebased
1547 | E <- rev, which is being rebased
1548 | |
1548 | |
1549 | D <- prev, one parent of rev being checked
1549 | D <- prev, one parent of rev being checked
1550 | |
1550 | |
1551 | x <- skipped, ex. no successor or successor in (::dest)
1551 | x <- skipped, ex. no successor or successor in (::dest)
1552 | |
1552 | |
1553 | C <- rebased as C', different destination
1553 | C <- rebased as C', different destination
1554 | |
1554 | |
1555 | B <- rebased as B1 C'
1555 | B <- rebased as B1 C'
1556 |/ |
1556 |/ |
1557 A G <- destination of C, different
1557 A G <- destination of C, different
1558
1558
1559 Another example involves merge changesets: for rebase -r C+G+H -d K, rebase will
1559 Another example involves merge changesets: for rebase -r C+G+H -d K, rebase will
1560 first move C to C1 and G to G1, and when it checks H, the adjusted
1560 first move C to C1 and G to G1, and when it checks H, the adjusted
1561 destinations will be [C1, G1].
1561 destinations will be [C1, G1].
1562
1562
1563 H C1 G1
1563 H C1 G1
1564 /| | /
1564 /| | /
1565 F G |/
1565 F G |/
1566 K | | -> K
1566 K | | -> K
1567 | C D |
1567 | C D |
1568 | |/ |
1568 | |/ |
1569 | B | ...
1569 | B | ...
1570 |/ |/
1570 |/ |/
1571 A A
1571 A A
1572
1572
1573 Besides, adjust dest according to existing rebase information. For example,
1573 Besides, adjust dest according to existing rebase information. For example,
1574
1574
1575 B C D B needs to be rebased on top of C, C needs to be rebased on top
1575 B C D B needs to be rebased on top of C, C needs to be rebased on top
1576 \|/ of D. We will rebase C first.
1576 \|/ of D. We will rebase C first.
1577 A
1577 A
1578
1578
1579 C' After rebasing C, when considering B's destination, use C'
1579 C' After rebasing C, when considering B's destination, use C'
1580 | instead of the original C.
1580 | instead of the original C.
1581 B D
1581 B D
1582 \ /
1582 \ /
1583 A
1583 A
1584 """
1584 """
1585 # pick already rebased revs with same dest from state as interesting source
1585 # pick already rebased revs with same dest from state as interesting source
1586 dest = destmap[rev]
1586 dest = destmap[rev]
1587 source = [
1587 source = [
1588 s
1588 s
1589 for s, d in state.items()
1589 for s, d in state.items()
1590 if d > 0 and destmap[s] == dest and s not in skipped
1590 if d > 0 and destmap[s] == dest and s not in skipped
1591 ]
1591 ]
1592
1592
1593 result = []
1593 result = []
1594 for prev in repo.changelog.parentrevs(rev):
1594 for prev in repo.changelog.parentrevs(rev):
1595 adjusted = dest
1595 adjusted = dest
1596 if prev != nullrev:
1596 if prev != nullrev:
1597 candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
1597 candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
1598 if candidate is not None:
1598 if candidate is not None:
1599 adjusted = state[candidate]
1599 adjusted = state[candidate]
1600 if adjusted == dest and dest in state:
1600 if adjusted == dest and dest in state:
1601 adjusted = state[dest]
1601 adjusted = state[dest]
1602 if adjusted == revtodo:
1602 if adjusted == revtodo:
1603 # sortsource should produce an order that makes this impossible
1603 # sortsource should produce an order that makes this impossible
1604 raise error.ProgrammingError(
1604 raise error.ProgrammingError(
1605 b'rev %d should be rebased already at this time' % dest
1605 b'rev %d should be rebased already at this time' % dest
1606 )
1606 )
1607 result.append(adjusted)
1607 result.append(adjusted)
1608 return result
1608 return result
1609
1609
1610
1610
1611 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1611 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1612 """
1612 """
1613 Abort if rebase will create divergence or rebase is noop because of markers
1613 Abort if rebase will create divergence or rebase is noop because of markers
1614
1614
1615 `rebaseobsrevs`: set of obsolete revisions in the source
1615 `rebaseobsrevs`: set of obsolete revisions in the source
1616 `rebaseobsskipped`: set of revisions from source skipped because they have
1616 `rebaseobsskipped`: set of revisions from source skipped because they have
1617 successors in destination or no non-obsolete successor.
1617 successors in destination or no non-obsolete successor.
1618 """
1618 """
1619 # Obsolete node with successors not in dest leads to divergence
1619 # Obsolete node with successors not in dest leads to divergence
1620 divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
1620 divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
1621 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1621 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1622
1622
1623 if divergencebasecandidates and not divergenceok:
1623 if divergencebasecandidates and not divergenceok:
1624 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1624 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1625 msg = _(b"this rebase will cause divergences from: %s")
1625 msg = _(b"this rebase will cause divergences from: %s")
1626 h = _(
1626 h = _(
1627 b"to force the rebase please set "
1627 b"to force the rebase please set "
1628 b"experimental.evolution.allowdivergence=True"
1628 b"experimental.evolution.allowdivergence=True"
1629 )
1629 )
1630 raise error.Abort(msg % (b",".join(divhashes),), hint=h)
1630 raise error.Abort(msg % (b",".join(divhashes),), hint=h)
1631
1631
1632
1632
1633 def successorrevs(unfi, rev):
1633 def successorrevs(unfi, rev):
1634 """yield revision numbers for successors of rev"""
1634 """yield revision numbers for successors of rev"""
1635 assert unfi.filtername is None
1635 assert unfi.filtername is None
1636 get_rev = unfi.changelog.index.get_rev
1636 get_rev = unfi.changelog.index.get_rev
1637 for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
1637 for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
1638 r = get_rev(s)
1638 r = get_rev(s)
1639 if r is not None:
1639 if r is not None:
1640 yield r
1640 yield r
1641
1641
1642
1642
1643 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
1643 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
1644 """Return new parents and optionally a merge base for rev being rebased
1644 """Return new parents and optionally a merge base for rev being rebased
1645
1645
1646 The destination specified by "dest" cannot always be used directly because
1646 The destination specified by "dest" cannot always be used directly because
1647 a previous rebase result could affect the destination. For example,
1647 a previous rebase result could affect the destination. For example,
1648
1648
1649 D E rebase -r C+D+E -d B
1649 D E rebase -r C+D+E -d B
1650 |/ C will be rebased to C'
1650 |/ C will be rebased to C'
1651 B C D's new destination will be C' instead of B
1651 B C D's new destination will be C' instead of B
1652 |/ E's new destination will be C' instead of B
1652 |/ E's new destination will be C' instead of B
1653 A
1653 A
1654
1654
1655 The new parents of a merge are slightly more complicated. See the comment
1655 The new parents of a merge are slightly more complicated. See the comment
1656 block below.
1656 block below.
1657 """
1657 """
1658 # use unfiltered changelog since successorrevs may return filtered nodes
1658 # use unfiltered changelog since successorrevs may return filtered nodes
1659 assert repo.filtername is None
1659 assert repo.filtername is None
1660 cl = repo.changelog
1660 cl = repo.changelog
1661 isancestor = cl.isancestorrev
1661 isancestor = cl.isancestorrev
1662
1662
1663 dest = destmap[rev]
1663 dest = destmap[rev]
1664 oldps = repo.changelog.parentrevs(rev) # old parents
1664 oldps = repo.changelog.parentrevs(rev) # old parents
1665 newps = [nullrev, nullrev] # new parents
1665 newps = [nullrev, nullrev] # new parents
1666 dests = adjustdest(repo, rev, destmap, state, skipped)
1666 dests = adjustdest(repo, rev, destmap, state, skipped)
1667 bases = list(oldps) # merge base candidates, initially just old parents
1667 bases = list(oldps) # merge base candidates, initially just old parents
1668
1668
1669 if all(r == nullrev for r in oldps[1:]):
1669 if all(r == nullrev for r in oldps[1:]):
1670 # For non-merge changeset, just move p to adjusted dest as requested.
1670 # For non-merge changeset, just move p to adjusted dest as requested.
1671 newps[0] = dests[0]
1671 newps[0] = dests[0]
1672 else:
1672 else:
1673 # For merge changeset, if we move p to dests[i] unconditionally, both
1673 # For merge changeset, if we move p to dests[i] unconditionally, both
1674 # parents may change and the end result looks like "the merge loses a
1674 # parents may change and the end result looks like "the merge loses a
1675 # parent", which is a surprise. This is a limitation because "--dest" only
1675 # parent", which is a surprise. This is a limitation because "--dest" only
1676 # accepts one dest per src.
1676 # accepts one dest per src.
1677 #
1677 #
1678 # Therefore, only move p with reasonable conditions (in this order):
1678 # Therefore, only move p with reasonable conditions (in this order):
1679 # 1. use dest, if dest is a descendant of (p or one of p's successors)
1679 # 1. use dest, if dest is a descendant of (p or one of p's successors)
1680 # 2. use p's rebased result, if p is rebased (state[p] > 0)
1680 # 2. use p's rebased result, if p is rebased (state[p] > 0)
1681 #
1681 #
1682 # Comparing with adjustdest, the logic here does some additional work:
1682 # Comparing with adjustdest, the logic here does some additional work:
1683 # 1. decide which parents will not be moved towards dest
1683 # 1. decide which parents will not be moved towards dest
1684 # 2. if the above decision is "no", should a parent still be moved
1684 # 2. if the above decision is "no", should a parent still be moved
1685 # because it was rebased?
1685 # because it was rebased?
1686 #
1686 #
1687 # For example:
1687 # For example:
1688 #
1688 #
1689 # C # "rebase -r C -d D" is an error since none of the parents
1689 # C # "rebase -r C -d D" is an error since none of the parents
1690 # /| # can be moved. "rebase -r B+C -d D" will move C's parent
1690 # /| # can be moved. "rebase -r B+C -d D" will move C's parent
1691 # A B D # B (using rule "2."), since B will be rebased.
1691 # A B D # B (using rule "2."), since B will be rebased.
1692 #
1692 #
1693 # The loop tries not to rely on the fact that a Mercurial node has
1693 # The loop tries not to rely on the fact that a Mercurial node has
1694 # at most 2 parents.
1694 # at most 2 parents.
1695 for i, p in enumerate(oldps):
1695 for i, p in enumerate(oldps):
1696 np = p # new parent
1696 np = p # new parent
1697 if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
1697 if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
1698 np = dests[i]
1698 np = dests[i]
1699 elif p in state and state[p] > 0:
1699 elif p in state and state[p] > 0:
1700 np = state[p]
1700 np = state[p]
1701
1701
1702 # If one parent becomes an ancestor of the other, drop the ancestor
1702 # If one parent becomes an ancestor of the other, drop the ancestor
1703 for j, x in enumerate(newps[:i]):
1703 for j, x in enumerate(newps[:i]):
1704 if x == nullrev:
1704 if x == nullrev:
1705 continue
1705 continue
1706 if isancestor(np, x): # CASE-1
1706 if isancestor(np, x): # CASE-1
1707 np = nullrev
1707 np = nullrev
1708 elif isancestor(x, np): # CASE-2
1708 elif isancestor(x, np): # CASE-2
1709 newps[j] = np
1709 newps[j] = np
1710 np = nullrev
1710 np = nullrev
1711 # New parents forming an ancestor relationship does not
1711 # New parents forming an ancestor relationship does not
1712 # mean the old parents have a similar relationship. Do not
1712 # mean the old parents have a similar relationship. Do not
1713 # set bases[x] to nullrev.
1713 # set bases[x] to nullrev.
1714 bases[j], bases[i] = bases[i], bases[j]
1714 bases[j], bases[i] = bases[i], bases[j]
1715
1715
1716 newps[i] = np
1716 newps[i] = np
1717
1717
1718 # "rebasenode" updates to new p1, and the old p1 will be used as merge
1718 # "rebasenode" updates to new p1, and the old p1 will be used as merge
1719 # base. If only p2 changes, merging using unchanged p1 as merge base is
1719 # base. If only p2 changes, merging using unchanged p1 as merge base is
1720 # suboptimal. Therefore swap parents to make the merge sane.
1720 # suboptimal. Therefore swap parents to make the merge sane.
1721 if newps[1] != nullrev and oldps[0] == newps[0]:
1721 if newps[1] != nullrev and oldps[0] == newps[0]:
1722 assert len(newps) == 2 and len(oldps) == 2
1722 assert len(newps) == 2 and len(oldps) == 2
1723 newps.reverse()
1723 newps.reverse()
1724 bases.reverse()
1724 bases.reverse()
1725
1725
1726 # No parent change might be an error because we fail to make rev a
1726 # No parent change might be an error because we fail to make rev a
1727 # descendant of the requested dest. This can happen, for example:
1727 # descendant of the requested dest. This can happen, for example:
1728 #
1728 #
1729 # C # rebase -r C -d D
1729 # C # rebase -r C -d D
1730 # /| # None of A and B will be changed to D and rebase fails.
1730 # /| # None of A and B will be changed to D and rebase fails.
1731 # A B D
1731 # A B D
1732 if set(newps) == set(oldps) and dest not in newps:
1732 if set(newps) == set(oldps) and dest not in newps:
1733 raise error.Abort(
1733 raise error.Abort(
1734 _(
1734 _(
1735 b'cannot rebase %d:%s without '
1735 b'cannot rebase %d:%s without '
1736 b'moving at least one of its parents'
1736 b'moving at least one of its parents'
1737 )
1737 )
1738 % (rev, repo[rev])
1738 % (rev, repo[rev])
1739 )
1739 )
1740
1740
1741 # Source should not be ancestor of dest. The check here guarantees it's
1741 # Source should not be ancestor of dest. The check here guarantees it's
1742 # impossible. With multi-dest, the initial check does not cover complex
1742 # impossible. With multi-dest, the initial check does not cover complex
1743 # cases since we don't have abstractions to dry-run rebase cheaply.
1743 # cases since we don't have abstractions to dry-run rebase cheaply.
1744 if any(p != nullrev and isancestor(rev, p) for p in newps):
1744 if any(p != nullrev and isancestor(rev, p) for p in newps):
1745 raise error.Abort(_(b'source is ancestor of destination'))
1745 raise error.Abort(_(b'source is ancestor of destination'))
1746
1746
1747 # Check if the merge will contain unwanted changes. That may happen if
1747 # Check if the merge will contain unwanted changes. That may happen if
1748 # there are multiple special (non-changelog ancestor) merge bases, which
1748 # there are multiple special (non-changelog ancestor) merge bases, which
1749 # cannot be handled well by the 3-way merge algorithm. For example:
1749 # cannot be handled well by the 3-way merge algorithm. For example:
1750 #
1750 #
1751 # F
1751 # F
1752 # /|
1752 # /|
1753 # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
1753 # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
1754 # | | # as merge base, the difference between D and F will include
1754 # | | # as merge base, the difference between D and F will include
1755 # B C # C, so the rebased F will contain C surprisingly. If "E" was
1755 # B C # C, so the rebased F will contain C surprisingly. If "E" was
1756 # |/ # chosen, the rebased F will contain B.
1756 # |/ # chosen, the rebased F will contain B.
1757 # A Z
1757 # A Z
1758 #
1758 #
1759 # But our merge base candidates (D and E in above case) could still be
1759 # But our merge base candidates (D and E in above case) could still be
1760 # better than the default (ancestor(F, Z) == null). Therefore still
1760 # better than the default (ancestor(F, Z) == null). Therefore still
1761 # pick one (so choose p1 above).
1761 # pick one (so choose p1 above).
1762 if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
1762 if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
1763 unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
1763 unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
1764 for i, base in enumerate(bases):
1764 for i, base in enumerate(bases):
1765 if base == nullrev or base in newps:
1765 if base == nullrev or base in newps:
1766 continue
1766 continue
1767 # Revisions in the side (not chosen as merge base) branch that
1767 # Revisions in the side (not chosen as merge base) branch that
1768 # might contain "surprising" contents
1768 # might contain "surprising" contents
1769 other_bases = set(bases) - {base}
1769 other_bases = set(bases) - {base}
1770 siderevs = list(
1770 siderevs = list(
1771 repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
1771 repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
1772 )
1772 )
1773
1773
1774 # If those revisions are covered by rebaseset, the result is good.
1774 # If those revisions are covered by rebaseset, the result is good.
1775 # A merge in rebaseset would be considered to cover its ancestors.
1775 # A merge in rebaseset would be considered to cover its ancestors.
1776 if siderevs:
1776 if siderevs:
1777 rebaseset = [
1777 rebaseset = [
1778 r for r, d in state.items() if d > 0 and r not in obsskipped
1778 r for r, d in state.items() if d > 0 and r not in obsskipped
1779 ]
1779 ]
1780 merges = [
1780 merges = [
1781 r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
1781 r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
1782 ]
1782 ]
1783 unwanted[i] = list(
1783 unwanted[i] = list(
1784 repo.revs(
1784 repo.revs(
1785 b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
1785 b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
1786 )
1786 )
1787 )
1787 )
1788
1788
1789 if any(revs is not None for revs in unwanted):
1789 if any(revs is not None for revs in unwanted):
1790 # Choose a merge base that has a minimal number of unwanted revs.
1790 # Choose a merge base that has a minimal number of unwanted revs.
1791 l, i = min(
1791 l, i = min(
1792 (len(revs), i)
1792 (len(revs), i)
1793 for i, revs in enumerate(unwanted)
1793 for i, revs in enumerate(unwanted)
1794 if revs is not None
1794 if revs is not None
1795 )
1795 )
1796
1796
1797 # The merge will include unwanted revisions. Abort now. Revisit this if
1797 # The merge will include unwanted revisions. Abort now. Revisit this if
1798 # we have a more advanced merge algorithm that handles multiple bases.
1798 # we have a more advanced merge algorithm that handles multiple bases.
1799 if l > 0:
1799 if l > 0:
1800 unwanteddesc = _(b' or ').join(
1800 unwanteddesc = _(b' or ').join(
1801 (
1801 (
1802 b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
1802 b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
1803 for revs in unwanted
1803 for revs in unwanted
1804 if revs is not None
1804 if revs is not None
1805 )
1805 )
1806 )
1806 )
1807 raise error.Abort(
1807 raise error.Abort(
1808 _(b'rebasing %d:%s will include unwanted changes from %s')
1808 _(b'rebasing %d:%s will include unwanted changes from %s')
1809 % (rev, repo[rev], unwanteddesc)
1809 % (rev, repo[rev], unwanteddesc)
1810 )
1810 )
1811
1811
1812 # newps[0] should match merge base if possible. Currently, if newps[i]
1812 # newps[0] should match merge base if possible. Currently, if newps[i]
1813 # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
1813 # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
1814 # the other's ancestor. In that case, it's fine to not swap newps here.
1814 # the other's ancestor. In that case, it's fine to not swap newps here.
1815 # (see CASE-1 and CASE-2 above)
1815 # (see CASE-1 and CASE-2 above)
1816 if i != 0:
1816 if i != 0:
1817 if newps[i] != nullrev:
1817 if newps[i] != nullrev:
1818 newps[0], newps[i] = newps[i], newps[0]
1818 newps[0], newps[i] = newps[i], newps[0]
1819 bases[0], bases[i] = bases[i], bases[0]
1819 bases[0], bases[i] = bases[i], bases[0]
1820
1820
1821 # "rebasenode" updates to new p1, use the corresponding merge base.
1821 # "rebasenode" updates to new p1, use the corresponding merge base.
1822 base = bases[0]
1822 base = bases[0]
1823
1823
1824 repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
1824 repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
1825
1825
1826 return newps[0], newps[1], base
1826 return newps[0], newps[1], base
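The candidate selection above boils down to: count the unwanted revisions each candidate merge base would drag into the rebased changeset, keep the candidate with the smallest count, and abort if even the best candidate still pulls something in. A minimal standalone sketch of that idea, using plain Python sets in place of revsets (the helper name and data shapes are illustrative assumptions, not Mercurial API):

def pick_base(candidates):
    # candidates: list of (base, unwanted_revs) pairs; unwanted_revs is None
    # when that candidate was never evaluated (it is nullrev or already one
    # of the new parents).
    scored = [
        (len(unwanted), i)
        for i, (_base, unwanted) in enumerate(candidates)
        if unwanted is not None
    ]
    if not scored:
        return None
    count, i = min(scored)  # fewest unwanted revisions wins, ties break on index
    if count > 0:
        raise ValueError('every candidate would include unwanted changes')
    return candidates[i][0]

# Toy data: choosing 'D' would drag in 'C', choosing 'E' drags in nothing,
# so 'E' is picked as the merge base.
assert pick_base([('D', {'C'}), ('E', set())]) == 'E'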
1827
1827
1828
1828
1829 def isagitpatch(repo, patchname):
1829 def isagitpatch(repo, patchname):
1830 """Return true if the given patch is in git format"""
1830 """Return true if the given patch is in git format"""
1831 mqpatch = os.path.join(repo.mq.path, patchname)
1831 mqpatch = os.path.join(repo.mq.path, patchname)
1832 for line in patch.linereader(open(mqpatch, b'rb')):
1832 for line in patch.linereader(open(mqpatch, b'rb')):
1833 if line.startswith(b'diff --git'):
1833 if line.startswith(b'diff --git'):
1834 return True
1834 return True
1835 return False
1835 return False
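The same "is this a git-format patch?" test can be applied to an in-memory patch text rather than an on-disk mq patch. An illustrative sketch only: git-style patches start their file headers with 'diff --git', while plain hg patches use 'diff -r'.

def looks_like_git_patch(text):
    # scan the raw patch bytes for a git-style file header
    return any(l.startswith(b'diff --git') for l in text.splitlines())

assert looks_like_git_patch(b'diff --git a/f b/f\n--- a/f\n+++ b/f\n')
assert not looks_like_git_patch(b'diff -r 0a1b2c3d4e5f f\n--- a/f\n+++ b/f\n')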
1836
1836
1837
1837
1838 def updatemq(repo, state, skipped, **opts):
1838 def updatemq(repo, state, skipped, **opts):
1839 """Update rebased mq patches - finalize and then import them"""
1839 """Update rebased mq patches - finalize and then import them"""
1840 mqrebase = {}
1840 mqrebase = {}
1841 mq = repo.mq
1841 mq = repo.mq
1842 original_series = mq.fullseries[:]
1842 original_series = mq.fullseries[:]
1843 skippedpatches = set()
1843 skippedpatches = set()
1844
1844
1845 for p in mq.applied:
1845 for p in mq.applied:
1846 rev = repo[p.node].rev()
1846 rev = repo[p.node].rev()
1847 if rev in state:
1847 if rev in state:
1848 repo.ui.debug(
1848 repo.ui.debug(
1849 b'revision %d is an mq patch (%s), finalize it.\n'
1849 b'revision %d is an mq patch (%s), finalize it.\n'
1850 % (rev, p.name)
1850 % (rev, p.name)
1851 )
1851 )
1852 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1852 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1853 else:
1853 else:
1854 # Applied but not rebased, not sure this should happen
1854 # Applied but not rebased, not sure this should happen
1855 skippedpatches.add(p.name)
1855 skippedpatches.add(p.name)
1856
1856
1857 if mqrebase:
1857 if mqrebase:
1858 mq.finish(repo, mqrebase.keys())
1858 mq.finish(repo, mqrebase.keys())
1859
1859
1860 # We must start import from the newest revision
1860 # We must start import from the newest revision
1861 for rev in sorted(mqrebase, reverse=True):
1861 for rev in sorted(mqrebase, reverse=True):
1862 if rev not in skipped:
1862 if rev not in skipped:
1863 name, isgit = mqrebase[rev]
1863 name, isgit = mqrebase[rev]
1864 repo.ui.note(
1864 repo.ui.note(
1865 _(b'updating mq patch %s to %d:%s\n')
1865 _(b'updating mq patch %s to %d:%s\n')
1866 % (name, state[rev], repo[state[rev]])
1866 % (name, state[rev], repo[state[rev]])
1867 )
1867 )
1868 mq.qimport(
1868 mq.qimport(
1869 repo,
1869 repo,
1870 (),
1870 (),
1871 patchname=name,
1871 patchname=name,
1872 git=isgit,
1872 git=isgit,
1873 rev=[b"%d" % state[rev]],
1873 rev=[b"%d" % state[rev]],
1874 )
1874 )
1875 else:
1875 else:
1876 # Rebased and skipped
1876 # Rebased and skipped
1877 skippedpatches.add(mqrebase[rev][0])
1877 skippedpatches.add(mqrebase[rev][0])
1878
1878
1879 # Patches were either applied and rebased and imported in
1879 # Patches were either applied and rebased and imported in
1880 # order, applied and removed or unapplied. Discard the removed
1880 # order, applied and removed or unapplied. Discard the removed
1881 # ones while preserving the original series order and guards.
1881 # ones while preserving the original series order and guards.
1882 newseries = [
1882 newseries = [
1883 s
1883 s
1884 for s in original_series
1884 for s in original_series
1885 if mq.guard_re.split(s, 1)[0] not in skippedpatches
1885 if mq.guard_re.split(s, 1)[0] not in skippedpatches
1886 ]
1886 ]
1887 mq.fullseries[:] = newseries
1887 mq.fullseries[:] = newseries
1888 mq.seriesdirty = True
1888 mq.seriesdirty = True
1889 mq.savedirty()
1889 mq.savedirty()
1890
1890
1891
1891
1892 def storecollapsemsg(repo, collapsemsg):
1892 def storecollapsemsg(repo, collapsemsg):
1893 """Store the collapse message to allow recovery"""
1893 """Store the collapse message to allow recovery"""
1894 collapsemsg = collapsemsg or b''
1894 collapsemsg = collapsemsg or b''
1895 f = repo.vfs(b"last-message.txt", b"w")
1895 f = repo.vfs(b"last-message.txt", b"w")
1896 f.write(b"%s\n" % collapsemsg)
1896 f.write(b"%s\n" % collapsemsg)
1897 f.close()
1897 f.close()
1898
1898
1899
1899
1900 def clearcollapsemsg(repo):
1900 def clearcollapsemsg(repo):
1901 """Remove collapse message file"""
1901 """Remove collapse message file"""
1902 repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1902 repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1903
1903
1904
1904
1905 def restorecollapsemsg(repo, isabort):
1905 def restorecollapsemsg(repo, isabort):
1906 """Restore previously stored collapse message"""
1906 """Restore previously stored collapse message"""
1907 try:
1907 try:
1908 f = repo.vfs(b"last-message.txt")
1908 f = repo.vfs(b"last-message.txt")
1909 collapsemsg = f.readline().strip()
1909 collapsemsg = f.readline().strip()
1910 f.close()
1910 f.close()
1911 except IOError as err:
1911 except IOError as err:
1912 if err.errno != errno.ENOENT:
1912 if err.errno != errno.ENOENT:
1913 raise
1913 raise
1914 if isabort:
1914 if isabort:
1915 # Oh well, just abort like normal
1915 # Oh well, just abort like normal
1916 collapsemsg = b''
1916 collapsemsg = b''
1917 else:
1917 else:
1918 raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
1918 raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
1919 return collapsemsg
1919 return collapsemsg
1920
1920
1921
1921
1922 def clearstatus(repo):
1922 def clearstatus(repo):
1923 """Remove the status files"""
1923 """Remove the status files"""
1924 # Make sure the active transaction won't write the state file
1924 # Make sure the active transaction won't write the state file
1925 tr = repo.currenttransaction()
1925 tr = repo.currenttransaction()
1926 if tr:
1926 if tr:
1927 tr.removefilegenerator(b'rebasestate')
1927 tr.removefilegenerator(b'rebasestate')
1928 repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1928 repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1929
1929
1930
1930
1931 def sortsource(destmap):
1931 def sortsource(destmap):
1932 """yield source revisions in an order that we only rebase things once
1932 """yield source revisions in an order that we only rebase things once
1933
1933
1934 If source and destination overlap, we should filter out revisions
1934 If source and destination overlap, we should filter out revisions
1935 depending on other revisions which haven't been rebased yet.
1935 depending on other revisions which haven't been rebased yet.
1936
1936
1937 Yield a sorted list of revisions each time.
1937 Yield a sorted list of revisions each time.
1938
1938
1939 For example, when rebasing A onto B and B onto C, this function yields [B],
1939 For example, when rebasing A onto B and B onto C, this function yields [B],
1940 then [A], indicating B needs to be rebased first.
1940 then [A], indicating B needs to be rebased first.
1941
1941
1942 Raise if there is a cycle so the rebase is impossible.
1942 Raise if there is a cycle so the rebase is impossible.
1943 """
1943 """
1944 srcset = set(destmap)
1944 srcset = set(destmap)
1945 while srcset:
1945 while srcset:
1946 srclist = sorted(srcset)
1946 srclist = sorted(srcset)
1947 result = []
1947 result = []
1948 for r in srclist:
1948 for r in srclist:
1949 if destmap[r] not in srcset:
1949 if destmap[r] not in srcset:
1950 result.append(r)
1950 result.append(r)
1951 if not result:
1951 if not result:
1952 raise error.Abort(_(b'source and destination form a cycle'))
1952 raise error.Abort(_(b'source and destination form a cycle'))
1953 srcset -= set(result)
1953 srcset -= set(result)
1954 yield result
1954 yield result
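sortsource() is a topological batching over the {source: destination} map: a revision is only emitted once its destination is no longer itself waiting to be rebased, and a pass that emits nothing means the map contains a cycle. A self-contained sketch of the same loop on plain integers (illustrative only):

def sort_source(destmap):
    # destmap: {source_rev: destination_rev}; yields batches of sources whose
    # destinations are no longer waiting to be rebased themselves.
    srcset = set(destmap)
    while srcset:
        batch = sorted(r for r in srcset if destmap[r] not in srcset)
        if not batch:
            raise ValueError('source and destination form a cycle')
        srcset -= set(batch)
        yield batch

# Rebasing 1 onto 2 while 2 is itself rebased onto 3: 2 goes first.
assert list(sort_source({1: 2, 2: 3})) == [[2], [1]]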
1955
1955
1956
1956
1957 def buildstate(repo, destmap, collapse):
1957 def buildstate(repo, destmap, collapse):
1958 '''Define which revisions are going to be rebased and where
1958 '''Define which revisions are going to be rebased and where
1959
1959
1960 repo: repo
1960 repo: repo
1961 destmap: {srcrev: destrev}
1961 destmap: {srcrev: destrev}
1962 '''
1962 '''
1963 rebaseset = destmap.keys()
1963 rebaseset = destmap.keys()
1964 originalwd = repo[b'.'].rev()
1964 originalwd = repo[b'.'].rev()
1965
1965
1966 # This check isn't strictly necessary, since mq detects commits over an
1966 # This check isn't strictly necessary, since mq detects commits over an
1967 # applied patch. But it prevents messing up the working directory when
1967 # applied patch. But it prevents messing up the working directory when
1968 # a partially completed rebase is blocked by mq.
1968 # a partially completed rebase is blocked by mq.
1969 if b'qtip' in repo.tags():
1969 if b'qtip' in repo.tags():
1970 mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
1970 mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
1971 if set(destmap.values()) & mqapplied:
1971 if set(destmap.values()) & mqapplied:
1972 raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
1972 raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
1973
1973
1974 # Get "cycle" error early by exhausting the generator.
1974 # Get "cycle" error early by exhausting the generator.
1975 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
1975 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
1976 if not sortedsrc:
1976 if not sortedsrc:
1977 raise error.Abort(_(b'no matching revisions'))
1977 raise error.Abort(_(b'no matching revisions'))
1978
1978
1979 # Only check the first batch of revisions to rebase, i.e. those not depending
1979 # Only check the first batch of revisions to rebase, i.e. those not depending
1980 # on other revisions in the rebaseset. This means the "source is ancestor of
1980 # on other revisions in the rebaseset. This means the "source is ancestor of
1981 # destination" check for the second (and following) batches of revisions is
1981 # destination" check for the second (and following) batches of revisions is
1982 # not done here. We rely on "defineparents" to do that check.
1982 # not done here. We rely on "defineparents" to do that check.
1983 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
1983 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
1984 if not roots:
1984 if not roots:
1985 raise error.Abort(_(b'no matching revisions'))
1985 raise error.Abort(_(b'no matching revisions'))
1986
1986
1987 def revof(r):
1987 def revof(r):
1988 return r.rev()
1988 return r.rev()
1989
1989
1990 roots = sorted(roots, key=revof)
1990 roots = sorted(roots, key=revof)
1991 state = dict.fromkeys(rebaseset, revtodo)
1991 state = dict.fromkeys(rebaseset, revtodo)
1992 emptyrebase = len(sortedsrc) == 1
1992 emptyrebase = len(sortedsrc) == 1
1993 for root in roots:
1993 for root in roots:
1994 dest = repo[destmap[root.rev()]]
1994 dest = repo[destmap[root.rev()]]
1995 commonbase = root.ancestor(dest)
1995 commonbase = root.ancestor(dest)
1996 if commonbase == root:
1996 if commonbase == root:
1997 raise error.Abort(_(b'source is ancestor of destination'))
1997 raise error.Abort(_(b'source is ancestor of destination'))
1998 if commonbase == dest:
1998 if commonbase == dest:
1999 wctx = repo[None]
1999 wctx = repo[None]
2000 if dest == wctx.p1():
2000 if dest == wctx.p1():
2001 # when rebasing to '.', it will use the current wd branch name
2001 # when rebasing to '.', it will use the current wd branch name
2002 samebranch = root.branch() == wctx.branch()
2002 samebranch = root.branch() == wctx.branch()
2003 else:
2003 else:
2004 samebranch = root.branch() == dest.branch()
2004 samebranch = root.branch() == dest.branch()
2005 if not collapse and samebranch and dest in root.parents():
2005 if not collapse and samebranch and dest in root.parents():
2006 # mark the revision as done by setting its new revision
2006 # mark the revision as done by setting its new revision
2007 # equal to its old (current) revisions
2007 # equal to its old (current) revisions
2008 state[root.rev()] = root.rev()
2008 state[root.rev()] = root.rev()
2009 repo.ui.debug(b'source is a child of destination\n')
2009 repo.ui.debug(b'source is a child of destination\n')
2010 continue
2010 continue
2011
2011
2012 emptyrebase = False
2012 emptyrebase = False
2013 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2013 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2014 if emptyrebase:
2014 if emptyrebase:
2015 return None
2015 return None
2016 for rev in sorted(state):
2016 for rev in sorted(state):
2017 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2017 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2018 # if all parents of this revision are done, then so is this revision
2018 # if all parents of this revision are done, then so is this revision
2019 if parents and all((state.get(p) == p for p in parents)):
2019 if parents and all((state.get(p) == p for p in parents)):
2020 state[rev] = rev
2020 state[rev] = rev
2021 return originalwd, destmap, state
2021 return originalwd, destmap, state
2022
2022
2023
2023
2024 def clearrebased(
2024 def clearrebased(
2025 ui,
2025 ui,
2026 repo,
2026 repo,
2027 destmap,
2027 destmap,
2028 state,
2028 state,
2029 skipped,
2029 skipped,
2030 collapsedas=None,
2030 collapsedas=None,
2031 keepf=False,
2031 keepf=False,
2032 fm=None,
2032 fm=None,
2033 backup=True,
2033 backup=True,
2034 ):
2034 ):
2035 """dispose of rebased revision at the end of the rebase
2035 """dispose of rebased revision at the end of the rebase
2036
2036
2037 If `collapsedas` is not None, the rebase was a collapse whose result is the
2037 If `collapsedas` is not None, the rebase was a collapse whose result is the
2038 `collapsedas` node.
2038 `collapsedas` node.
2039
2039
2040 If `keepf` is True, the rebase has --keep set and no nodes should be
2040 If `keepf` is True, the rebase has --keep set and no nodes should be
2041 removed (but bookmarks still need to be moved).
2041 removed (but bookmarks still need to be moved).
2042
2042
2043 If `backup` is False, no backup will be stored when stripping rebased
2043 If `backup` is False, no backup will be stored when stripping rebased
2044 revisions.
2044 revisions.
2045 """
2045 """
2046 tonode = repo.changelog.node
2046 tonode = repo.changelog.node
2047 replacements = {}
2047 replacements = {}
2048 moves = {}
2048 moves = {}
2049 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2049 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2050
2050
2051 collapsednodes = []
2051 collapsednodes = []
2052 for rev, newrev in sorted(state.items()):
2052 for rev, newrev in sorted(state.items()):
2053 if newrev >= 0 and newrev != rev:
2053 if newrev >= 0 and newrev != rev:
2054 oldnode = tonode(rev)
2054 oldnode = tonode(rev)
2055 newnode = collapsedas or tonode(newrev)
2055 newnode = collapsedas or tonode(newrev)
2056 moves[oldnode] = newnode
2056 moves[oldnode] = newnode
2057 succs = None
2057 succs = None
2058 if rev in skipped:
2058 if rev in skipped:
2059 if stripcleanup or not repo[rev].obsolete():
2059 if stripcleanup or not repo[rev].obsolete():
2060 succs = ()
2060 succs = ()
2061 elif collapsedas:
2061 elif collapsedas:
2062 collapsednodes.append(oldnode)
2062 collapsednodes.append(oldnode)
2063 else:
2063 else:
2064 succs = (newnode,)
2064 succs = (newnode,)
2065 if succs is not None:
2065 if succs is not None:
2066 replacements[(oldnode,)] = succs
2066 replacements[(oldnode,)] = succs
2067 if collapsednodes:
2067 if collapsednodes:
2068 replacements[tuple(collapsednodes)] = (collapsedas,)
2068 replacements[tuple(collapsednodes)] = (collapsedas,)
2069 if fm:
2069 if fm:
2070 hf = fm.hexfunc
2070 hf = fm.hexfunc
2071 fl = fm.formatlist
2071 fl = fm.formatlist
2072 fd = fm.formatdict
2072 fd = fm.formatdict
2073 changes = {}
2073 changes = {}
2074 for oldns, newn in pycompat.iteritems(replacements):
2074 for oldns, newn in pycompat.iteritems(replacements):
2075 for oldn in oldns:
2075 for oldn in oldns:
2076 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2076 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2077 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2077 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2078 fm.data(nodechanges=nodechanges)
2078 fm.data(nodechanges=nodechanges)
2079 if keepf:
2079 if keepf:
2080 replacements = {}
2080 replacements = {}
2081 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2081 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
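The `replacements` dict built above takes one of two shapes: in a normal rebase each old node, keyed as a 1-tuple, maps to a 1-tuple holding its successor (or to an empty tuple when the revision was skipped and should be stripped without a successor), while a collapse maps the whole tuple of collapsed old nodes to the single collapsed node. Purely as an illustration of those shapes, with made-up byte strings standing in for real node hashes:

normal_rebase_replacements = {
    (b'old-A',): (b'new-A',),  # A was rebased, new-A is its successor
    (b'old-B',): (),           # B was skipped: strip it, no successor recorded
}
collapse_replacements = {
    # every collapsed source points at the single folded changeset
    (b'old-A', b'old-B', b'old-C'): (b'folded',),
}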
2082
2082
2083
2083
2084 def pullrebase(orig, ui, repo, *args, **opts):
2084 def pullrebase(orig, ui, repo, *args, **opts):
2085 """Call rebase after pull if the latter has been invoked with --rebase"""
2085 """Call rebase after pull if the latter has been invoked with --rebase"""
2086 if opts.get('rebase'):
2086 if opts.get('rebase'):
2087 if ui.configbool(b'commands', b'rebase.requiredest'):
2087 if ui.configbool(b'commands', b'rebase.requiredest'):
2088 msg = _(b'rebase destination required by configuration')
2088 msg = _(b'rebase destination required by configuration')
2089 hint = _(b'use hg pull followed by hg rebase -d DEST')
2089 hint = _(b'use hg pull followed by hg rebase -d DEST')
2090 raise error.Abort(msg, hint=hint)
2090 raise error.Abort(msg, hint=hint)
2091
2091
2092 with repo.wlock(), repo.lock():
2092 with repo.wlock(), repo.lock():
2093 if opts.get('update'):
2093 if opts.get('update'):
2094 del opts['update']
2094 del opts['update']
2095 ui.debug(
2095 ui.debug(
2096 b'--update and --rebase are not compatible, ignoring '
2096 b'--update and --rebase are not compatible, ignoring '
2097 b'the update flag\n'
2097 b'the update flag\n'
2098 )
2098 )
2099
2099
2100 cmdutil.checkunfinished(repo, skipmerge=True)
2100 cmdutil.checkunfinished(repo, skipmerge=True)
2101 cmdutil.bailifchanged(
2101 cmdutil.bailifchanged(
2102 repo,
2102 repo,
2103 hint=_(
2103 hint=_(
2104 b'cannot pull with rebase: '
2104 b'cannot pull with rebase: '
2105 b'please commit or shelve your changes first'
2105 b'please commit or shelve your changes first'
2106 ),
2106 ),
2107 )
2107 )
2108
2108
2109 revsprepull = len(repo)
2109 revsprepull = len(repo)
2110 origpostincoming = commands.postincoming
2110 origpostincoming = commands.postincoming
2111
2111
2112 def _dummy(*args, **kwargs):
2112 def _dummy(*args, **kwargs):
2113 pass
2113 pass
2114
2114
2115 commands.postincoming = _dummy
2115 commands.postincoming = _dummy
2116 try:
2116 try:
2117 ret = orig(ui, repo, *args, **opts)
2117 ret = orig(ui, repo, *args, **opts)
2118 finally:
2118 finally:
2119 commands.postincoming = origpostincoming
2119 commands.postincoming = origpostincoming
2120 revspostpull = len(repo)
2120 revspostpull = len(repo)
2121 if revspostpull > revsprepull:
2121 if revspostpull > revsprepull:
2122 # the --rev option from pull conflicts with rebase's own --rev,
2122 # the --rev option from pull conflicts with rebase's own --rev,
2123 # so drop it
2123 # so drop it
2124 if 'rev' in opts:
2124 if 'rev' in opts:
2125 del opts['rev']
2125 del opts['rev']
2126 # positional argument from pull conflicts with rebase's own
2126 # positional argument from pull conflicts with rebase's own
2127 # --source.
2127 # --source.
2128 if 'source' in opts:
2128 if 'source' in opts:
2129 del opts['source']
2129 del opts['source']
2130 # revsprepull is the len of the repo, not revnum of tip.
2130 # revsprepull is the len of the repo, not revnum of tip.
2131 destspace = list(repo.changelog.revs(start=revsprepull))
2131 destspace = list(repo.changelog.revs(start=revsprepull))
2132 opts['_destspace'] = destspace
2132 opts['_destspace'] = destspace
2133 try:
2133 try:
2134 rebase(ui, repo, **opts)
2134 rebase(ui, repo, **opts)
2135 except error.NoMergeDestAbort:
2135 except error.NoMergeDestAbort:
2136 # we can maybe update instead
2136 # we can maybe update instead
2137 rev, _a, _b = destutil.destupdate(repo)
2137 rev, _a, _b = destutil.destupdate(repo)
2138 if rev == repo[b'.'].rev():
2138 if rev == repo[b'.'].rev():
2139 ui.status(_(b'nothing to rebase\n'))
2139 ui.status(_(b'nothing to rebase\n'))
2140 else:
2140 else:
2141 ui.status(_(b'nothing to rebase - updating instead\n'))
2141 ui.status(_(b'nothing to rebase - updating instead\n'))
2142 # not passing argument to get the bare update behavior
2142 # not passing argument to get the bare update behavior
2143 # with warning and trumpets
2143 # with warning and trumpets
2144 commands.update(ui, repo)
2144 commands.update(ui, repo)
2145 else:
2145 else:
2146 if opts.get('tool'):
2146 if opts.get('tool'):
2147 raise error.Abort(_(b'--tool can only be used with --rebase'))
2147 raise error.Abort(_(b'--tool can only be used with --rebase'))
2148 ret = orig(ui, repo, *args, **opts)
2148 ret = orig(ui, repo, *args, **opts)
2149
2149
2150 return ret
2150 return ret
2151
2151
2152
2152
2153 def _filterobsoleterevs(repo, revs):
2153 def _filterobsoleterevs(repo, revs):
2154 """returns a set of the obsolete revisions in revs"""
2154 """returns a set of the obsolete revisions in revs"""
2155 return {r for r in revs if repo[r].obsolete()}
2155 return {r for r in revs if repo[r].obsolete()}
2156
2156
2157
2157
2158 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2158 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2159 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2159 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2160
2160
2161 `obsoletenotrebased` is a mapping of obsolete => successor for all
2161 `obsoletenotrebased` is a mapping of obsolete => successor for all
2162 obsolete nodes to be rebased given in `rebaseobsrevs`.
2162 obsolete nodes to be rebased given in `rebaseobsrevs`.
2163
2163
2164 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2164 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2165 without a successor in destination.
2165 without a successor in destination.
2166
2166
2167 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2167 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2168 obsolete successors.
2168 obsolete successors.
2169 """
2169 """
2170 obsoletenotrebased = {}
2170 obsoletenotrebased = {}
2171 obsoletewithoutsuccessorindestination = set()
2171 obsoletewithoutsuccessorindestination = set()
2172 obsoleteextinctsuccessors = set()
2172 obsoleteextinctsuccessors = set()
2173
2173
2174 assert repo.filtername is None
2174 assert repo.filtername is None
2175 cl = repo.changelog
2175 cl = repo.changelog
2176 get_rev = cl.index.get_rev
2176 get_rev = cl.index.get_rev
2177 extinctrevs = set(repo.revs(b'extinct()'))
2177 extinctrevs = set(repo.revs(b'extinct()'))
2178 for srcrev in rebaseobsrevs:
2178 for srcrev in rebaseobsrevs:
2179 srcnode = cl.node(srcrev)
2179 srcnode = cl.node(srcrev)
2180 # XXX: more advanced APIs are required to handle split correctly
2180 # XXX: more advanced APIs are required to handle split correctly
2181 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2181 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2182 # obsutil.allsuccessors includes node itself
2182 # obsutil.allsuccessors includes node itself
2183 successors.remove(srcnode)
2183 successors.remove(srcnode)
2184 succrevs = {get_rev(s) for s in successors}
2184 succrevs = {get_rev(s) for s in successors}
2185 succrevs.discard(None)
2185 succrevs.discard(None)
2186 if succrevs.issubset(extinctrevs):
2186 if succrevs.issubset(extinctrevs):
2187 # all successors are extinct
2187 # all successors are extinct
2188 obsoleteextinctsuccessors.add(srcrev)
2188 obsoleteextinctsuccessors.add(srcrev)
2189 if not successors:
2189 if not successors:
2190 # no successor
2190 # no successor
2191 obsoletenotrebased[srcrev] = None
2191 obsoletenotrebased[srcrev] = None
2192 else:
2192 else:
2193 dstrev = destmap[srcrev]
2193 dstrev = destmap[srcrev]
2194 for succrev in succrevs:
2194 for succrev in succrevs:
2195 if cl.isancestorrev(succrev, dstrev):
2195 if cl.isancestorrev(succrev, dstrev):
2196 obsoletenotrebased[srcrev] = succrev
2196 obsoletenotrebased[srcrev] = succrev
2197 break
2197 break
2198 else:
2198 else:
2199 # If 'srcrev' has a successor in rebase set but none in
2199 # If 'srcrev' has a successor in rebase set but none in
2200 # destination (which would be caught above), we shall skip it
2200 # destination (which would be caught above), we shall skip it
2201 # and its descendants to avoid divergence.
2201 # and its descendants to avoid divergence.
2202 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2202 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2203 obsoletewithoutsuccessorindestination.add(srcrev)
2203 obsoletewithoutsuccessorindestination.add(srcrev)
2204
2204
2205 return (
2205 return (
2206 obsoletenotrebased,
2206 obsoletenotrebased,
2207 obsoletewithoutsuccessorindestination,
2207 obsoletewithoutsuccessorindestination,
2208 obsoleteextinctsuccessors,
2208 obsoleteextinctsuccessors,
2209 )
2209 )
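A much-simplified toy of the successor lookup above: `successors` maps an obsolete rev to its known successors and `dest_ancestors` is the ancestor set of the rebase destination; a rev with no successors at all maps to None (pruned), one with a successor already in the destination maps to that successor. All names and shapes here are illustrative assumptions, not the obsstore API:

def obsolete_not_rebased(obsrevs, successors, dest_ancestors):
    mapping = {}
    for src in obsrevs:
        succs = successors.get(src, set())
        if not succs:
            mapping[src] = None           # pruned: nothing to map onto
            continue
        for s in succs:
            if s in dest_ancestors:       # already present in the destination
                mapping[src] = s
                break
    return mapping

assert obsolete_not_rebased(
    [10, 11], {10: {20}, 11: set()}, dest_ancestors={5, 20}
) == {10: 20, 11: None}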
2210
2210
2211
2211
2212 def abortrebase(ui, repo):
2212 def abortrebase(ui, repo):
2213 with repo.wlock(), repo.lock():
2213 with repo.wlock(), repo.lock():
2214 rbsrt = rebaseruntime(repo, ui)
2214 rbsrt = rebaseruntime(repo, ui)
2215 rbsrt._prepareabortorcontinue(isabort=True)
2215 rbsrt._prepareabortorcontinue(isabort=True)
2216
2216
2217
2217
2218 def continuerebase(ui, repo):
2218 def continuerebase(ui, repo):
2219 with repo.wlock(), repo.lock():
2219 with repo.wlock(), repo.lock():
2220 rbsrt = rebaseruntime(repo, ui)
2220 rbsrt = rebaseruntime(repo, ui)
2221 ms = mergestatemod.mergestate.read(repo)
2221 ms = mergestatemod.mergestate.read(repo)
2222 mergeutil.checkunresolved(ms)
2222 mergeutil.checkunresolved(ms)
2223 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2223 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2224 if retcode is not None:
2224 if retcode is not None:
2225 return retcode
2225 return retcode
2226 rbsrt._performrebase(None)
2226 rbsrt._performrebase(None)
2227 rbsrt._finishrebase()
2227 rbsrt._finishrebase()
2228
2228
2229
2229
2230 def summaryhook(ui, repo):
2230 def summaryhook(ui, repo):
2231 if not repo.vfs.exists(b'rebasestate'):
2231 if not repo.vfs.exists(b'rebasestate'):
2232 return
2232 return
2233 try:
2233 try:
2234 rbsrt = rebaseruntime(repo, ui, {})
2234 rbsrt = rebaseruntime(repo, ui, {})
2235 rbsrt.restorestatus()
2235 rbsrt.restorestatus()
2236 state = rbsrt.state
2236 state = rbsrt.state
2237 except error.RepoLookupError:
2237 except error.RepoLookupError:
2238 # i18n: column positioning for "hg summary"
2238 # i18n: column positioning for "hg summary"
2239 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2239 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2240 ui.write(msg)
2240 ui.write(msg)
2241 return
2241 return
2242 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2242 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2243 # i18n: column positioning for "hg summary"
2243 # i18n: column positioning for "hg summary"
2244 ui.write(
2244 ui.write(
2245 _(b'rebase: %s, %s (rebase --continue)\n')
2245 _(b'rebase: %s, %s (rebase --continue)\n')
2246 % (
2246 % (
2247 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2247 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2248 ui.label(_(b'%d remaining'), b'rebase.remaining')
2248 ui.label(_(b'%d remaining'), b'rebase.remaining')
2249 % (len(state) - numrebased),
2249 % (len(state) - numrebased),
2250 )
2250 )
2251 )
2251 )
2252
2252
2253
2253
2254 def uisetup(ui):
2254 def uisetup(ui):
2255 # Replace pull with a decorator to provide --rebase option
2255 # Replace pull with a decorator to provide --rebase option
2256 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2256 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2257 entry[1].append(
2257 entry[1].append(
2258 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2258 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2259 )
2259 )
2260 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2260 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2261 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2261 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2262 statemod.addunfinished(
2262 statemod.addunfinished(
2263 b'rebase',
2263 b'rebase',
2264 fname=b'rebasestate',
2264 fname=b'rebasestate',
2265 stopflag=True,
2265 stopflag=True,
2266 continueflag=True,
2266 continueflag=True,
2267 abortfunc=abortrebase,
2267 abortfunc=abortrebase,
2268 continuefunc=continuerebase,
2268 continuefunc=continuerebase,
2269 )
2269 )
@@ -1,3872 +1,3890 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 short,
19 short,
20 )
20 )
21 from .pycompat import (
21 from .pycompat import (
22 getattr,
22 getattr,
23 open,
23 open,
24 setattr,
24 setattr,
25 )
25 )
26 from .thirdparty import attr
26 from .thirdparty import attr
27
27
28 from . import (
28 from . import (
29 bookmarks,
29 bookmarks,
30 changelog,
30 changelog,
31 copies,
31 copies,
32 crecord as crecordmod,
32 crecord as crecordmod,
33 dirstateguard,
33 dirstateguard,
34 encoding,
34 encoding,
35 error,
35 error,
36 formatter,
36 formatter,
37 logcmdutil,
37 logcmdutil,
38 match as matchmod,
38 match as matchmod,
39 merge as mergemod,
39 merge as mergemod,
40 mergestate as mergestatemod,
40 mergestate as mergestatemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 pathutil,
44 pathutil,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 repair,
47 repair,
48 revlog,
48 revlog,
49 rewriteutil,
49 rewriteutil,
50 scmutil,
50 scmutil,
51 state as statemod,
51 state as statemod,
52 subrepoutil,
52 subrepoutil,
53 templatekw,
53 templatekw,
54 templater,
54 templater,
55 util,
55 util,
56 vfs as vfsmod,
56 vfs as vfsmod,
57 )
57 )
58
58
59 from .utils import (
59 from .utils import (
60 dateutil,
60 dateutil,
61 stringutil,
61 stringutil,
62 )
62 )
63
63
64 if pycompat.TYPE_CHECKING:
64 if pycompat.TYPE_CHECKING:
65 from typing import (
65 from typing import (
66 Any,
66 Any,
67 Dict,
67 Dict,
68 )
68 )
69
69
70 for t in (Any, Dict):
70 for t in (Any, Dict):
71 assert t
71 assert t
72
72
73 stringio = util.stringio
73 stringio = util.stringio
74
74
75 # templates of common command options
75 # templates of common command options
76
76
77 dryrunopts = [
77 dryrunopts = [
78 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
78 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
79 ]
79 ]
80
80
81 confirmopts = [
81 confirmopts = [
82 (b'', b'confirm', None, _(b'ask before applying actions')),
82 (b'', b'confirm', None, _(b'ask before applying actions')),
83 ]
83 ]
84
84
85 remoteopts = [
85 remoteopts = [
86 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
86 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
87 (
87 (
88 b'',
88 b'',
89 b'remotecmd',
89 b'remotecmd',
90 b'',
90 b'',
91 _(b'specify hg command to run on the remote side'),
91 _(b'specify hg command to run on the remote side'),
92 _(b'CMD'),
92 _(b'CMD'),
93 ),
93 ),
94 (
94 (
95 b'',
95 b'',
96 b'insecure',
96 b'insecure',
97 None,
97 None,
98 _(b'do not verify server certificate (ignoring web.cacerts config)'),
98 _(b'do not verify server certificate (ignoring web.cacerts config)'),
99 ),
99 ),
100 ]
100 ]
101
101
102 walkopts = [
102 walkopts = [
103 (
103 (
104 b'I',
104 b'I',
105 b'include',
105 b'include',
106 [],
106 [],
107 _(b'include names matching the given patterns'),
107 _(b'include names matching the given patterns'),
108 _(b'PATTERN'),
108 _(b'PATTERN'),
109 ),
109 ),
110 (
110 (
111 b'X',
111 b'X',
112 b'exclude',
112 b'exclude',
113 [],
113 [],
114 _(b'exclude names matching the given patterns'),
114 _(b'exclude names matching the given patterns'),
115 _(b'PATTERN'),
115 _(b'PATTERN'),
116 ),
116 ),
117 ]
117 ]
118
118
119 commitopts = [
119 commitopts = [
120 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
120 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
121 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
121 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
122 ]
122 ]
123
123
124 commitopts2 = [
124 commitopts2 = [
125 (
125 (
126 b'd',
126 b'd',
127 b'date',
127 b'date',
128 b'',
128 b'',
129 _(b'record the specified date as commit date'),
129 _(b'record the specified date as commit date'),
130 _(b'DATE'),
130 _(b'DATE'),
131 ),
131 ),
132 (
132 (
133 b'u',
133 b'u',
134 b'user',
134 b'user',
135 b'',
135 b'',
136 _(b'record the specified user as committer'),
136 _(b'record the specified user as committer'),
137 _(b'USER'),
137 _(b'USER'),
138 ),
138 ),
139 ]
139 ]
140
140
141 commitopts3 = [
141 commitopts3 = [
142 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
142 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
143 (b'U', b'currentuser', None, _(b'record the current user as committer')),
143 (b'U', b'currentuser', None, _(b'record the current user as committer')),
144 ]
144 ]
145
145
146 formatteropts = [
146 formatteropts = [
147 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
147 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
148 ]
148 ]
149
149
150 templateopts = [
150 templateopts = [
151 (
151 (
152 b'',
152 b'',
153 b'style',
153 b'style',
154 b'',
154 b'',
155 _(b'display using template map file (DEPRECATED)'),
155 _(b'display using template map file (DEPRECATED)'),
156 _(b'STYLE'),
156 _(b'STYLE'),
157 ),
157 ),
158 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
158 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
159 ]
159 ]
160
160
161 logopts = [
161 logopts = [
162 (b'p', b'patch', None, _(b'show patch')),
162 (b'p', b'patch', None, _(b'show patch')),
163 (b'g', b'git', None, _(b'use git extended diff format')),
163 (b'g', b'git', None, _(b'use git extended diff format')),
164 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
164 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
165 (b'M', b'no-merges', None, _(b'do not show merges')),
165 (b'M', b'no-merges', None, _(b'do not show merges')),
166 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
166 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
167 (b'G', b'graph', None, _(b"show the revision DAG")),
167 (b'G', b'graph', None, _(b"show the revision DAG")),
168 ] + templateopts
168 ] + templateopts
169
169
170 diffopts = [
170 diffopts = [
171 (b'a', b'text', None, _(b'treat all files as text')),
171 (b'a', b'text', None, _(b'treat all files as text')),
172 (
172 (
173 b'g',
173 b'g',
174 b'git',
174 b'git',
175 None,
175 None,
176 _(b'use git extended diff format (DEFAULT: diff.git)'),
176 _(b'use git extended diff format (DEFAULT: diff.git)'),
177 ),
177 ),
178 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
178 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
179 (b'', b'nodates', None, _(b'omit dates from diff headers')),
179 (b'', b'nodates', None, _(b'omit dates from diff headers')),
180 ]
180 ]
181
181
182 diffwsopts = [
182 diffwsopts = [
183 (
183 (
184 b'w',
184 b'w',
185 b'ignore-all-space',
185 b'ignore-all-space',
186 None,
186 None,
187 _(b'ignore white space when comparing lines'),
187 _(b'ignore white space when comparing lines'),
188 ),
188 ),
189 (
189 (
190 b'b',
190 b'b',
191 b'ignore-space-change',
191 b'ignore-space-change',
192 None,
192 None,
193 _(b'ignore changes in the amount of white space'),
193 _(b'ignore changes in the amount of white space'),
194 ),
194 ),
195 (
195 (
196 b'B',
196 b'B',
197 b'ignore-blank-lines',
197 b'ignore-blank-lines',
198 None,
198 None,
199 _(b'ignore changes whose lines are all blank'),
199 _(b'ignore changes whose lines are all blank'),
200 ),
200 ),
201 (
201 (
202 b'Z',
202 b'Z',
203 b'ignore-space-at-eol',
203 b'ignore-space-at-eol',
204 None,
204 None,
205 _(b'ignore changes in whitespace at EOL'),
205 _(b'ignore changes in whitespace at EOL'),
206 ),
206 ),
207 ]
207 ]
208
208
209 diffopts2 = (
209 diffopts2 = (
210 [
210 [
211 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
211 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
212 (
212 (
213 b'p',
213 b'p',
214 b'show-function',
214 b'show-function',
215 None,
215 None,
216 _(
216 _(
217 b'show which function each change is in (DEFAULT: diff.showfunc)'
217 b'show which function each change is in (DEFAULT: diff.showfunc)'
218 ),
218 ),
219 ),
219 ),
220 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
220 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
221 ]
221 ]
222 + diffwsopts
222 + diffwsopts
223 + [
223 + [
224 (
224 (
225 b'U',
225 b'U',
226 b'unified',
226 b'unified',
227 b'',
227 b'',
228 _(b'number of lines of context to show'),
228 _(b'number of lines of context to show'),
229 _(b'NUM'),
229 _(b'NUM'),
230 ),
230 ),
231 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
231 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
232 (
232 (
233 b'',
233 b'',
234 b'root',
234 b'root',
235 b'',
235 b'',
236 _(b'produce diffs relative to subdirectory'),
236 _(b'produce diffs relative to subdirectory'),
237 _(b'DIR'),
237 _(b'DIR'),
238 ),
238 ),
239 ]
239 ]
240 )
240 )
241
241
242 mergetoolopts = [
242 mergetoolopts = [
243 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
243 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
244 ]
244 ]
245
245
246 similarityopts = [
246 similarityopts = [
247 (
247 (
248 b's',
248 b's',
249 b'similarity',
249 b'similarity',
250 b'',
250 b'',
251 _(b'guess renamed files by similarity (0<=s<=100)'),
251 _(b'guess renamed files by similarity (0<=s<=100)'),
252 _(b'SIMILARITY'),
252 _(b'SIMILARITY'),
253 )
253 )
254 ]
254 ]
255
255
256 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
256 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
257
257
258 debugrevlogopts = [
258 debugrevlogopts = [
259 (b'c', b'changelog', False, _(b'open changelog')),
259 (b'c', b'changelog', False, _(b'open changelog')),
260 (b'm', b'manifest', False, _(b'open manifest')),
260 (b'm', b'manifest', False, _(b'open manifest')),
261 (b'', b'dir', b'', _(b'open directory manifest')),
261 (b'', b'dir', b'', _(b'open directory manifest')),
262 ]
262 ]
263
263
264 # special string such that everything below this line will be ignored in the
264 # special string such that everything below this line will be ignored in the
265 # editor text
265 # editor text
266 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
266 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
267
267
268
268
269 def check_at_most_one_arg(opts, *args):
269 def check_at_most_one_arg(opts, *args):
270 """abort if more than one of the arguments are in opts
270 """abort if more than one of the arguments are in opts
271
271
272 Returns the unique argument or None if none of them were specified.
272 Returns the unique argument or None if none of them were specified.
273 """
273 """
274
274
275 def to_display(name):
275 def to_display(name):
276 return pycompat.sysbytes(name).replace(b'_', b'-')
276 return pycompat.sysbytes(name).replace(b'_', b'-')
277
277
278 previous = None
278 previous = None
279 for x in args:
279 for x in args:
280 if opts.get(x):
280 if opts.get(x):
281 if previous:
281 if previous:
282 raise error.Abort(
282 raise error.Abort(
283 _(b'cannot specify both --%s and --%s')
283 _(b'cannot specify both --%s and --%s')
284 % (to_display(previous), to_display(x))
284 % (to_display(previous), to_display(x))
285 )
285 )
286 previous = x
286 previous = x
287 return previous
287 return previous
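For example, a command invocation that sets only one of two mutually exclusive options passes the check and gets that option name back (the opts dict below is a made-up example rather than the output of a real argument parse):

from mercurial.cmdutil import check_at_most_one_arg

opts = {b'date': b'0 0', b'currentdate': None}  # hypothetical parsed options
assert check_at_most_one_arg(opts, b'date', b'currentdate') == b'date'
# Had both b'date' and b'currentdate' been truthy, the call would instead
# raise error.Abort('cannot specify both --date and --currentdate').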
288
288
289
289
290 def check_incompatible_arguments(opts, first, others):
290 def check_incompatible_arguments(opts, first, others):
291 """abort if the first argument is given along with any of the others
291 """abort if the first argument is given along with any of the others
292
292
293 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
293 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
294 among themselves, and they're passed as a single collection.
294 among themselves, and they're passed as a single collection.
295 """
295 """
296 for other in others:
296 for other in others:
297 check_at_most_one_arg(opts, first, other)
297 check_at_most_one_arg(opts, first, other)
298
298
299
299
300 def resolvecommitoptions(ui, opts):
300 def resolvecommitoptions(ui, opts):
301 """modify commit options dict to handle related options
301 """modify commit options dict to handle related options
302
302
303 The return value indicates whether ``rewrite.update-timestamp`` is the reason
303 The return value indicates whether ``rewrite.update-timestamp`` is the reason
304 the ``date`` option is set.
304 the ``date`` option is set.
305 """
305 """
306 check_at_most_one_arg(opts, b'date', b'currentdate')
306 check_at_most_one_arg(opts, b'date', b'currentdate')
307 check_at_most_one_arg(opts, b'user', b'currentuser')
307 check_at_most_one_arg(opts, b'user', b'currentuser')
308
308
309 datemaydiffer = False # date-only change should be ignored?
309 datemaydiffer = False # date-only change should be ignored?
310
310
311 if opts.get(b'currentdate'):
311 if opts.get(b'currentdate'):
312 opts[b'date'] = b'%d %d' % dateutil.makedate()
312 opts[b'date'] = b'%d %d' % dateutil.makedate()
313 elif (
313 elif (
314 not opts.get(b'date')
314 not opts.get(b'date')
315 and ui.configbool(b'rewrite', b'update-timestamp')
315 and ui.configbool(b'rewrite', b'update-timestamp')
316 and opts.get(b'currentdate') is None
316 and opts.get(b'currentdate') is None
317 ):
317 ):
318 opts[b'date'] = b'%d %d' % dateutil.makedate()
318 opts[b'date'] = b'%d %d' % dateutil.makedate()
319 datemaydiffer = True
319 datemaydiffer = True
320
320
321 if opts.get(b'currentuser'):
321 if opts.get(b'currentuser'):
322 opts[b'user'] = ui.username()
322 opts[b'user'] = ui.username()
323
323
324 return datemaydiffer
324 return datemaydiffer
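A usage sketch of the resolution above: with --currentdate given, the date is filled in from the current time and the return value stays False, because the timestamp came from an explicit option rather than from rewrite.update-timestamp. This assumes a default ui loaded via ui.load(); the opts dict is a hypothetical example.

from mercurial import ui as uimod
from mercurial.cmdutil import resolvecommitoptions

u = uimod.ui.load()
opts = {b'date': b'', b'currentdate': True, b'user': b'', b'currentuser': None}
datemaydiffer = resolvecommitoptions(u, opts)
assert not datemaydiffer
assert opts[b'date']  # now "<unixtime> <tzoffset>" from dateutil.makedate()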
325
325
326
326
327 def checknotesize(ui, opts):
327 def checknotesize(ui, opts):
328 """ make sure note is of valid format """
328 """ make sure note is of valid format """
329
329
330 note = opts.get(b'note')
330 note = opts.get(b'note')
331 if not note:
331 if not note:
332 return
332 return
333
333
334 if len(note) > 255:
334 if len(note) > 255:
335 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
335 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
336 if b'\n' in note:
336 if b'\n' in note:
337 raise error.Abort(_(b"note cannot contain a newline"))
337 raise error.Abort(_(b"note cannot contain a newline"))
338
338
339
339
340 def ishunk(x):
340 def ishunk(x):
341 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
341 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
342 return isinstance(x, hunkclasses)
342 return isinstance(x, hunkclasses)
343
343
344
344
345 def newandmodified(chunks, originalchunks):
345 def newandmodified(chunks, originalchunks):
346 newlyaddedandmodifiedfiles = set()
346 newlyaddedandmodifiedfiles = set()
347 alsorestore = set()
347 alsorestore = set()
348 for chunk in chunks:
348 for chunk in chunks:
349 if (
349 if (
350 ishunk(chunk)
350 ishunk(chunk)
351 and chunk.header.isnewfile()
351 and chunk.header.isnewfile()
352 and chunk not in originalchunks
352 and chunk not in originalchunks
353 ):
353 ):
354 newlyaddedandmodifiedfiles.add(chunk.header.filename())
354 newlyaddedandmodifiedfiles.add(chunk.header.filename())
355 alsorestore.update(
355 alsorestore.update(
356 set(chunk.header.files()) - {chunk.header.filename()}
356 set(chunk.header.files()) - {chunk.header.filename()}
357 )
357 )
358 return newlyaddedandmodifiedfiles, alsorestore
358 return newlyaddedandmodifiedfiles, alsorestore
359
359
360
360
361 def parsealiases(cmd):
361 def parsealiases(cmd):
362 return cmd.split(b"|")
362 return cmd.split(b"|")
363
363
364
364
365 def setupwrapcolorwrite(ui):
365 def setupwrapcolorwrite(ui):
366 # wrap ui.write so diff output can be labeled/colorized
366 # wrap ui.write so diff output can be labeled/colorized
367 def wrapwrite(orig, *args, **kw):
367 def wrapwrite(orig, *args, **kw):
368 label = kw.pop('label', b'')
368 label = kw.pop('label', b'')
369 for chunk, l in patch.difflabel(lambda: args):
369 for chunk, l in patch.difflabel(lambda: args):
370 orig(chunk, label=label + l)
370 orig(chunk, label=label + l)
371
371
372 oldwrite = ui.write
372 oldwrite = ui.write
373
373
374 def wrap(*args, **kwargs):
374 def wrap(*args, **kwargs):
375 return wrapwrite(oldwrite, *args, **kwargs)
375 return wrapwrite(oldwrite, *args, **kwargs)
376
376
377 setattr(ui, 'write', wrap)
377 setattr(ui, 'write', wrap)
378 return oldwrite
378 return oldwrite
379
379
380
380
381 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
381 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
382 try:
382 try:
383 if usecurses:
383 if usecurses:
384 if testfile:
384 if testfile:
385 recordfn = crecordmod.testdecorator(
385 recordfn = crecordmod.testdecorator(
386 testfile, crecordmod.testchunkselector
386 testfile, crecordmod.testchunkselector
387 )
387 )
388 else:
388 else:
389 recordfn = crecordmod.chunkselector
389 recordfn = crecordmod.chunkselector
390
390
391 return crecordmod.filterpatch(
391 return crecordmod.filterpatch(
392 ui, originalhunks, recordfn, operation
392 ui, originalhunks, recordfn, operation
393 )
393 )
394 except crecordmod.fallbackerror as e:
394 except crecordmod.fallbackerror as e:
395 ui.warn(b'%s\n' % e)
395 ui.warn(b'%s\n' % e)
396 ui.warn(_(b'falling back to text mode\n'))
396 ui.warn(_(b'falling back to text mode\n'))
397
397
398 return patch.filterpatch(ui, originalhunks, match, operation)
398 return patch.filterpatch(ui, originalhunks, match, operation)
399
399
400
400
401 def recordfilter(ui, originalhunks, match, operation=None):
401 def recordfilter(ui, originalhunks, match, operation=None):
402 """ Prompts the user to filter the originalhunks and return a list of
402 """ Prompts the user to filter the originalhunks and return a list of
403 selected hunks.
403 selected hunks.
404 *operation* is used to build ui messages to indicate to the user what
404 *operation* is used to build ui messages to indicate to the user what
405 kind of filtering they are doing: reverting, committing, shelving, etc.
405 kind of filtering they are doing: reverting, committing, shelving, etc.
406 (see patch.filterpatch).
406 (see patch.filterpatch).
407 """
407 """
408 usecurses = crecordmod.checkcurses(ui)
408 usecurses = crecordmod.checkcurses(ui)
409 testfile = ui.config(b'experimental', b'crecordtest')
409 testfile = ui.config(b'experimental', b'crecordtest')
410 oldwrite = setupwrapcolorwrite(ui)
410 oldwrite = setupwrapcolorwrite(ui)
411 try:
411 try:
412 newchunks, newopts = filterchunks(
412 newchunks, newopts = filterchunks(
413 ui, originalhunks, usecurses, testfile, match, operation
413 ui, originalhunks, usecurses, testfile, match, operation
414 )
414 )
415 finally:
415 finally:
416 ui.write = oldwrite
416 ui.write = oldwrite
417 return newchunks, newopts
417 return newchunks, newopts
418
418
419
419
420 def dorecord(
420 def dorecord(
421 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
421 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
422 ):
422 ):
423 opts = pycompat.byteskwargs(opts)
423 opts = pycompat.byteskwargs(opts)
424 if not ui.interactive():
424 if not ui.interactive():
425 if cmdsuggest:
425 if cmdsuggest:
426 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
426 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
427 else:
427 else:
428 msg = _(b'running non-interactively')
428 msg = _(b'running non-interactively')
429 raise error.Abort(msg)
429 raise error.Abort(msg)
430
430
431 # make sure username is set before going interactive
431 # make sure username is set before going interactive
432 if not opts.get(b'user'):
432 if not opts.get(b'user'):
433 ui.username() # raise exception, username not provided
433 ui.username() # raise exception, username not provided
434
434
435 def recordfunc(ui, repo, message, match, opts):
435 def recordfunc(ui, repo, message, match, opts):
436 """This is generic record driver.
436 """This is generic record driver.
437
437
438 Its job is to interactively filter local changes, and
438 Its job is to interactively filter local changes, and
439 accordingly prepare the working directory into a state in which the
439 accordingly prepare the working directory into a state in which the
440 job can be delegated to a non-interactive commit command such as
440 job can be delegated to a non-interactive commit command such as
441 'commit' or 'qrefresh'.
441 'commit' or 'qrefresh'.
442
442
443 After the actual job is done by non-interactive command, the
443 After the actual job is done by non-interactive command, the
444 working directory is restored to its original state.
444 working directory is restored to its original state.
445
445
446 In the end we'll record interesting changes, and everything else
446 In the end we'll record interesting changes, and everything else
447 will be left in place, so the user can continue working.
447 will be left in place, so the user can continue working.
448 """
448 """
449 if not opts.get(b'interactive-unshelve'):
449 if not opts.get(b'interactive-unshelve'):
450 checkunfinished(repo, commit=True)
450 checkunfinished(repo, commit=True)
451 wctx = repo[None]
451 wctx = repo[None]
452 merge = len(wctx.parents()) > 1
452 merge = len(wctx.parents()) > 1
453 if merge:
453 if merge:
454 raise error.Abort(
454 raise error.Abort(
455 _(
455 _(
456 b'cannot partially commit a merge '
456 b'cannot partially commit a merge '
457 b'(use "hg commit" instead)'
457 b'(use "hg commit" instead)'
458 )
458 )
459 )
459 )
460
460
461 def fail(f, msg):
461 def fail(f, msg):
462 raise error.Abort(b'%s: %s' % (f, msg))
462 raise error.Abort(b'%s: %s' % (f, msg))
463
463
464 force = opts.get(b'force')
464 force = opts.get(b'force')
465 if not force:
465 if not force:
466 match = matchmod.badmatch(match, fail)
466 match = matchmod.badmatch(match, fail)
467
467
468 status = repo.status(match=match)
468 status = repo.status(match=match)
469
469
470 overrides = {(b'ui', b'commitsubrepos'): True}
470 overrides = {(b'ui', b'commitsubrepos'): True}
471
471
472 with repo.ui.configoverride(overrides, b'record'):
472 with repo.ui.configoverride(overrides, b'record'):
473 # subrepoutil.precommit() modifies the status
473 # subrepoutil.precommit() modifies the status
474 tmpstatus = scmutil.status(
474 tmpstatus = scmutil.status(
475 copymod.copy(status.modified),
475 copymod.copy(status.modified),
476 copymod.copy(status.added),
476 copymod.copy(status.added),
477 copymod.copy(status.removed),
477 copymod.copy(status.removed),
478 copymod.copy(status.deleted),
478 copymod.copy(status.deleted),
479 copymod.copy(status.unknown),
479 copymod.copy(status.unknown),
480 copymod.copy(status.ignored),
480 copymod.copy(status.ignored),
481 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
481 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
482 )
482 )
483
483
484 # Force allows -X subrepo to skip the subrepo.
484 # Force allows -X subrepo to skip the subrepo.
485 subs, commitsubs, newstate = subrepoutil.precommit(
485 subs, commitsubs, newstate = subrepoutil.precommit(
486 repo.ui, wctx, tmpstatus, match, force=True
486 repo.ui, wctx, tmpstatus, match, force=True
487 )
487 )
488 for s in subs:
488 for s in subs:
489 if s in commitsubs:
489 if s in commitsubs:
490 dirtyreason = wctx.sub(s).dirtyreason(True)
490 dirtyreason = wctx.sub(s).dirtyreason(True)
491 raise error.Abort(dirtyreason)
491 raise error.Abort(dirtyreason)
492
492
493 if not force:
493 if not force:
494 repo.checkcommitpatterns(wctx, match, status, fail)
494 repo.checkcommitpatterns(wctx, match, status, fail)
495 diffopts = patch.difffeatureopts(
495 diffopts = patch.difffeatureopts(
496 ui,
496 ui,
497 opts=opts,
497 opts=opts,
498 whitespace=True,
498 whitespace=True,
499 section=b'commands',
499 section=b'commands',
500 configprefix=b'commit.interactive.',
500 configprefix=b'commit.interactive.',
501 )
501 )
502 diffopts.nodates = True
502 diffopts.nodates = True
503 diffopts.git = True
503 diffopts.git = True
504 diffopts.showfunc = True
504 diffopts.showfunc = True
505 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
505 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
506 originalchunks = patch.parsepatch(originaldiff)
506 originalchunks = patch.parsepatch(originaldiff)
507 match = scmutil.match(repo[None], pats)
507 match = scmutil.match(repo[None], pats)
508
508
509 # 1. filter patch, since we are intending to apply subset of it
509 # 1. filter patch, since we are intending to apply subset of it
510 try:
510 try:
511 chunks, newopts = filterfn(ui, originalchunks, match)
511 chunks, newopts = filterfn(ui, originalchunks, match)
512 except error.PatchError as err:
512 except error.PatchError as err:
513 raise error.Abort(_(b'error parsing patch: %s') % err)
513 raise error.Abort(_(b'error parsing patch: %s') % err)
514 opts.update(newopts)
514 opts.update(newopts)
515
515
516 # We need to keep a backup of files that have been newly added and
516 # We need to keep a backup of files that have been newly added and
517 # modified during the recording process because there is a previous
517 # modified during the recording process because there is a previous
518 # version without the edit in the workdir. We will also need to restore
518 # version without the edit in the workdir. We will also need to restore
519 # files that were the sources of renames so that the patch application
519 # files that were the sources of renames so that the patch application
520 # works.
520 # works.
521 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
521 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
522 chunks, originalchunks
522 chunks, originalchunks
523 )
523 )
524 contenders = set()
524 contenders = set()
525 for h in chunks:
525 for h in chunks:
526 try:
526 try:
527 contenders.update(set(h.files()))
527 contenders.update(set(h.files()))
528 except AttributeError:
528 except AttributeError:
529 pass
529 pass
530
530
531 changed = status.modified + status.added + status.removed
531 changed = status.modified + status.added + status.removed
532 newfiles = [f for f in changed if f in contenders]
532 newfiles = [f for f in changed if f in contenders]
533 if not newfiles:
533 if not newfiles:
534 ui.status(_(b'no changes to record\n'))
534 ui.status(_(b'no changes to record\n'))
535 return 0
535 return 0
536
536
537 modified = set(status.modified)
537 modified = set(status.modified)
538
538
539 # 2. backup changed files, so we can restore them in the end
539 # 2. backup changed files, so we can restore them in the end
540
540
541 if backupall:
541 if backupall:
542 tobackup = changed
542 tobackup = changed
543 else:
543 else:
544 tobackup = [
544 tobackup = [
545 f
545 f
546 for f in newfiles
546 for f in newfiles
547 if f in modified or f in newlyaddedandmodifiedfiles
547 if f in modified or f in newlyaddedandmodifiedfiles
548 ]
548 ]
549 backups = {}
549 backups = {}
550 if tobackup:
550 if tobackup:
551 backupdir = repo.vfs.join(b'record-backups')
551 backupdir = repo.vfs.join(b'record-backups')
552 try:
552 try:
553 os.mkdir(backupdir)
553 os.mkdir(backupdir)
554 except OSError as err:
554 except OSError as err:
555 if err.errno != errno.EEXIST:
555 if err.errno != errno.EEXIST:
556 raise
556 raise
557 try:
557 try:
558 # backup continues
558 # backup continues
559 for f in tobackup:
559 for f in tobackup:
560 fd, tmpname = pycompat.mkstemp(
560 fd, tmpname = pycompat.mkstemp(
561 prefix=os.path.basename(f) + b'.', dir=backupdir
561 prefix=os.path.basename(f) + b'.', dir=backupdir
562 )
562 )
563 os.close(fd)
563 os.close(fd)
564 ui.debug(b'backup %r as %r\n' % (f, tmpname))
564 ui.debug(b'backup %r as %r\n' % (f, tmpname))
565 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
565 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
566 backups[f] = tmpname
566 backups[f] = tmpname
567
567
568 fp = stringio()
568 fp = stringio()
569 for c in chunks:
569 for c in chunks:
570 fname = c.filename()
570 fname = c.filename()
571 if fname in backups:
571 if fname in backups:
572 c.write(fp)
572 c.write(fp)
573 dopatch = fp.tell()
573 dopatch = fp.tell()
574 fp.seek(0)
574 fp.seek(0)
575
575
576 # 2.5 optionally review / modify patch in text editor
576 # 2.5 optionally review / modify patch in text editor
577 if opts.get(b'review', False):
577 if opts.get(b'review', False):
578 patchtext = (
578 patchtext = (
579 crecordmod.diffhelptext
579 crecordmod.diffhelptext
580 + crecordmod.patchhelptext
580 + crecordmod.patchhelptext
581 + fp.read()
581 + fp.read()
582 )
582 )
583 reviewedpatch = ui.edit(
583 reviewedpatch = ui.edit(
584 patchtext, b"", action=b"diff", repopath=repo.path
584 patchtext, b"", action=b"diff", repopath=repo.path
585 )
585 )
586 fp.truncate(0)
586 fp.truncate(0)
587 fp.write(reviewedpatch)
587 fp.write(reviewedpatch)
588 fp.seek(0)
588 fp.seek(0)
589
589
590 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
590 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
591 # 3a. apply filtered patch to clean repo (clean)
591 # 3a. apply filtered patch to clean repo (clean)
592 if backups:
592 if backups:
593 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
593 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
594 mergemod.revert_to(repo[b'.'], matcher=m)
594 mergemod.revert_to(repo[b'.'], matcher=m)
595
595
596 # 3b. (apply)
596 # 3b. (apply)
597 if dopatch:
597 if dopatch:
598 try:
598 try:
599 ui.debug(b'applying patch\n')
599 ui.debug(b'applying patch\n')
600 ui.debug(fp.getvalue())
600 ui.debug(fp.getvalue())
601 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
601 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
602 except error.PatchError as err:
602 except error.PatchError as err:
603 raise error.Abort(pycompat.bytestr(err))
603 raise error.Abort(pycompat.bytestr(err))
604 del fp
604 del fp
605
605
606 # 4. We prepared working directory according to filtered
606 # 4. We prepared working directory according to filtered
607 # patch. Now is the time to delegate the job to
607 # patch. Now is the time to delegate the job to
608 # commit/qrefresh or the like!
608 # commit/qrefresh or the like!
609
609
610 # Make all of the pathnames absolute.
610 # Make all of the pathnames absolute.
611 newfiles = [repo.wjoin(nf) for nf in newfiles]
611 newfiles = [repo.wjoin(nf) for nf in newfiles]
612 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
612 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
613 finally:
613 finally:
614 # 5. finally restore backed-up files
614 # 5. finally restore backed-up files
615 try:
615 try:
616 dirstate = repo.dirstate
616 dirstate = repo.dirstate
617 for realname, tmpname in pycompat.iteritems(backups):
617 for realname, tmpname in pycompat.iteritems(backups):
618 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
618 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
619
619
620 if dirstate[realname] == b'n':
620 if dirstate[realname] == b'n':
621 # without normallookup, restoring timestamp
621 # without normallookup, restoring timestamp
622 # may cause partially committed files
622 # may cause partially committed files
623 # to be treated as unmodified
623 # to be treated as unmodified
624 dirstate.normallookup(realname)
624 dirstate.normallookup(realname)
625
625
626 # copystat=True here and above are a hack to trick any
626 # copystat=True here and above are a hack to trick any
627 # editors that have f open into thinking that we haven't modified them.
627 # editors that have f open into thinking that we haven't modified them.
628 #
628 #
629 # Also note that this is racy, as an editor could notice the
629 # Also note that this is racy, as an editor could notice the
630 # file's mtime before we've finished writing it.
630 # file's mtime before we've finished writing it.
631 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
631 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
632 os.unlink(tmpname)
632 os.unlink(tmpname)
633 if tobackup:
633 if tobackup:
634 os.rmdir(backupdir)
634 os.rmdir(backupdir)
635 except OSError:
635 except OSError:
636 pass
636 pass
637
637
638 def recordinwlock(ui, repo, message, match, opts):
638 def recordinwlock(ui, repo, message, match, opts):
639 with repo.wlock():
639 with repo.wlock():
640 return recordfunc(ui, repo, message, match, opts)
640 return recordfunc(ui, repo, message, match, opts)
641
641
642 return commit(ui, repo, recordinwlock, pats, opts)
642 return commit(ui, repo, recordinwlock, pats, opts)
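A standalone sketch of the copy-aside-then-restore pattern that steps 2 and 5 above implement with repo-aware helpers; the helper name with_backups and the plain shutil/tempfile calls are illustrative stand-ins, not part of this module.

import os
import shutil
import tempfile

def with_backups(paths, mutate):
    """Run mutate() while keeping restorable copies of the given files."""
    backupdir = tempfile.mkdtemp(prefix='record-backups-')
    backups = {}
    try:
        for p in paths:
            fd, tmpname = tempfile.mkstemp(
                prefix=os.path.basename(p) + '.', dir=backupdir
            )
            os.close(fd)
            shutil.copy2(p, tmpname)  # like copystat=True: keep mode and mtime
            backups[p] = tmpname
        mutate()  # e.g. apply the filtered patch to the working files
    finally:
        for realname, tmpname in backups.items():
            shutil.copy2(tmpname, realname)  # put the original content back
            os.unlink(tmpname)
        os.rmdir(backupdir)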
643
643
644
644
645 class dirnode(object):
645 class dirnode(object):
646 """
646 """
647 Represent a directory in user working copy with information required for
647 Represent a directory in user working copy with information required for
648 the purpose of tersing its status.
648 the purpose of tersing its status.
649
649
650 path is the path to the directory, without a trailing '/'
650 path is the path to the directory, without a trailing '/'
651
651
652 statuses is a set of statuses of all files in this directory (this includes
652 statuses is a set of statuses of all files in this directory (this includes
653 all the files in all the subdirectories too)
653 all the files in all the subdirectories too)
654
654
655 files is a list of files which are direct children of this directory
655 files is a list of files which are direct children of this directory
656
656
657 subdirs is a dictionary with the sub-directory name as the key and its own
657 subdirs is a dictionary with the sub-directory name as the key and its own
658 dirnode object as the value
658 dirnode object as the value
659 """
659 """
660
660
661 def __init__(self, dirpath):
661 def __init__(self, dirpath):
662 self.path = dirpath
662 self.path = dirpath
663 self.statuses = set()
663 self.statuses = set()
664 self.files = []
664 self.files = []
665 self.subdirs = {}
665 self.subdirs = {}
666
666
667 def _addfileindir(self, filename, status):
667 def _addfileindir(self, filename, status):
668 """Add a file in this directory as a direct child."""
668 """Add a file in this directory as a direct child."""
669 self.files.append((filename, status))
669 self.files.append((filename, status))
670
670
671 def addfile(self, filename, status):
671 def addfile(self, filename, status):
672 """
672 """
673 Add a file to this directory or to its direct parent directory.
673 Add a file to this directory or to its direct parent directory.
674
674
675 If the file is not a direct child of this directory, we traverse to the
675 If the file is not a direct child of this directory, we traverse to the
676 directory of which this file is a direct child and add the file
676 directory of which this file is a direct child and add the file
677 there.
677 there.
678 """
678 """
679
679
680 # if the filename contains a path separator, it means it's not a direct
680 # if the filename contains a path separator, it means it's not a direct
681 # child of this directory
681 # child of this directory
682 if b'/' in filename:
682 if b'/' in filename:
683 subdir, filep = filename.split(b'/', 1)
683 subdir, filep = filename.split(b'/', 1)
684
684
685 # does the dirnode object for subdir exist?
685 # does the dirnode object for subdir exist?
686 if subdir not in self.subdirs:
686 if subdir not in self.subdirs:
687 subdirpath = pathutil.join(self.path, subdir)
687 subdirpath = pathutil.join(self.path, subdir)
688 self.subdirs[subdir] = dirnode(subdirpath)
688 self.subdirs[subdir] = dirnode(subdirpath)
689
689
690 # try adding the file in subdir
690 # try adding the file in subdir
691 self.subdirs[subdir].addfile(filep, status)
691 self.subdirs[subdir].addfile(filep, status)
692
692
693 else:
693 else:
694 self._addfileindir(filename, status)
694 self._addfileindir(filename, status)
695
695
696 if status not in self.statuses:
696 if status not in self.statuses:
697 self.statuses.add(status)
697 self.statuses.add(status)
698
698
699 def iterfilepaths(self):
699 def iterfilepaths(self):
700 """Yield (status, path) for files directly under this directory."""
700 """Yield (status, path) for files directly under this directory."""
701 for f, st in self.files:
701 for f, st in self.files:
702 yield st, pathutil.join(self.path, f)
702 yield st, pathutil.join(self.path, f)
703
703
704 def tersewalk(self, terseargs):
704 def tersewalk(self, terseargs):
705 """
705 """
706 Yield (status, path) obtained by processing the status of this
706 Yield (status, path) obtained by processing the status of this
707 dirnode.
707 dirnode.
708
708
709 terseargs is the string of arguments passed by the user with `--terse`
709 terseargs is the string of arguments passed by the user with `--terse`
710 flag.
710 flag.
711
711
712 Following are the cases which can happen:
712 Following are the cases which can happen:
713
713
714 1) All the files in the directory (including all the files in its
714 1) All the files in the directory (including all the files in its
715 subdirectories) share the same status and the user has asked us to terse
715 subdirectories) share the same status and the user has asked us to terse
716 that status. -> yield (status, dirpath). dirpath will end in '/'.
716 that status. -> yield (status, dirpath). dirpath will end in '/'.
717
717
718 2) Otherwise, we do the following:
718 2) Otherwise, we do the following:
719
719
720 a) Yield (status, filepath) for all the files which are in this
720 a) Yield (status, filepath) for all the files which are in this
721 directory (only the ones in this directory, not the subdirs)
721 directory (only the ones in this directory, not the subdirs)
722
722
723 b) Recurse the function on all the subdirectories of this
723 b) Recurse the function on all the subdirectories of this
724 directory
724 directory
725 """
725 """
726
726
727 if len(self.statuses) == 1:
727 if len(self.statuses) == 1:
728 onlyst = self.statuses.pop()
728 onlyst = self.statuses.pop()
729
729
730 # Making sure we terse only when the status abbreviation is
730 # Making sure we terse only when the status abbreviation is
731 # passed as a terse argument
731 # passed as a terse argument
732 if onlyst in terseargs:
732 if onlyst in terseargs:
733 yield onlyst, self.path + b'/'
733 yield onlyst, self.path + b'/'
734 return
734 return
735
735
736 # add the files to status list
736 # add the files to status list
737 for st, fpath in self.iterfilepaths():
737 for st, fpath in self.iterfilepaths():
738 yield st, fpath
738 yield st, fpath
739
739
740 # recurse on the subdirs
740 # recurse on the subdirs
741 for dirobj in self.subdirs.values():
741 for dirobj in self.subdirs.values():
742 for st, fpath in dirobj.tersewalk(terseargs):
742 for st, fpath in dirobj.tersewalk(terseargs):
743 yield st, fpath
743 yield st, fpath
744
744
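A small usage sketch for the class above; the file names and the b'u' terse argument are invented, and cmdutil is assumed to be importable as mercurial.cmdutil.

from mercurial import cmdutil

root = cmdutil.dirnode(b'')
root.addfile(b'pkg/mod/a.py', b'u')
root.addfile(b'pkg/mod/b.py', b'u')
root.addfile(b'README', b'm')

# every file under pkg/ is unknown, so tersewalk collapses it to one entry
assert list(root.subdirs[b'pkg'].tersewalk(b'u')) == [(b'u', b'pkg/')]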
745
745
746 def tersedir(statuslist, terseargs):
746 def tersedir(statuslist, terseargs):
747 """
747 """
748 Terse the status if all the files in a directory share the same status.
748 Terse the status if all the files in a directory share the same status.
749
749
750 statuslist is a scmutil.status() object which contains a list of files for
750 statuslist is a scmutil.status() object which contains a list of files for
751 each status.
751 each status.
752 terseargs is the string passed by the user as the argument to the `--terse`
752 terseargs is the string passed by the user as the argument to the `--terse`
753 flag.
753 flag.
754
754
755 The function makes a tree of objects of dirnode class, and at each node it
755 The function makes a tree of objects of dirnode class, and at each node it
756 stores the information required to know whether we can terse a certain
756 stores the information required to know whether we can terse a certain
757 directory or not.
757 directory or not.
758 """
758 """
759 # the order matters here as that is used to produce the final list
759 # the order matters here as that is used to produce the final list
760 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
760 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
761
761
762 # checking the argument validity
762 # checking the argument validity
763 for s in pycompat.bytestr(terseargs):
763 for s in pycompat.bytestr(terseargs):
764 if s not in allst:
764 if s not in allst:
765 raise error.Abort(_(b"'%s' not recognized") % s)
765 raise error.Abort(_(b"'%s' not recognized") % s)
766
766
767 # creating a dirnode object for the root of the repo
767 # creating a dirnode object for the root of the repo
768 rootobj = dirnode(b'')
768 rootobj = dirnode(b'')
769 pstatus = (
769 pstatus = (
770 b'modified',
770 b'modified',
771 b'added',
771 b'added',
772 b'deleted',
772 b'deleted',
773 b'clean',
773 b'clean',
774 b'unknown',
774 b'unknown',
775 b'ignored',
775 b'ignored',
776 b'removed',
776 b'removed',
777 )
777 )
778
778
779 tersedict = {}
779 tersedict = {}
780 for attrname in pstatus:
780 for attrname in pstatus:
781 statuschar = attrname[0:1]
781 statuschar = attrname[0:1]
782 for f in getattr(statuslist, attrname):
782 for f in getattr(statuslist, attrname):
783 rootobj.addfile(f, statuschar)
783 rootobj.addfile(f, statuschar)
784 tersedict[statuschar] = []
784 tersedict[statuschar] = []
785
785
786 # we won't be tersing the root dir, so add files in it
786 # we won't be tersing the root dir, so add files in it
787 for st, fpath in rootobj.iterfilepaths():
787 for st, fpath in rootobj.iterfilepaths():
788 tersedict[st].append(fpath)
788 tersedict[st].append(fpath)
789
789
790 # process each sub-directory and build tersedict
790 # process each sub-directory and build tersedict
791 for subdir in rootobj.subdirs.values():
791 for subdir in rootobj.subdirs.values():
792 for st, f in subdir.tersewalk(terseargs):
792 for st, f in subdir.tersewalk(terseargs):
793 tersedict[st].append(f)
793 tersedict[st].append(f)
794
794
795 tersedlist = []
795 tersedlist = []
796 for st in allst:
796 for st in allst:
797 tersedict[st].sort()
797 tersedict[st].sort()
798 tersedlist.append(tersedict[st])
798 tersedlist.append(tersedict[st])
799
799
800 return scmutil.status(*tersedlist)
800 return scmutil.status(*tersedlist)
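An end-to-end sketch of tersedir(); the status contents are invented, and the positional field order of scmutil.status() follows the allst ordering above.

from mercurial import cmdutil, scmutil

# modified, added, removed, deleted, unknown, ignored, clean
st = scmutil.status([], [], [], [], [b'newdir/a.txt', b'newdir/b.txt'], [], [])
terse = cmdutil.tersedir(st, b'u')
# both unknown files live under newdir/, so they collapse to a single entry
assert terse.unknown == [b'newdir/']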
801
801
802
802
803 def _commentlines(raw):
803 def _commentlines(raw):
804 '''Surround lines with a comment char and a new line'''
804 '''Surround lines with a comment char and a new line'''
805 lines = raw.splitlines()
805 lines = raw.splitlines()
806 commentedlines = [b'# %s' % line for line in lines]
806 commentedlines = [b'# %s' % line for line in lines]
807 return b'\n'.join(commentedlines) + b'\n'
807 return b'\n'.join(commentedlines) + b'\n'
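A quick illustration of the helper's output shape; the input text is arbitrary.

from mercurial import cmdutil

assert cmdutil._commentlines(b'first\nsecond') == b'# first\n# second\n'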
808
808
809
809
810 @attr.s(frozen=True)
810 @attr.s(frozen=True)
811 class morestatus(object):
811 class morestatus(object):
812 reporoot = attr.ib()
812 reporoot = attr.ib()
813 unfinishedop = attr.ib()
813 unfinishedop = attr.ib()
814 unfinishedmsg = attr.ib()
814 unfinishedmsg = attr.ib()
815 activemerge = attr.ib()
815 activemerge = attr.ib()
816 unresolvedpaths = attr.ib()
816 unresolvedpaths = attr.ib()
817 _formattedpaths = attr.ib(init=False, default=set())
817 _formattedpaths = attr.ib(init=False, default=set())
818 _label = b'status.morestatus'
818 _label = b'status.morestatus'
819
819
820 def formatfile(self, path, fm):
820 def formatfile(self, path, fm):
821 self._formattedpaths.add(path)
821 self._formattedpaths.add(path)
822 if self.activemerge and path in self.unresolvedpaths:
822 if self.activemerge and path in self.unresolvedpaths:
823 fm.data(unresolved=True)
823 fm.data(unresolved=True)
824
824
825 def formatfooter(self, fm):
825 def formatfooter(self, fm):
826 if self.unfinishedop or self.unfinishedmsg:
826 if self.unfinishedop or self.unfinishedmsg:
827 fm.startitem()
827 fm.startitem()
828 fm.data(itemtype=b'morestatus')
828 fm.data(itemtype=b'morestatus')
829
829
830 if self.unfinishedop:
830 if self.unfinishedop:
831 fm.data(unfinished=self.unfinishedop)
831 fm.data(unfinished=self.unfinishedop)
832 statemsg = (
832 statemsg = (
833 _(b'The repository is in an unfinished *%s* state.')
833 _(b'The repository is in an unfinished *%s* state.')
834 % self.unfinishedop
834 % self.unfinishedop
835 )
835 )
836 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
836 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
837 if self.unfinishedmsg:
837 if self.unfinishedmsg:
838 fm.data(unfinishedmsg=self.unfinishedmsg)
838 fm.data(unfinishedmsg=self.unfinishedmsg)
839
839
840 # May also start new data items.
840 # May also start new data items.
841 self._formatconflicts(fm)
841 self._formatconflicts(fm)
842
842
843 if self.unfinishedmsg:
843 if self.unfinishedmsg:
844 fm.plain(
844 fm.plain(
845 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
845 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
846 )
846 )
847
847
848 def _formatconflicts(self, fm):
848 def _formatconflicts(self, fm):
849 if not self.activemerge:
849 if not self.activemerge:
850 return
850 return
851
851
852 if self.unresolvedpaths:
852 if self.unresolvedpaths:
853 mergeliststr = b'\n'.join(
853 mergeliststr = b'\n'.join(
854 [
854 [
855 b' %s'
855 b' %s'
856 % util.pathto(self.reporoot, encoding.getcwd(), path)
856 % util.pathto(self.reporoot, encoding.getcwd(), path)
857 for path in self.unresolvedpaths
857 for path in self.unresolvedpaths
858 ]
858 ]
859 )
859 )
860 msg = (
860 msg = (
861 _(
861 _(
862 '''Unresolved merge conflicts:
862 '''Unresolved merge conflicts:
863
863
864 %s
864 %s
865
865
866 To mark files as resolved: hg resolve --mark FILE'''
866 To mark files as resolved: hg resolve --mark FILE'''
867 )
867 )
868 % mergeliststr
868 % mergeliststr
869 )
869 )
870
870
871 # If any paths with unresolved conflicts were not previously
871 # If any paths with unresolved conflicts were not previously
872 # formatted, output them now.
872 # formatted, output them now.
873 for f in self.unresolvedpaths:
873 for f in self.unresolvedpaths:
874 if f in self._formattedpaths:
874 if f in self._formattedpaths:
875 # Already output.
875 # Already output.
876 continue
876 continue
877 fm.startitem()
877 fm.startitem()
878 # We can't claim to know the status of the file - it may just
878 # We can't claim to know the status of the file - it may just
879 # have been in one of the states that were not requested for
879 # have been in one of the states that were not requested for
880 # display, so it could be anything.
880 # display, so it could be anything.
881 fm.data(itemtype=b'file', path=f, unresolved=True)
881 fm.data(itemtype=b'file', path=f, unresolved=True)
882
882
883 else:
883 else:
884 msg = _(b'No unresolved merge conflicts.')
884 msg = _(b'No unresolved merge conflicts.')
885
885
886 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
886 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
887
887
888
888
889 def readmorestatus(repo):
889 def readmorestatus(repo):
890 """Returns a morestatus object if the repo has unfinished state."""
890 """Returns a morestatus object if the repo has unfinished state."""
891 statetuple = statemod.getrepostate(repo)
891 statetuple = statemod.getrepostate(repo)
892 mergestate = mergestatemod.mergestate.read(repo)
892 mergestate = mergestatemod.mergestate.read(repo)
893 activemerge = mergestate.active()
893 activemerge = mergestate.active()
894 if not statetuple and not activemerge:
894 if not statetuple and not activemerge:
895 return None
895 return None
896
896
897 unfinishedop = unfinishedmsg = unresolved = None
897 unfinishedop = unfinishedmsg = unresolved = None
898 if statetuple:
898 if statetuple:
899 unfinishedop, unfinishedmsg = statetuple
899 unfinishedop, unfinishedmsg = statetuple
900 if activemerge:
900 if activemerge:
901 unresolved = sorted(mergestate.unresolved())
901 unresolved = sorted(mergestate.unresolved())
902 return morestatus(
902 return morestatus(
903 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
903 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
904 )
904 )
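A sketch of how a caller might combine readmorestatus() with a formatter; it assumes the current directory is a repository and uses an empty options dict, so a plain (template-less) formatter is returned.

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo
ms = cmdutil.readmorestatus(repo)
if ms is None:
    u.write(b'nothing unfinished\n')
else:
    fm = u.formatter(b'status', {})  # no template: plain formatter
    ms.formatfooter(fm)  # emits the commented "unfinished state" block
    fm.end()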
905
905
906
906
907 def findpossible(cmd, table, strict=False):
907 def findpossible(cmd, table, strict=False):
908 """
908 """
909 Return cmd -> (aliases, command table entry)
909 Return cmd -> (aliases, command table entry)
910 for each matching command.
910 for each matching command.
911 Return debug commands (or their aliases) only if no normal command matches.
911 Return debug commands (or their aliases) only if no normal command matches.
912 """
912 """
913 choice = {}
913 choice = {}
914 debugchoice = {}
914 debugchoice = {}
915
915
916 if cmd in table:
916 if cmd in table:
917 # short-circuit exact matches, "log" alias beats "log|history"
917 # short-circuit exact matches, "log" alias beats "log|history"
918 keys = [cmd]
918 keys = [cmd]
919 else:
919 else:
920 keys = table.keys()
920 keys = table.keys()
921
921
922 allcmds = []
922 allcmds = []
923 for e in keys:
923 for e in keys:
924 aliases = parsealiases(e)
924 aliases = parsealiases(e)
925 allcmds.extend(aliases)
925 allcmds.extend(aliases)
926 found = None
926 found = None
927 if cmd in aliases:
927 if cmd in aliases:
928 found = cmd
928 found = cmd
929 elif not strict:
929 elif not strict:
930 for a in aliases:
930 for a in aliases:
931 if a.startswith(cmd):
931 if a.startswith(cmd):
932 found = a
932 found = a
933 break
933 break
934 if found is not None:
934 if found is not None:
935 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
935 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
936 debugchoice[found] = (aliases, table[e])
936 debugchoice[found] = (aliases, table[e])
937 else:
937 else:
938 choice[found] = (aliases, table[e])
938 choice[found] = (aliases, table[e])
939
939
940 if not choice and debugchoice:
940 if not choice and debugchoice:
941 choice = debugchoice
941 choice = debugchoice
942
942
943 return choice, allcmds
943 return choice, allcmds
944
944
945
945
946 def findcmd(cmd, table, strict=True):
946 def findcmd(cmd, table, strict=True):
947 """Return (aliases, command table entry) for command string."""
947 """Return (aliases, command table entry) for command string."""
948 choice, allcmds = findpossible(cmd, table, strict)
948 choice, allcmds = findpossible(cmd, table, strict)
949
949
950 if cmd in choice:
950 if cmd in choice:
951 return choice[cmd]
951 return choice[cmd]
952
952
953 if len(choice) > 1:
953 if len(choice) > 1:
954 clist = sorted(choice)
954 clist = sorted(choice)
955 raise error.AmbiguousCommand(cmd, clist)
955 raise error.AmbiguousCommand(cmd, clist)
956
956
957 if choice:
957 if choice:
958 return list(choice.values())[0]
958 return list(choice.values())[0]
959
959
960 raise error.UnknownCommand(cmd, allcmds)
960 raise error.UnknownCommand(cmd, allcmds)
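A sketch of prefix resolution against the stock command table; the exact set of commands, and therefore which prefixes are ambiguous, depends on the installed version and enabled extensions.

from mercurial import cmdutil, commands, error

# b'stat' is an unambiguous prefix of b'status' in the default table
aliases, entry = cmdutil.findcmd(b'stat', commands.table, strict=False)
assert aliases[0] == b'status'

try:
    cmdutil.findcmd(b's', commands.table, strict=False)
except error.AmbiguousCommand:
    pass  # b's' matches several commands (serve, status, summary, ...)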
961
961
962
962
963 def changebranch(ui, repo, revs, label, opts):
963 def changebranch(ui, repo, revs, label, opts):
964 """ Change the branch name of the given revs to label """
964 """ Change the branch name of the given revs to label """
965
965
966 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
966 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
967 # abort in case of uncommitted merge or dirty wdir
967 # abort in case of uncommitted merge or dirty wdir
968 bailifchanged(repo)
968 bailifchanged(repo)
969 revs = scmutil.revrange(repo, revs)
969 revs = scmutil.revrange(repo, revs)
970 if not revs:
970 if not revs:
971 raise error.Abort(b"empty revision set")
971 raise error.Abort(b"empty revision set")
972 roots = repo.revs(b'roots(%ld)', revs)
972 roots = repo.revs(b'roots(%ld)', revs)
973 if len(roots) > 1:
973 if len(roots) > 1:
974 raise error.Abort(
974 raise error.Abort(
975 _(b"cannot change branch of non-linear revisions")
975 _(b"cannot change branch of non-linear revisions")
976 )
976 )
977 rewriteutil.precheck(repo, revs, b'change branch of')
977 rewriteutil.precheck(repo, revs, b'change branch of')
978
978
979 root = repo[roots.first()]
979 root = repo[roots.first()]
980 rpb = {parent.branch() for parent in root.parents()}
980 rpb = {parent.branch() for parent in root.parents()}
981 if (
981 if (
982 not opts.get(b'force')
982 not opts.get(b'force')
983 and label not in rpb
983 and label not in rpb
984 and label in repo.branchmap()
984 and label in repo.branchmap()
985 ):
985 ):
986 raise error.Abort(_(b"a branch of the same name already exists"))
986 raise error.Abort(_(b"a branch of the same name already exists"))
987
987
988 if repo.revs(b'obsolete() and %ld', revs):
988 if repo.revs(b'obsolete() and %ld', revs):
989 raise error.Abort(
989 raise error.Abort(
990 _(b"cannot change branch of an obsolete changeset")
990 _(b"cannot change branch of an obsolete changeset")
991 )
991 )
992
992
993 # make sure only topological heads
993 # make sure only topological heads
994 if repo.revs(b'heads(%ld) - head()', revs):
994 if repo.revs(b'heads(%ld) - head()', revs):
995 raise error.Abort(_(b"cannot change branch in middle of a stack"))
995 raise error.Abort(_(b"cannot change branch in middle of a stack"))
996
996
997 replacements = {}
997 replacements = {}
998 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
998 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
999 # mercurial.subrepo -> mercurial.cmdutil
999 # mercurial.subrepo -> mercurial.cmdutil
1000 from . import context
1000 from . import context
1001
1001
1002 for rev in revs:
1002 for rev in revs:
1003 ctx = repo[rev]
1003 ctx = repo[rev]
1004 oldbranch = ctx.branch()
1004 oldbranch = ctx.branch()
1005 # check if ctx has same branch
1005 # check if ctx has same branch
1006 if oldbranch == label:
1006 if oldbranch == label:
1007 continue
1007 continue
1008
1008
1009 def filectxfn(repo, newctx, path):
1009 def filectxfn(repo, newctx, path):
1010 try:
1010 try:
1011 return ctx[path]
1011 return ctx[path]
1012 except error.ManifestLookupError:
1012 except error.ManifestLookupError:
1013 return None
1013 return None
1014
1014
1015 ui.debug(
1015 ui.debug(
1016 b"changing branch of '%s' from '%s' to '%s'\n"
1016 b"changing branch of '%s' from '%s' to '%s'\n"
1017 % (hex(ctx.node()), oldbranch, label)
1017 % (hex(ctx.node()), oldbranch, label)
1018 )
1018 )
1019 extra = ctx.extra()
1019 extra = ctx.extra()
1020 extra[b'branch_change'] = hex(ctx.node())
1020 extra[b'branch_change'] = hex(ctx.node())
1021 # While changing the branch of a set of linear commits, make sure
1021 # While changing the branch of a set of linear commits, make sure
1022 # that we base our commits on the new parent rather than the old
1022 # that we base our commits on the new parent rather than the old
1023 # parent, which was obsoleted while changing the branch
1023 # parent, which was obsoleted while changing the branch
1024 p1 = ctx.p1().node()
1024 p1 = ctx.p1().node()
1025 p2 = ctx.p2().node()
1025 p2 = ctx.p2().node()
1026 if p1 in replacements:
1026 if p1 in replacements:
1027 p1 = replacements[p1][0]
1027 p1 = replacements[p1][0]
1028 if p2 in replacements:
1028 if p2 in replacements:
1029 p2 = replacements[p2][0]
1029 p2 = replacements[p2][0]
1030
1030
1031 mc = context.memctx(
1031 mc = context.memctx(
1032 repo,
1032 repo,
1033 (p1, p2),
1033 (p1, p2),
1034 ctx.description(),
1034 ctx.description(),
1035 ctx.files(),
1035 ctx.files(),
1036 filectxfn,
1036 filectxfn,
1037 user=ctx.user(),
1037 user=ctx.user(),
1038 date=ctx.date(),
1038 date=ctx.date(),
1039 extra=extra,
1039 extra=extra,
1040 branch=label,
1040 branch=label,
1041 )
1041 )
1042
1042
1043 newnode = repo.commitctx(mc)
1043 newnode = repo.commitctx(mc)
1044 replacements[ctx.node()] = (newnode,)
1044 replacements[ctx.node()] = (newnode,)
1045 ui.debug(b'new node id is %s\n' % hex(newnode))
1045 ui.debug(b'new node id is %s\n' % hex(newnode))
1046
1046
1047 # create obsmarkers and move bookmarks
1047 # create obsmarkers and move bookmarks
1048 scmutil.cleanupnodes(
1048 scmutil.cleanupnodes(
1049 repo, replacements, b'branch-change', fixphase=True
1049 repo, replacements, b'branch-change', fixphase=True
1050 )
1050 )
1051
1051
1052 # move the working copy too
1052 # move the working copy too
1053 wctx = repo[None]
1053 wctx = repo[None]
1054 # in-progress merge is a bit too complex for now.
1054 # in-progress merge is a bit too complex for now.
1055 if len(wctx.parents()) == 1:
1055 if len(wctx.parents()) == 1:
1056 newid = replacements.get(wctx.p1().node())
1056 newid = replacements.get(wctx.p1().node())
1057 if newid is not None:
1057 if newid is not None:
1058 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1058 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1059 # mercurial.cmdutil
1059 # mercurial.cmdutil
1060 from . import hg
1060 from . import hg
1061
1061
1062 hg.update(repo, newid[0], quietempty=True)
1062 hg.update(repo, newid[0], quietempty=True)
1063
1063
1064 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1064 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
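A toy model of the parent-chasing done through the replacements dict above; the node ids are made up and repo.commitctx() is replaced by a trivial stand-in.

# toy linear chain with made-up ids: a <- b <- c, all getting a new branch
old_parent = {b'a': b'root', b'b': b'a', b'c': b'b'}
new_parent = {}
replacements = {}

for old in (b'a', b'b', b'c'):
    p1 = old_parent[old]
    if p1 in replacements:  # same chase as `p1 = replacements[p1][0]` above
        p1 = replacements[p1][0]
    new = old + b"'"  # stand-in for the node returned by repo.commitctx(mc)
    new_parent[new] = p1
    replacements[old] = (new,)

# each rewritten commit sits on top of the rewritten parent, not the old one
assert new_parent[b"b'"] == b"a'"
assert new_parent[b"c'"] == b"b'"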
1065
1065
1066
1066
1067 def findrepo(p):
1067 def findrepo(p):
1068 while not os.path.isdir(os.path.join(p, b".hg")):
1068 while not os.path.isdir(os.path.join(p, b".hg")):
1069 oldp, p = p, os.path.dirname(p)
1069 oldp, p = p, os.path.dirname(p)
1070 if p == oldp:
1070 if p == oldp:
1071 return None
1071 return None
1072
1072
1073 return p
1073 return p
1074
1074
1075
1075
1076 def bailifchanged(repo, merge=True, hint=None):
1076 def bailifchanged(repo, merge=True, hint=None):
1077 """ enforce the precondition that the working directory must be clean.
1077 """ enforce the precondition that the working directory must be clean.
1078
1078
1079 'merge' can be set to false if a pending uncommitted merge should be
1079 'merge' can be set to false if a pending uncommitted merge should be
1080 ignored (such as when 'update --check' runs).
1080 ignored (such as when 'update --check' runs).
1081
1081
1082 'hint' is the usual hint given to Abort exception.
1082 'hint' is the usual hint given to Abort exception.
1083 """
1083 """
1084
1084
1085 if merge and repo.dirstate.p2() != nullid:
1085 if merge and repo.dirstate.p2() != nullid:
1086 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1086 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1087 st = repo.status()
1087 st = repo.status()
1088 if st.modified or st.added or st.removed or st.deleted:
1088 if st.modified or st.added or st.removed or st.deleted:
1089 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1089 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1090 ctx = repo[None]
1090 ctx = repo[None]
1091 for s in sorted(ctx.substate):
1091 for s in sorted(ctx.substate):
1092 ctx.sub(s).bailifchanged(hint=hint)
1092 ctx.sub(s).bailifchanged(hint=hint)
1093
1093
1094
1094
1095 def logmessage(ui, opts):
1095 def logmessage(ui, opts):
1096 """ get the log message according to the -m and -l options """
1096 """ get the log message according to the -m and -l options """
1097
1097
1098 check_at_most_one_arg(opts, b'message', b'logfile')
1098 check_at_most_one_arg(opts, b'message', b'logfile')
1099
1099
1100 message = opts.get(b'message')
1100 message = opts.get(b'message')
1101 logfile = opts.get(b'logfile')
1101 logfile = opts.get(b'logfile')
1102
1102
1103 if not message and logfile:
1103 if not message and logfile:
1104 try:
1104 try:
1105 if isstdiofilename(logfile):
1105 if isstdiofilename(logfile):
1106 message = ui.fin.read()
1106 message = ui.fin.read()
1107 else:
1107 else:
1108 message = b'\n'.join(util.readfile(logfile).splitlines())
1108 message = b'\n'.join(util.readfile(logfile).splitlines())
1109 except IOError as inst:
1109 except IOError as inst:
1110 raise error.Abort(
1110 raise error.Abort(
1111 _(b"can't read commit message '%s': %s")
1111 _(b"can't read commit message '%s': %s")
1112 % (logfile, encoding.strtolocal(inst.strerror))
1112 % (logfile, encoding.strtolocal(inst.strerror))
1113 )
1113 )
1114 return message
1114 return message
1115
1115
1116
1116
1117 def mergeeditform(ctxorbool, baseformname):
1117 def mergeeditform(ctxorbool, baseformname):
1118 """return appropriate editform name (referencing a committemplate)
1118 """return appropriate editform name (referencing a committemplate)
1119
1119
1120 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1120 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1121 a merge is being committed.
1121 a merge is being committed.
1122
1122
1123 This returns baseformname with '.merge' appended if it is a merge,
1123 This returns baseformname with '.merge' appended if it is a merge,
1124 otherwise '.normal' is appended.
1124 otherwise '.normal' is appended.
1125 """
1125 """
1126 if isinstance(ctxorbool, bool):
1126 if isinstance(ctxorbool, bool):
1127 if ctxorbool:
1127 if ctxorbool:
1128 return baseformname + b".merge"
1128 return baseformname + b".merge"
1129 elif len(ctxorbool.parents()) > 1:
1129 elif len(ctxorbool.parents()) > 1:
1130 return baseformname + b".merge"
1130 return baseformname + b".merge"
1131
1131
1132 return baseformname + b".normal"
1132 return baseformname + b".normal"
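Two quick checks of the naming rule described above, using an arbitrary base form name.

from mercurial import cmdutil

assert cmdutil.mergeeditform(True, b'commit') == b'commit.merge'
assert cmdutil.mergeeditform(False, b'commit') == b'commit.normal'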
1133
1133
1134
1134
1135 def getcommiteditor(
1135 def getcommiteditor(
1136 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1136 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1137 ):
1137 ):
1138 """get appropriate commit message editor according to '--edit' option
1138 """get appropriate commit message editor according to '--edit' option
1139
1139
1140 'finishdesc' is a function to be called with edited commit message
1140 'finishdesc' is a function to be called with edited commit message
1141 (= 'description' of the new changeset) just after editing, but
1141 (= 'description' of the new changeset) just after editing, but
1142 before checking emptiness. It should return the actual text to be
1142 before checking emptiness. It should return the actual text to be
1143 stored into history. This allows the description to be changed
1143 stored into history. This allows the description to be changed
1144 before it is stored.
1144 before it is stored.
1145
1145
1146 'extramsg' is an extra message to be shown in the editor instead of
1146 'extramsg' is an extra message to be shown in the editor instead of
1147 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1147 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1148 are automatically added.
1148 are automatically added.
1149
1149
1150 'editform' is a dot-separated list of names, to distinguish
1150 'editform' is a dot-separated list of names, to distinguish
1151 the purpose of commit text editing.
1151 the purpose of commit text editing.
1152
1152
1153 'getcommiteditor' returns 'commitforceeditor' regardless of
1153 'getcommiteditor' returns 'commitforceeditor' regardless of
1154 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1154 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1155 they are specific for usage in MQ.
1155 they are specific for usage in MQ.
1156 """
1156 """
1157 if edit or finishdesc or extramsg:
1157 if edit or finishdesc or extramsg:
1158 return lambda r, c, s: commitforceeditor(
1158 return lambda r, c, s: commitforceeditor(
1159 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1159 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1160 )
1160 )
1161 elif editform:
1161 elif editform:
1162 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1162 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1163 else:
1163 else:
1164 return commiteditor
1164 return commiteditor
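A sketch of the two main return paths; commiteditor and commitforceeditor are the module-level editors referenced in the docstring.

from mercurial import cmdutil

# no --edit, no extras: the plain commit editor is returned as-is
assert cmdutil.getcommiteditor() is cmdutil.commiteditor

# --edit (or finishdesc/extramsg) yields a wrapper around commitforceeditor;
# it is a callable taking (repo, ctx, subs) and is invoked at commit time
editor = cmdutil.getcommiteditor(edit=True, editform=b'commit.normal')
assert callable(editor)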
1165
1165
1166
1166
1167 def _escapecommandtemplate(tmpl):
1167 def _escapecommandtemplate(tmpl):
1168 parts = []
1168 parts = []
1169 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1169 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1170 if typ == b'string':
1170 if typ == b'string':
1171 parts.append(stringutil.escapestr(tmpl[start:end]))
1171 parts.append(stringutil.escapestr(tmpl[start:end]))
1172 else:
1172 else:
1173 parts.append(tmpl[start:end])
1173 parts.append(tmpl[start:end])
1174 return b''.join(parts)
1174 return b''.join(parts)
1175
1175
1176
1176
1177 def rendercommandtemplate(ui, tmpl, props):
1177 def rendercommandtemplate(ui, tmpl, props):
1178 r"""Expand a literal template 'tmpl' in a way suitable for command line
1178 r"""Expand a literal template 'tmpl' in a way suitable for command line
1179
1179
1180 '\' in outermost string is not taken as an escape character because it
1180 '\' in outermost string is not taken as an escape character because it
1181 is a directory separator on Windows.
1181 is a directory separator on Windows.
1182
1182
1183 >>> from . import ui as uimod
1183 >>> from . import ui as uimod
1184 >>> ui = uimod.ui()
1184 >>> ui = uimod.ui()
1185 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1185 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1186 'c:\\foo'
1186 'c:\\foo'
1187 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1187 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1188 'c:{path}'
1188 'c:{path}'
1189 """
1189 """
1190 if not tmpl:
1190 if not tmpl:
1191 return tmpl
1191 return tmpl
1192 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1192 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1193 return t.renderdefault(props)
1193 return t.renderdefault(props)
1194
1194
1195
1195
1196 def rendertemplate(ctx, tmpl, props=None):
1196 def rendertemplate(ctx, tmpl, props=None):
1197 """Expand a literal template 'tmpl' byte-string against one changeset
1197 """Expand a literal template 'tmpl' byte-string against one changeset
1198
1198
1199 Each props item must be a stringify-able value or a callable returning
1199 Each props item must be a stringify-able value or a callable returning
1200 such a value, i.e. no bare list or dict should be passed.
1200 such a value, i.e. no bare list or dict should be passed.
1201 """
1201 """
1202 repo = ctx.repo()
1202 repo = ctx.repo()
1203 tres = formatter.templateresources(repo.ui, repo)
1203 tres = formatter.templateresources(repo.ui, repo)
1204 t = formatter.maketemplater(
1204 t = formatter.maketemplater(
1205 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1205 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1206 )
1206 )
1207 mapping = {b'ctx': ctx}
1207 mapping = {b'ctx': ctx}
1208 if props:
1208 if props:
1209 mapping.update(props)
1209 mapping.update(props)
1210 return t.renderdefault(mapping)
1210 return t.renderdefault(mapping)
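A usage sketch, assuming the current directory is a repository; the template keywords used are the standard ones provided by templatekw.keywords.

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo
ctx = repo[b'.']  # working directory parent

line = cmdutil.rendertemplate(ctx, b'{rev}:{node|short} {desc|firstline}')
u.write(line + b'\n')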
1211
1211
1212
1212
1213 def format_changeset_summary(ui, ctx, command=None, default_spec=None):
1214 """Format a changeset summary (one line)."""
1215 spec = None
1216 if command:
1217 spec = ui.config(
1218 b'command-templates', b'oneline-summary.%s' % command, None
1219 )
1220 if not spec:
1221 spec = ui.config(b'command-templates', b'oneline-summary')
1222 if not spec:
1223 spec = default_spec
1224 if not spec:
1225 # TODO: Pick a default we can agree on. This isn't used yet.
1226 raise error.ProgrammingError(b"no default one-line summary defined yet")
1227 text = rendertemplate(ctx, spec)
1228 return text.split(b'\n')[0]
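A sketch of how an extension might call the new helper; the command name b'myext' and the fallback spec are hypothetical, while the [command-templates] keys mirror the ones read above.

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo
ctx = repo[b'.']

# a caller-supplied fallback; users can override it per command through
# [command-templates] oneline-summary.myext or the generic oneline-summary key
spec = b'{rev}:{node|short} "{desc|firstline}"'
summary = cmdutil.format_changeset_summary(
    u, ctx, command=b'myext', default_spec=spec
)
u.write(summary + b'\n')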
1229
1230
1213 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1231 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1214 r"""Convert old-style filename format string to template string
1232 r"""Convert old-style filename format string to template string
1215
1233
1216 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1234 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1217 'foo-{reporoot|basename}-{seqno}.patch'
1235 'foo-{reporoot|basename}-{seqno}.patch'
1218 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1236 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1219 '{rev}{tags % "{tag}"}{node}'
1237 '{rev}{tags % "{tag}"}{node}'
1220
1238
1221 '\' in outermost strings has to be escaped because it is a directory
1239 '\' in outermost strings has to be escaped because it is a directory
1222 separator on Windows:
1240 separator on Windows:
1223
1241
1224 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1242 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1225 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1243 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1226 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1244 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1227 '\\\\\\\\foo\\\\bar.patch'
1245 '\\\\\\\\foo\\\\bar.patch'
1228 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1246 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1229 '\\\\{tags % "{tag}"}'
1247 '\\\\{tags % "{tag}"}'
1230
1248
1231 but inner strings follow the template rules (i.e. '\' is taken as an
1249 but inner strings follow the template rules (i.e. '\' is taken as an
1232 escape character):
1250 escape character):
1233
1251
1234 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1252 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1235 '{"c:\\tmp"}'
1253 '{"c:\\tmp"}'
1236 """
1254 """
1237 expander = {
1255 expander = {
1238 b'H': b'{node}',
1256 b'H': b'{node}',
1239 b'R': b'{rev}',
1257 b'R': b'{rev}',
1240 b'h': b'{node|short}',
1258 b'h': b'{node|short}',
1241 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1259 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1242 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1260 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1243 b'%': b'%',
1261 b'%': b'%',
1244 b'b': b'{reporoot|basename}',
1262 b'b': b'{reporoot|basename}',
1245 }
1263 }
1246 if total is not None:
1264 if total is not None:
1247 expander[b'N'] = b'{total}'
1265 expander[b'N'] = b'{total}'
1248 if seqno is not None:
1266 if seqno is not None:
1249 expander[b'n'] = b'{seqno}'
1267 expander[b'n'] = b'{seqno}'
1250 if total is not None and seqno is not None:
1268 if total is not None and seqno is not None:
1251 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1269 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1252 if pathname is not None:
1270 if pathname is not None:
1253 expander[b's'] = b'{pathname|basename}'
1271 expander[b's'] = b'{pathname|basename}'
1254 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1272 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1255 expander[b'p'] = b'{pathname}'
1273 expander[b'p'] = b'{pathname}'
1256
1274
1257 newname = []
1275 newname = []
1258 for typ, start, end in templater.scantemplate(pat, raw=True):
1276 for typ, start, end in templater.scantemplate(pat, raw=True):
1259 if typ != b'string':
1277 if typ != b'string':
1260 newname.append(pat[start:end])
1278 newname.append(pat[start:end])
1261 continue
1279 continue
1262 i = start
1280 i = start
1263 while i < end:
1281 while i < end:
1264 n = pat.find(b'%', i, end)
1282 n = pat.find(b'%', i, end)
1265 if n < 0:
1283 if n < 0:
1266 newname.append(stringutil.escapestr(pat[i:end]))
1284 newname.append(stringutil.escapestr(pat[i:end]))
1267 break
1285 break
1268 newname.append(stringutil.escapestr(pat[i:n]))
1286 newname.append(stringutil.escapestr(pat[i:n]))
1269 if n + 2 > end:
1287 if n + 2 > end:
1270 raise error.Abort(
1288 raise error.Abort(
1271 _(b"incomplete format spec in output filename")
1289 _(b"incomplete format spec in output filename")
1272 )
1290 )
1273 c = pat[n + 1 : n + 2]
1291 c = pat[n + 1 : n + 2]
1274 i = n + 2
1292 i = n + 2
1275 try:
1293 try:
1276 newname.append(expander[c])
1294 newname.append(expander[c])
1277 except KeyError:
1295 except KeyError:
1278 raise error.Abort(
1296 raise error.Abort(
1279 _(b"invalid format spec '%%%s' in output filename") % c
1297 _(b"invalid format spec '%%%s' in output filename") % c
1280 )
1298 )
1281 return b''.join(newname)
1299 return b''.join(newname)
1282
1300
1283
1301
1284 def makefilename(ctx, pat, **props):
1302 def makefilename(ctx, pat, **props):
1285 if not pat:
1303 if not pat:
1286 return pat
1304 return pat
1287 tmpl = _buildfntemplate(pat, **props)
1305 tmpl = _buildfntemplate(pat, **props)
1288 # BUG: alias expansion shouldn't be made against template fragments
1306 # BUG: alias expansion shouldn't be made against template fragments
1289 # rewritten from %-format strings, but we have no easy way to partially
1307 # rewritten from %-format strings, but we have no easy way to partially
1290 # disable the expansion.
1308 # disable the expansion.
1291 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1309 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
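A sketch of the %-expansion path, assuming the current directory is a repository; the pattern and the seqno/total values are arbitrary, and the commented output is only indicative.

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo
ctx = repo[b'.']

# %h, %n and %N expand through the template rewrite shown above
name = cmdutil.makefilename(ctx, b'%h-%n-of-%N.patch', seqno=1, total=3)
u.write(name + b'\n')  # something like 1a2b3c4d5e6f-1-of-3.patch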
1292
1310
1293
1311
1294 def isstdiofilename(pat):
1312 def isstdiofilename(pat):
1295 """True if the given pat looks like a filename denoting stdin/stdout"""
1313 """True if the given pat looks like a filename denoting stdin/stdout"""
1296 return not pat or pat == b'-'
1314 return not pat or pat == b'-'
1297
1315
1298
1316
1299 class _unclosablefile(object):
1317 class _unclosablefile(object):
1300 def __init__(self, fp):
1318 def __init__(self, fp):
1301 self._fp = fp
1319 self._fp = fp
1302
1320
1303 def close(self):
1321 def close(self):
1304 pass
1322 pass
1305
1323
1306 def __iter__(self):
1324 def __iter__(self):
1307 return iter(self._fp)
1325 return iter(self._fp)
1308
1326
1309 def __getattr__(self, attr):
1327 def __getattr__(self, attr):
1310 return getattr(self._fp, attr)
1328 return getattr(self._fp, attr)
1311
1329
1312 def __enter__(self):
1330 def __enter__(self):
1313 return self
1331 return self
1314
1332
1315 def __exit__(self, exc_type, exc_value, exc_tb):
1333 def __exit__(self, exc_type, exc_value, exc_tb):
1316 pass
1334 pass
1317
1335
1318
1336
1319 def makefileobj(ctx, pat, mode=b'wb', **props):
1337 def makefileobj(ctx, pat, mode=b'wb', **props):
1320 writable = mode not in (b'r', b'rb')
1338 writable = mode not in (b'r', b'rb')
1321
1339
1322 if isstdiofilename(pat):
1340 if isstdiofilename(pat):
1323 repo = ctx.repo()
1341 repo = ctx.repo()
1324 if writable:
1342 if writable:
1325 fp = repo.ui.fout
1343 fp = repo.ui.fout
1326 else:
1344 else:
1327 fp = repo.ui.fin
1345 fp = repo.ui.fin
1328 return _unclosablefile(fp)
1346 return _unclosablefile(fp)
1329 fn = makefilename(ctx, pat, **props)
1347 fn = makefilename(ctx, pat, **props)
1330 return open(fn, mode)
1348 return open(fn, mode)
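A sketch of the stdout case, assuming the current directory is a repository; b'-' routes output to ui.fout through the close-ignoring wrapper above.

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo
ctx = repo[b'.']

# b'-' selects ui.fout; close() on the wrapper is a no-op, so callers can
# treat real files and the ui streams uniformly
with cmdutil.makefileobj(ctx, b'-') as fp:
    fp.write(b'written to ui.fout\n')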
1331
1349
1332
1350
1333 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1351 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1334 """opens the changelog, manifest, a filelog or a given revlog"""
1352 """opens the changelog, manifest, a filelog or a given revlog"""
1335 cl = opts[b'changelog']
1353 cl = opts[b'changelog']
1336 mf = opts[b'manifest']
1354 mf = opts[b'manifest']
1337 dir = opts[b'dir']
1355 dir = opts[b'dir']
1338 msg = None
1356 msg = None
1339 if cl and mf:
1357 if cl and mf:
1340 msg = _(b'cannot specify --changelog and --manifest at the same time')
1358 msg = _(b'cannot specify --changelog and --manifest at the same time')
1341 elif cl and dir:
1359 elif cl and dir:
1342 msg = _(b'cannot specify --changelog and --dir at the same time')
1360 msg = _(b'cannot specify --changelog and --dir at the same time')
1343 elif cl or mf or dir:
1361 elif cl or mf or dir:
1344 if file_:
1362 if file_:
1345 msg = _(b'cannot specify filename with --changelog or --manifest')
1363 msg = _(b'cannot specify filename with --changelog or --manifest')
1346 elif not repo:
1364 elif not repo:
1347 msg = _(
1365 msg = _(
1348 b'cannot specify --changelog or --manifest or --dir '
1366 b'cannot specify --changelog or --manifest or --dir '
1349 b'without a repository'
1367 b'without a repository'
1350 )
1368 )
1351 if msg:
1369 if msg:
1352 raise error.Abort(msg)
1370 raise error.Abort(msg)
1353
1371
1354 r = None
1372 r = None
1355 if repo:
1373 if repo:
1356 if cl:
1374 if cl:
1357 r = repo.unfiltered().changelog
1375 r = repo.unfiltered().changelog
1358 elif dir:
1376 elif dir:
1359 if not scmutil.istreemanifest(repo):
1377 if not scmutil.istreemanifest(repo):
1360 raise error.Abort(
1378 raise error.Abort(
1361 _(
1379 _(
1362 b"--dir can only be used on repos with "
1380 b"--dir can only be used on repos with "
1363 b"treemanifest enabled"
1381 b"treemanifest enabled"
1364 )
1382 )
1365 )
1383 )
1366 if not dir.endswith(b'/'):
1384 if not dir.endswith(b'/'):
1367 dir = dir + b'/'
1385 dir = dir + b'/'
1368 dirlog = repo.manifestlog.getstorage(dir)
1386 dirlog = repo.manifestlog.getstorage(dir)
1369 if len(dirlog):
1387 if len(dirlog):
1370 r = dirlog
1388 r = dirlog
1371 elif mf:
1389 elif mf:
1372 r = repo.manifestlog.getstorage(b'')
1390 r = repo.manifestlog.getstorage(b'')
1373 elif file_:
1391 elif file_:
1374 filelog = repo.file(file_)
1392 filelog = repo.file(file_)
1375 if len(filelog):
1393 if len(filelog):
1376 r = filelog
1394 r = filelog
1377
1395
1378 # Not all storage may be revlogs. If requested, try to return an actual
1396 # Not all storage may be revlogs. If requested, try to return an actual
1379 # revlog instance.
1397 # revlog instance.
1380 if returnrevlog:
1398 if returnrevlog:
1381 if isinstance(r, revlog.revlog):
1399 if isinstance(r, revlog.revlog):
1382 pass
1400 pass
1383 elif util.safehasattr(r, b'_revlog'):
1401 elif util.safehasattr(r, b'_revlog'):
1384 r = r._revlog # pytype: disable=attribute-error
1402 r = r._revlog # pytype: disable=attribute-error
1385 elif r is not None:
1403 elif r is not None:
1386 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1404 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1387
1405
1388 if not r:
1406 if not r:
1389 if not returnrevlog:
1407 if not returnrevlog:
1390 raise error.Abort(_(b'cannot give path to non-revlog'))
1408 raise error.Abort(_(b'cannot give path to non-revlog'))
1391
1409
1392 if not file_:
1410 if not file_:
1393 raise error.CommandError(cmd, _(b'invalid arguments'))
1411 raise error.CommandError(cmd, _(b'invalid arguments'))
1394 if not os.path.isfile(file_):
1412 if not os.path.isfile(file_):
1395 raise error.Abort(_(b"revlog '%s' not found") % file_)
1413 raise error.Abort(_(b"revlog '%s' not found") % file_)
1396 r = revlog.revlog(
1414 r = revlog.revlog(
1397 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1415 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1398 )
1416 )
1399 return r
1417 return r
1400
1418
1401
1419
1402 def openrevlog(repo, cmd, file_, opts):
1420 def openrevlog(repo, cmd, file_, opts):
1403 """Obtain a revlog backing storage of an item.
1421 """Obtain a revlog backing storage of an item.
1404
1422
1405 This is similar to ``openstorage()`` except it always returns a revlog.
1423 This is similar to ``openstorage()`` except it always returns a revlog.
1406
1424
1407 In most cases, a caller cares about the main storage object - not the
1425 In most cases, a caller cares about the main storage object - not the
1408 revlog backing it. Therefore, this function should only be used by code
1426 revlog backing it. Therefore, this function should only be used by code
1409 that needs to examine low-level revlog implementation details. e.g. debug
1427 that needs to examine low-level revlog implementation details. e.g. debug
1410 commands.
1428 commands.
1411 """
1429 """
1412 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1430 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
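A sketch of opening the changelog as a revlog, assuming the current directory is a repository; the command name b'debugexample' is hypothetical, and the opts keys mirror the flags handled in openstorage().

from mercurial import cmdutil, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'.')  # assumes the current directory is a repo

# the opts keys mirror the debug-command flags handled in openstorage()
opts = {b'changelog': True, b'manifest': False, b'dir': b''}
rl = cmdutil.openrevlog(repo, b'debugexample', None, opts)
u.write(b'%d revisions in the changelog\n' % len(rl))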
1413
1431
1414
1432
1415 def copy(ui, repo, pats, opts, rename=False):
1433 def copy(ui, repo, pats, opts, rename=False):
1416 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1434 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1417
1435
1418 # called with the repo lock held
1436 # called with the repo lock held
1419 #
1437 #
1420 # hgsep => pathname that uses "/" to separate directories
1438 # hgsep => pathname that uses "/" to separate directories
1421 # ossep => pathname that uses os.sep to separate directories
1439 # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    forget = opts.get(b"forget")
    after = opts.get(b"after")
    dryrun = opts.get(b"dry_run")
    rev = opts.get(b'at_rev')
    if rev:
        if not forget and not after:
            # TODO: Remove this restriction and make it also create the copy
            # targets (and remove the rename source if rename==True).
            raise error.Abort(_(b'--at-rev requires --after'))
        ctx = scmutil.revsingle(repo, rev)
        if len(ctx.parents()) > 1:
            raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
    else:
        ctx = repo[None]

    pctx = ctx.p1()

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    if forget:
        if ctx.rev() is None:
            new_ctx = ctx
        else:
            if len(ctx.parents()) > 1:
                raise error.Abort(_(b'cannot unmark copy in merge commit'))
            # avoid cycle context -> subrepo -> cmdutil
            from . import context

            rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
            new_ctx = context.overlayworkingctx(repo)
            new_ctx.setbase(ctx.p1())
            mergemod.graft(repo, ctx, wctx=new_ctx)

        match = scmutil.match(ctx, pats, opts)

        current_copies = ctx.p1copies()
        current_copies.update(ctx.p2copies())

        uipathfn = scmutil.getuipathfn(repo)
        for f in ctx.walk(match):
            if f in current_copies:
                new_ctx[f].markcopied(None)
            elif match.exact(f):
                ui.warn(
                    _(
                        b'%s: not unmarking as copy - file is not marked as copied\n'
                    )
                    % uipathfn(f)
                )

        if ctx.rev() is not None:
            with repo.lock():
                mem_ctx = new_ctx.tomemctx_for_amend(ctx)
                new_node = mem_ctx.commit()

                if repo.dirstate.p1() == ctx.node():
                    with repo.dirstate.parentchange():
                        scmutil.movedirstate(repo, repo[new_node])
                replacements = {ctx.node(): [new_node]}
                scmutil.cleanupnodes(
                    repo, replacements, b'uncopy', fixphase=True
                )

        return

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_(b'no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_(b'no destination specified'))
    dest = pats.pop()

    def walkpat(pat):
        srcs = []
        # TODO: Inline and simplify the non-working-copy version of this code
        # since it shares very little with the working-copy version of it.
        ctx_to_walk = ctx if ctx.rev() is None else pctx
        m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
        for abs in ctx_to_walk.walk(m):
            rel = uipathfn(abs)
            exact = m.exact(abs)
            if abs not in ctx:
                if abs in pctx:
                    if not after:
                        if exact:
                            ui.warn(
                                _(
                                    b'%s: not copying - file has been marked '
                                    b'for remove\n'
                                )
                                % rel
                            )
                        continue
                else:
                    if exact:
                        ui.warn(
                            _(b'%s: not copying - file is not managed\n') % rel
                        )
                    continue

            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    if ctx.rev() is not None:
        rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
        absdest = pathutil.canonpath(repo.root, cwd, dest)
        if ctx.hasdir(absdest):
            raise error.Abort(
                _(b'%s: --at-rev does not support a directory as destination')
                % uipathfn(absdest)
            )
        if absdest not in ctx:
            raise error.Abort(
                _(b'%s: copy destination does not exist in %s')
                % (uipathfn(absdest), ctx)
            )

        # avoid cycle context -> subrepo -> cmdutil
        from . import context

        copylist = []
        for pat in pats:
            srcs = walkpat(pat)
            if not srcs:
                continue
            for abs, rel, exact in srcs:
                copylist.append(abs)

        if not copylist:
            raise error.Abort(_(b'no files to copy'))
        # TODO: Add support for `hg cp --at-rev . foo bar dir` and
        # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
        # existing functions below.
        if len(copylist) != 1:
            raise error.Abort(_(b'--at-rev requires a single source'))

        new_ctx = context.overlayworkingctx(repo)
        new_ctx.setbase(ctx.p1())
        mergemod.graft(repo, ctx, wctx=new_ctx)

        new_ctx.markcopied(absdest, copylist[0])

        with repo.lock():
            mem_ctx = new_ctx.tomemctx_for_amend(ctx)
            new_node = mem_ctx.commit()

            if repo.dirstate.p1() == ctx.node():
                with repo.dirstate.parentchange():
                    scmutil.movedirstate(repo, repo[new_node])
            replacements = {ctx.node(): [new_node]}
            scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)

        return

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if b'/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit(b'/', 1)
            abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(
                _(b'%s: not overwriting - %s collides with %s\n')
                % (
                    reltarget,
                    repo.pathto(abssrc, cwd),
                    repo.pathto(prevsrc, cwd),
                )
            )
            return True  # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
                abstarget
            ):
                if not rename:
                    ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
                    return True  # report a failure
                exists = False
                samefile = True

        if not after and exists or after and state in b'mn':
            if not opts[b'force']:
                if state in b'mn':
                    msg = _(b'%s: not overwriting - file already committed\n')
                    if after:
                        flags = b'--after --force'
                    else:
                        flags = b'--force'
                    if rename:
                        hint = (
                            _(
                                b"('hg rename %s' to replace the file by "
                                b'recording a rename)\n'
                            )
                            % flags
                        )
                    else:
                        hint = (
                            _(
                                b"('hg copy %s' to replace the file by "
                                b'recording a copy)\n'
                            )
                            % flags
                        )
                else:
                    msg = _(b'%s: not overwriting - file exists\n')
                    if rename:
                        hint = _(
                            b"('hg rename --after' to record the rename)\n"
                        )
                    else:
                        hint = _(b"('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True  # report a failure

        if after:
            if not exists:
                if rename:
                    ui.warn(
                        _(b'%s: not recording move - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                else:
                    ui.warn(
                        _(b'%s: not recording copy - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                return True  # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or b'.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + b"~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(
                        _(b'%s: cannot copy - %s\n')
                        % (relsrc, encoding.strtolocal(inst.strerror))
                    )
                    return True  # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(
            ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
        )
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            ctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
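    # Illustrative example (assumed layout, not executed): copying the
    # directory b'olddir' into an existing directory b'newdir' makes
    # targetpathfn() return a function mapping the hgsep source
    # b'olddir/a/b.txt' to the ossep target b'newdir/olddir/a/b.txt'; if
    # b'newdir' does not exist yet, the b'olddir' prefix is stripped instead
    # and the result is b'newdir/a/b.txt'.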
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(
                        dest, os.path.basename(util.localpath(p))
                    )
                else:
                    res = lambda p: dest
        return res

    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(
                _(
                    b'with multiple sources, destination must be an '
                    b'existing directory'
                )
            )
        if util.endswithsep(dest):
            raise error.Abort(_(b'destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_(b'no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0


## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = []  # run before commit
extrapostimport = []  # run after commit
# mapping from identifier to actual import function
#
# 'preimport' functions are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of the in-memory commit and more. Feel free to rework the code to
# get there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
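# Example (illustrative sketch only; the hook name and extension code below
# are hypothetical, not part of Mercurial): an extension would typically
# register a 'preimport' hook from its uisetup(), e.g.:
#
#     from mercurial import cmdutil
#
#     def _recordpatchname(repo, patchdata, extra, opts):
#         # stash the original patch filename in the changeset extras
#         name = patchdata.get(b'filename')
#         if name:
#             extra[b'patch-source'] = name
#
#     def uisetup(ui):
#         cmdutil.extrapreimport.append(b'patchsource')
#         cmdutil.extrapreimportmap[b'patchsource'] = _recordpatchname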


def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parents of the created commit
    :opts: the full dict of options passed to the import command
    :msgs: list to save the commit message to.
          (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)
    """
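    # Illustrative call sketch (heavily simplified; `patchfile`, `opts` and
    # `updatefunc` are assumed to exist, and the real caller in
    # commands.import also handles locking, transactions and multiple
    # patches):
    #
    #     patchdata = patch.extract(ui, patchfile)
    #     parents = repo[None].parents()
    #     msg, node, rejects = tryimportone(
    #         ui, repo, patchdata, parents, opts, [], updatefunc
    #     )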
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    tmpname = patchdata.get(b'filename')
    message = patchdata.get(b'message')
    user = opts.get(b'user') or patchdata.get(b'user')
    date = opts.get(b'date') or patchdata.get(b'date')
    branch = patchdata.get(b'branch')
    nodeid = patchdata.get(b'nodeid')
    p1 = patchdata.get(b'p1')
    p2 = patchdata.get(b'p2')

    nocommit = opts.get(b'no_commit')
    importbranch = opts.get(b'import_branch')
    update = not opts.get(b'bypass')
    strip = opts[b"strip"]
    prefix = opts[b"prefix"]
    sim = float(opts.get(b'similarity') or 0)

    if not tmpname:
        return None, None, False

    rejects = False

    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug(b'message:\n%s\n' % (message or b''))

    if len(parents) == 1:
        parents.append(repo[nullid])
    if opts.get(b'exact'):
        if not nodeid or not p1:
            raise error.Abort(_(b'not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullid]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullid]
        except error.RepoError:
            p1, p2 = parents
        if p2.node() == nullid:
            ui.warn(
                _(
                    b"warning: import the patch as a normal revision\n"
                    b"(use --exact to import the patch as a merge)\n"
                )
            )
    else:
        p1, p2 = parents

    n = None
    if update:
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get(b'exact') or importbranch:
            repo.dirstate.setbranch(branch or b'default')

        partial = opts.get(b'partial', False)
        files = set()
        try:
            patch.patch(
                ui,
                repo,
                tmpname,
                strip=strip,
                prefix=prefix,
                files=files,
                eolmode=None,
                similarity=sim / 100.0,
            )
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            if partial:
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get(b'exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], b'import.normal')
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[(b'ui', b'allowemptycommit')] = True
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = repo.commit(
                    message, user, date, match=m, editor=editor, extra=extra
                )
            for idfunc in extrapostimport:
                extrapostimportmap[idfunc](repo[n])
    else:
        if opts.get(b'exact') or importbranch:
            branch = branch or b'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(
                    ui,
                    repo,
                    p1,
                    store,
                    tmpname,
                    strip,
                    prefix,
                    files,
                    eolmode=None,
                )
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=b'import.bypass')
            memctx = context.memctx(
                repo,
                (p1.node(), p2.node()),
                message,
                files=files,
                filectxfn=store,
                user=user,
                date=date,
                branch=branch,
                editor=editor,
            )

            overrides = {}
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = memctx.commit()
        finally:
            store.close()
    if opts.get(b'exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_(b'patch is damaged or loses information'))
    msg = _(b'applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _(b'created %s') % short(n)
    return msg, n, rejects


# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
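# Example (illustrative sketch; the header name and extension code are made
# up, not part of Mercurial):
#
#     from mercurial import cmdutil
#
#     def _ticketheader(seqno, ctx):
#         # emit an extra '# Ticket ...' header when the changeset carries one
#         ticket = ctx.extra().get(b'ticket')
#         if ticket:
#             return b'Ticket %s' % ticket
#         return None
#
#     cmdutil.extraexport.append(b'ticket')
#     cmdutil.extraexportmap[b'ticket'] = _ticketheader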


def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    if fm.isplain():
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))


def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Export changesets to stdout or a single file"""
    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        if not dest.startswith(b'<'):
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)


def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets to possibly multiple files"""
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemap = util.sortdict()  # filename: [(seqno, rev), ...]

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        dest = makefilename(
            ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
        )
        filemap.setdefault(dest, []).append((seqno, rev))

    for dest in filemap:
        with formatter.maybereopen(basefm, dest) as fm:
            repo.ui.note(b"%s\n" % dest)
            for seqno, rev in filemap[dest]:
                fm.startitem()
                ctx = repo[rev]
                _exportsingle(
                    repo, ctx, fm, match, switch_parent, seqno, diffopts
                )


def _prefetchchangedfiles(repo, revs, match):
    allfiles = set()
    for rev in revs:
        for file in repo[rev].files():
            if not match or match(file):
                allfiles.add(file)
    match = scmutil.matchfiles(repo, allfiles)
    revmatches = [(rev, match) for rev in revs]
    scmutil.prefetchfiles(repo, revmatches)


def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      basefm: A formatter to which patches should be written.
      fntemplate: An optional string to use for generating patch file names.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Otherwise: All revs will be written to basefm.
    '''
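    # Illustrative usage sketch (how the export command roughly drives this
    # helper; `ui`, `repo`, `revs` and `opts` are assumed to exist):
    #
    #     with ui.formatter(b'export', opts) as fm:
    #         export(
    #             repo,
    #             revs,
    #             basefm=fm,
    #             fntemplate=b'',
    #             opts=patch.diffallopts(ui, opts),
    #         )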
2175 _prefetchchangedfiles(repo, revs, match)
2193 _prefetchchangedfiles(repo, revs, match)
2176
2194
2177 if not fntemplate:
2195 if not fntemplate:
2178 _exportfile(
2196 _exportfile(
2179 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2197 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2180 )
2198 )
2181 else:
2199 else:
2182 _exportfntemplate(
2200 _exportfntemplate(
2183 repo, revs, basefm, fntemplate, switch_parent, opts, match
2201 repo, revs, basefm, fntemplate, switch_parent, opts, match
2184 )
2202 )
2185
2203
2186
2204
2187 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2205 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2188 """Export changesets to the given file stream"""
2206 """Export changesets to the given file stream"""
2189 _prefetchchangedfiles(repo, revs, match)
2207 _prefetchchangedfiles(repo, revs, match)
2190
2208
2191 dest = getattr(fp, 'name', b'<unnamed>')
2209 dest = getattr(fp, 'name', b'<unnamed>')
2192 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2210 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2193 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2211 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2194
2212
2195
2213
2196 def showmarker(fm, marker, index=None):
2214 def showmarker(fm, marker, index=None):
2197 """utility function to display obsolescence marker in a readable way
2215 """utility function to display obsolescence marker in a readable way
2198
2216
2199 To be used by debug function."""
2217 To be used by debug function."""
2200 if index is not None:
2218 if index is not None:
2201 fm.write(b'index', b'%i ', index)
2219 fm.write(b'index', b'%i ', index)
2202 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2220 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2203 succs = marker.succnodes()
2221 succs = marker.succnodes()
2204 fm.condwrite(
2222 fm.condwrite(
2205 succs,
2223 succs,
2206 b'succnodes',
2224 b'succnodes',
2207 b'%s ',
2225 b'%s ',
2208 fm.formatlist(map(hex, succs), name=b'node'),
2226 fm.formatlist(map(hex, succs), name=b'node'),
2209 )
2227 )
2210 fm.write(b'flag', b'%X ', marker.flags())
2228 fm.write(b'flag', b'%X ', marker.flags())
2211 parents = marker.parentnodes()
2229 parents = marker.parentnodes()
2212 if parents is not None:
2230 if parents is not None:
2213 fm.write(
2231 fm.write(
2214 b'parentnodes',
2232 b'parentnodes',
2215 b'{%s} ',
2233 b'{%s} ',
2216 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2234 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2217 )
2235 )
2218 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2236 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2219 meta = marker.metadata().copy()
2237 meta = marker.metadata().copy()
2220 meta.pop(b'date', None)
2238 meta.pop(b'date', None)
2221 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2239 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2222 fm.write(
2240 fm.write(
2223 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2241 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2224 )
2242 )
2225 fm.plain(b'\n')
2243 fm.plain(b'\n')
2226
2244
2227
2245
2228 def finddate(ui, repo, date):
2246 def finddate(ui, repo, date):
2229 """Find the tipmost changeset that matches the given date spec"""
2247 """Find the tipmost changeset that matches the given date spec"""
2230 mrevs = repo.revs(b'date(%s)', date)
2248 mrevs = repo.revs(b'date(%s)', date)
2231 try:
2249 try:
2232 rev = mrevs.max()
2250 rev = mrevs.max()
2233 except ValueError:
2251 except ValueError:
2234 raise error.Abort(_(b"revision matching date not found"))
2252 raise error.Abort(_(b"revision matching date not found"))
2235
2253
2236 ui.status(
2254 ui.status(
2237 _(b"found revision %d from %s\n")
2255 _(b"found revision %d from %s\n")
2238 % (rev, dateutil.datestr(repo[rev].date()))
2256 % (rev, dateutil.datestr(repo[rev].date()))
2239 )
2257 )
2240 return b'%d' % rev
2258 return b'%d' % rev
2241
2259
2242
2260
2243 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2261 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2244 bad = []
2262 bad = []
2245
2263
2246 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2264 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2247 names = []
2265 names = []
2248 wctx = repo[None]
2266 wctx = repo[None]
2249 cca = None
2267 cca = None
2250 abort, warn = scmutil.checkportabilityalert(ui)
2268 abort, warn = scmutil.checkportabilityalert(ui)
2251 if abort or warn:
2269 if abort or warn:
2252 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2270 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2253
2271
2254 match = repo.narrowmatch(match, includeexact=True)
2272 match = repo.narrowmatch(match, includeexact=True)
2255 badmatch = matchmod.badmatch(match, badfn)
2273 badmatch = matchmod.badmatch(match, badfn)
2256 dirstate = repo.dirstate
2274 dirstate = repo.dirstate
2257 # We don't want to just call wctx.walk here, since it would return a lot of
2275 # We don't want to just call wctx.walk here, since it would return a lot of
2258 # clean files, which we aren't interested in and takes time.
2276 # clean files, which we aren't interested in and takes time.
2259 for f in sorted(
2277 for f in sorted(
2260 dirstate.walk(
2278 dirstate.walk(
2261 badmatch,
2279 badmatch,
2262 subrepos=sorted(wctx.substate),
2280 subrepos=sorted(wctx.substate),
2263 unknown=True,
2281 unknown=True,
2264 ignored=False,
2282 ignored=False,
2265 full=False,
2283 full=False,
2266 )
2284 )
2267 ):
2285 ):
2268 exact = match.exact(f)
2286 exact = match.exact(f)
2269 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2287 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2270 if cca:
2288 if cca:
2271 cca(f)
2289 cca(f)
2272 names.append(f)
2290 names.append(f)
2273 if ui.verbose or not exact:
2291 if ui.verbose or not exact:
2274 ui.status(
2292 ui.status(
2275 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2293 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2276 )
2294 )
2277
2295
2278 for subpath in sorted(wctx.substate):
2296 for subpath in sorted(wctx.substate):
2279 sub = wctx.sub(subpath)
2297 sub = wctx.sub(subpath)
2280 try:
2298 try:
2281 submatch = matchmod.subdirmatcher(subpath, match)
2299 submatch = matchmod.subdirmatcher(subpath, match)
2282 subprefix = repo.wvfs.reljoin(prefix, subpath)
2300 subprefix = repo.wvfs.reljoin(prefix, subpath)
2283 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2301 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2284 if opts.get('subrepos'):
2302 if opts.get('subrepos'):
2285 bad.extend(
2303 bad.extend(
2286 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2304 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2287 )
2305 )
2288 else:
2306 else:
2289 bad.extend(
2307 bad.extend(
2290 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2308 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2291 )
2309 )
2292 except error.LookupError:
2310 except error.LookupError:
2293 ui.status(
2311 ui.status(
2294 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2312 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2295 )
2313 )
2296
2314
2297 if not opts.get('dry_run'):
2315 if not opts.get('dry_run'):
2298 rejected = wctx.add(names, prefix)
2316 rejected = wctx.add(names, prefix)
2299 bad.extend(f for f in rejected if f in match.files())
2317 bad.extend(f for f in rejected if f in match.files())
2300 return bad
2318 return bad
2301
2319
2302
2320
2303 def addwebdirpath(repo, serverpath, webconf):
2321 def addwebdirpath(repo, serverpath, webconf):
2304 webconf[serverpath] = repo.root
2322 webconf[serverpath] = repo.root
2305 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2323 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2306
2324
2307 for r in repo.revs(b'filelog("path:.hgsub")'):
2325 for r in repo.revs(b'filelog("path:.hgsub")'):
2308 ctx = repo[r]
2326 ctx = repo[r]
2309 for subpath in ctx.substate:
2327 for subpath in ctx.substate:
2310 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2328 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2311
2329
2312
2330
2313 def forget(
2331 def forget(
2314 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2332 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2315 ):
2333 ):
2316 if dryrun and interactive:
2334 if dryrun and interactive:
2317 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2335 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2318 bad = []
2336 bad = []
2319 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2337 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2320 wctx = repo[None]
2338 wctx = repo[None]
2321 forgot = []
2339 forgot = []
2322
2340
2323 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2341 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2324 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2342 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2325 if explicitonly:
2343 if explicitonly:
2326 forget = [f for f in forget if match.exact(f)]
2344 forget = [f for f in forget if match.exact(f)]
2327
2345
2328 for subpath in sorted(wctx.substate):
2346 for subpath in sorted(wctx.substate):
2329 sub = wctx.sub(subpath)
2347 sub = wctx.sub(subpath)
2330 submatch = matchmod.subdirmatcher(subpath, match)
2348 submatch = matchmod.subdirmatcher(subpath, match)
2331 subprefix = repo.wvfs.reljoin(prefix, subpath)
2349 subprefix = repo.wvfs.reljoin(prefix, subpath)
2332 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2350 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2333 try:
2351 try:
2334 subbad, subforgot = sub.forget(
2352 subbad, subforgot = sub.forget(
2335 submatch,
2353 submatch,
2336 subprefix,
2354 subprefix,
2337 subuipathfn,
2355 subuipathfn,
2338 dryrun=dryrun,
2356 dryrun=dryrun,
2339 interactive=interactive,
2357 interactive=interactive,
2340 )
2358 )
2341 bad.extend([subpath + b'/' + f for f in subbad])
2359 bad.extend([subpath + b'/' + f for f in subbad])
2342 forgot.extend([subpath + b'/' + f for f in subforgot])
2360 forgot.extend([subpath + b'/' + f for f in subforgot])
2343 except error.LookupError:
2361 except error.LookupError:
2344 ui.status(
2362 ui.status(
2345 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2363 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2346 )
2364 )
2347
2365
2348 if not explicitonly:
2366 if not explicitonly:
2349 for f in match.files():
2367 for f in match.files():
2350 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2368 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2351 if f not in forgot:
2369 if f not in forgot:
2352 if repo.wvfs.exists(f):
2370 if repo.wvfs.exists(f):
2353 # Don't complain if the exact case match wasn't given.
2371 # Don't complain if the exact case match wasn't given.
2354 # But don't do this until after checking 'forgot', so
2372 # But don't do this until after checking 'forgot', so
2355 # that subrepo files aren't normalized, and this op is
2373 # that subrepo files aren't normalized, and this op is
2356 # purely from data cached by the status walk above.
2374 # purely from data cached by the status walk above.
2357 if repo.dirstate.normalize(f) in repo.dirstate:
2375 if repo.dirstate.normalize(f) in repo.dirstate:
2358 continue
2376 continue
2359 ui.warn(
2377 ui.warn(
2360 _(
2378 _(
2361 b'not removing %s: '
2379 b'not removing %s: '
2362 b'file is already untracked\n'
2380 b'file is already untracked\n'
2363 )
2381 )
2364 % uipathfn(f)
2382 % uipathfn(f)
2365 )
2383 )
2366 bad.append(f)
2384 bad.append(f)
2367
2385
2368 if interactive:
2386 if interactive:
2369 responses = _(
2387 responses = _(
2370 b'[Ynsa?]'
2388 b'[Ynsa?]'
2371 b'$$ &Yes, forget this file'
2389 b'$$ &Yes, forget this file'
2372 b'$$ &No, skip this file'
2390 b'$$ &No, skip this file'
2373 b'$$ &Skip remaining files'
2391 b'$$ &Skip remaining files'
2374 b'$$ Include &all remaining files'
2392 b'$$ Include &all remaining files'
2375 b'$$ &? (display help)'
2393 b'$$ &? (display help)'
2376 )
2394 )
2377 for filename in forget[:]:
2395 for filename in forget[:]:
2378 r = ui.promptchoice(
2396 r = ui.promptchoice(
2379 _(b'forget %s %s') % (uipathfn(filename), responses)
2397 _(b'forget %s %s') % (uipathfn(filename), responses)
2380 )
2398 )
2381 if r == 4: # ?
2399 if r == 4: # ?
2382 while r == 4:
2400 while r == 4:
2383 for c, t in ui.extractchoices(responses)[1]:
2401 for c, t in ui.extractchoices(responses)[1]:
2384 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2402 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2385 r = ui.promptchoice(
2403 r = ui.promptchoice(
2386 _(b'forget %s %s') % (uipathfn(filename), responses)
2404 _(b'forget %s %s') % (uipathfn(filename), responses)
2387 )
2405 )
2388 if r == 0: # yes
2406 if r == 0: # yes
2389 continue
2407 continue
2390 elif r == 1: # no
2408 elif r == 1: # no
2391 forget.remove(filename)
2409 forget.remove(filename)
2392 elif r == 2: # Skip
2410 elif r == 2: # Skip
2393 fnindex = forget.index(filename)
2411 fnindex = forget.index(filename)
2394 del forget[fnindex:]
2412 del forget[fnindex:]
2395 break
2413 break
2396 elif r == 3: # All
2414 elif r == 3: # All
2397 break
2415 break
2398
2416
2399 for f in forget:
2417 for f in forget:
2400 if ui.verbose or not match.exact(f) or interactive:
2418 if ui.verbose or not match.exact(f) or interactive:
2401 ui.status(
2419 ui.status(
2402 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2420 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2403 )
2421 )
2404
2422
2405 if not dryrun:
2423 if not dryrun:
2406 rejected = wctx.forget(forget, prefix)
2424 rejected = wctx.forget(forget, prefix)
2407 bad.extend(f for f in rejected if f in match.files())
2425 bad.extend(f for f in rejected if f in match.files())
2408 forgot.extend(f for f in forget if f not in rejected)
2426 forgot.extend(f for f in forget if f not in rejected)
2409 return bad, forgot
2427 return bad, forgot
2410
2428
2411
2429
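# NOTE (editorial sketch, not part of the upstream file): the interactive
# branch above relies on Mercurial's prompt-string convention, where the
# question is followed by '$$'-separated choices and '&' marks the response
# key. Roughly, for the b'[Ynsa?]' prompt used above:
#
#     msg, choices = ui.extractchoices(responses)
#     # choices -> [(b'y', b'Yes, forget this file'),
#     #             (b'n', b'No, skip this file'),
#     #             (b's', b'Skip remaining files'),
#     #             (b'a', b'Include all remaining files'),
#     #             (b'?', b'? (display help)')]
#
# ui.promptchoice() returns the index of the selected choice, which is why
# the code compares the result against 0..4.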
2412 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2430 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2413 ret = 1
2431 ret = 1
2414
2432
2415 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2433 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2416 if fm.isplain() and not needsfctx:
2434 if fm.isplain() and not needsfctx:
2417 # Fast path. The speed-up comes from skipping the formatter, and batching
2435 # Fast path. The speed-up comes from skipping the formatter, and batching
2418 # calls to ui.write.
2436 # calls to ui.write.
2419 buf = []
2437 buf = []
2420 for f in ctx.matches(m):
2438 for f in ctx.matches(m):
2421 buf.append(fmt % uipathfn(f))
2439 buf.append(fmt % uipathfn(f))
2422 if len(buf) > 100:
2440 if len(buf) > 100:
2423 ui.write(b''.join(buf))
2441 ui.write(b''.join(buf))
2424 del buf[:]
2442 del buf[:]
2425 ret = 0
2443 ret = 0
2426 if buf:
2444 if buf:
2427 ui.write(b''.join(buf))
2445 ui.write(b''.join(buf))
2428 else:
2446 else:
2429 for f in ctx.matches(m):
2447 for f in ctx.matches(m):
2430 fm.startitem()
2448 fm.startitem()
2431 fm.context(ctx=ctx)
2449 fm.context(ctx=ctx)
2432 if needsfctx:
2450 if needsfctx:
2433 fc = ctx[f]
2451 fc = ctx[f]
2434 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2452 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2435 fm.data(path=f)
2453 fm.data(path=f)
2436 fm.plain(fmt % uipathfn(f))
2454 fm.plain(fmt % uipathfn(f))
2437 ret = 0
2455 ret = 0
2438
2456
2439 for subpath in sorted(ctx.substate):
2457 for subpath in sorted(ctx.substate):
2440 submatch = matchmod.subdirmatcher(subpath, m)
2458 submatch = matchmod.subdirmatcher(subpath, m)
2441 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2459 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2442 if subrepos or m.exact(subpath) or any(submatch.files()):
2460 if subrepos or m.exact(subpath) or any(submatch.files()):
2443 sub = ctx.sub(subpath)
2461 sub = ctx.sub(subpath)
2444 try:
2462 try:
2445 recurse = m.exact(subpath) or subrepos
2463 recurse = m.exact(subpath) or subrepos
2446 if (
2464 if (
2447 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2465 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2448 == 0
2466 == 0
2449 ):
2467 ):
2450 ret = 0
2468 ret = 0
2451 except error.LookupError:
2469 except error.LookupError:
2452 ui.status(
2470 ui.status(
2453 _(b"skipping missing subrepository: %s\n")
2471 _(b"skipping missing subrepository: %s\n")
2454 % uipathfn(subpath)
2472 % uipathfn(subpath)
2455 )
2473 )
2456
2474
2457 return ret
2475 return ret
2458
2476
2459
2477
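# NOTE (editorial sketch, not part of the upstream file): the plain-output
# fast path above is a simple "buffer then flush" pattern, flushing about
# 100 formatted paths per ui.write() call instead of writing one path at a
# time. In isolation the idea looks like this (generic Python; emit() is a
# hypothetical stand-in for ui.write):
#
#     buf = []
#     for path in paths:
#         buf.append(fmt % path)
#         if len(buf) > 100:
#             emit(''.join(buf))
#             del buf[:]
#     if buf:
#         emit(''.join(buf))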
2460 def remove(
2478 def remove(
2461 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2479 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2462 ):
2480 ):
2463 ret = 0
2481 ret = 0
2464 s = repo.status(match=m, clean=True)
2482 s = repo.status(match=m, clean=True)
2465 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2483 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2466
2484
2467 wctx = repo[None]
2485 wctx = repo[None]
2468
2486
2469 if warnings is None:
2487 if warnings is None:
2470 warnings = []
2488 warnings = []
2471 warn = True
2489 warn = True
2472 else:
2490 else:
2473 warn = False
2491 warn = False
2474
2492
2475 subs = sorted(wctx.substate)
2493 subs = sorted(wctx.substate)
2476 progress = ui.makeprogress(
2494 progress = ui.makeprogress(
2477 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2495 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2478 )
2496 )
2479 for subpath in subs:
2497 for subpath in subs:
2480 submatch = matchmod.subdirmatcher(subpath, m)
2498 submatch = matchmod.subdirmatcher(subpath, m)
2481 subprefix = repo.wvfs.reljoin(prefix, subpath)
2499 subprefix = repo.wvfs.reljoin(prefix, subpath)
2482 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2500 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2483 if subrepos or m.exact(subpath) or any(submatch.files()):
2501 if subrepos or m.exact(subpath) or any(submatch.files()):
2484 progress.increment()
2502 progress.increment()
2485 sub = wctx.sub(subpath)
2503 sub = wctx.sub(subpath)
2486 try:
2504 try:
2487 if sub.removefiles(
2505 if sub.removefiles(
2488 submatch,
2506 submatch,
2489 subprefix,
2507 subprefix,
2490 subuipathfn,
2508 subuipathfn,
2491 after,
2509 after,
2492 force,
2510 force,
2493 subrepos,
2511 subrepos,
2494 dryrun,
2512 dryrun,
2495 warnings,
2513 warnings,
2496 ):
2514 ):
2497 ret = 1
2515 ret = 1
2498 except error.LookupError:
2516 except error.LookupError:
2499 warnings.append(
2517 warnings.append(
2500 _(b"skipping missing subrepository: %s\n")
2518 _(b"skipping missing subrepository: %s\n")
2501 % uipathfn(subpath)
2519 % uipathfn(subpath)
2502 )
2520 )
2503 progress.complete()
2521 progress.complete()
2504
2522
2505 # warn about failure to delete explicit files/dirs
2523 # warn about failure to delete explicit files/dirs
2506 deleteddirs = pathutil.dirs(deleted)
2524 deleteddirs = pathutil.dirs(deleted)
2507 files = m.files()
2525 files = m.files()
2508 progress = ui.makeprogress(
2526 progress = ui.makeprogress(
2509 _(b'deleting'), total=len(files), unit=_(b'files')
2527 _(b'deleting'), total=len(files), unit=_(b'files')
2510 )
2528 )
2511 for f in files:
2529 for f in files:
2512
2530
2513 def insubrepo():
2531 def insubrepo():
2514 for subpath in wctx.substate:
2532 for subpath in wctx.substate:
2515 if f.startswith(subpath + b'/'):
2533 if f.startswith(subpath + b'/'):
2516 return True
2534 return True
2517 return False
2535 return False
2518
2536
2519 progress.increment()
2537 progress.increment()
2520 isdir = f in deleteddirs or wctx.hasdir(f)
2538 isdir = f in deleteddirs or wctx.hasdir(f)
2521 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2539 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2522 continue
2540 continue
2523
2541
2524 if repo.wvfs.exists(f):
2542 if repo.wvfs.exists(f):
2525 if repo.wvfs.isdir(f):
2543 if repo.wvfs.isdir(f):
2526 warnings.append(
2544 warnings.append(
2527 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2545 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2528 )
2546 )
2529 else:
2547 else:
2530 warnings.append(
2548 warnings.append(
2531 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2549 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2532 )
2550 )
2533 # missing files will generate a warning elsewhere
2551 # missing files will generate a warning elsewhere
2534 ret = 1
2552 ret = 1
2535 progress.complete()
2553 progress.complete()
2536
2554
2537 if force:
2555 if force:
2538 list = modified + deleted + clean + added
2556 list = modified + deleted + clean + added
2539 elif after:
2557 elif after:
2540 list = deleted
2558 list = deleted
2541 remaining = modified + added + clean
2559 remaining = modified + added + clean
2542 progress = ui.makeprogress(
2560 progress = ui.makeprogress(
2543 _(b'skipping'), total=len(remaining), unit=_(b'files')
2561 _(b'skipping'), total=len(remaining), unit=_(b'files')
2544 )
2562 )
2545 for f in remaining:
2563 for f in remaining:
2546 progress.increment()
2564 progress.increment()
2547 if ui.verbose or (f in files):
2565 if ui.verbose or (f in files):
2548 warnings.append(
2566 warnings.append(
2549 _(b'not removing %s: file still exists\n') % uipathfn(f)
2567 _(b'not removing %s: file still exists\n') % uipathfn(f)
2550 )
2568 )
2551 ret = 1
2569 ret = 1
2552 progress.complete()
2570 progress.complete()
2553 else:
2571 else:
2554 list = deleted + clean
2572 list = deleted + clean
2555 progress = ui.makeprogress(
2573 progress = ui.makeprogress(
2556 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2574 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2557 )
2575 )
2558 for f in modified:
2576 for f in modified:
2559 progress.increment()
2577 progress.increment()
2560 warnings.append(
2578 warnings.append(
2561 _(
2579 _(
2562 b'not removing %s: file is modified (use -f'
2580 b'not removing %s: file is modified (use -f'
2563 b' to force removal)\n'
2581 b' to force removal)\n'
2564 )
2582 )
2565 % uipathfn(f)
2583 % uipathfn(f)
2566 )
2584 )
2567 ret = 1
2585 ret = 1
2568 for f in added:
2586 for f in added:
2569 progress.increment()
2587 progress.increment()
2570 warnings.append(
2588 warnings.append(
2571 _(
2589 _(
2572 b"not removing %s: file has been marked for add"
2590 b"not removing %s: file has been marked for add"
2573 b" (use 'hg forget' to undo add)\n"
2591 b" (use 'hg forget' to undo add)\n"
2574 )
2592 )
2575 % uipathfn(f)
2593 % uipathfn(f)
2576 )
2594 )
2577 ret = 1
2595 ret = 1
2578 progress.complete()
2596 progress.complete()
2579
2597
2580 list = sorted(list)
2598 list = sorted(list)
2581 progress = ui.makeprogress(
2599 progress = ui.makeprogress(
2582 _(b'deleting'), total=len(list), unit=_(b'files')
2600 _(b'deleting'), total=len(list), unit=_(b'files')
2583 )
2601 )
2584 for f in list:
2602 for f in list:
2585 if ui.verbose or not m.exact(f):
2603 if ui.verbose or not m.exact(f):
2586 progress.increment()
2604 progress.increment()
2587 ui.status(
2605 ui.status(
2588 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2606 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2589 )
2607 )
2590 progress.complete()
2608 progress.complete()
2591
2609
2592 if not dryrun:
2610 if not dryrun:
2593 with repo.wlock():
2611 with repo.wlock():
2594 if not after:
2612 if not after:
2595 for f in list:
2613 for f in list:
2596 if f in added:
2614 if f in added:
2597 continue # we never unlink added files on remove
2615 continue # we never unlink added files on remove
2598 rmdir = repo.ui.configbool(
2616 rmdir = repo.ui.configbool(
2599 b'experimental', b'removeemptydirs'
2617 b'experimental', b'removeemptydirs'
2600 )
2618 )
2601 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2619 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2602 repo[None].forget(list)
2620 repo[None].forget(list)
2603
2621
2604 if warn:
2622 if warn:
2605 for warning in warnings:
2623 for warning in warnings:
2606 ui.warn(warning)
2624 ui.warn(warning)
2607
2625
2608 return ret
2626 return ret
2609
2627
2610
2628
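# NOTE (editorial summary, not part of the upstream file): which status
# classes end up in `list` above, i.e. are actually removed, depends on the
# flags passed to remove():
#
#     --force  -> modified + deleted + clean + added
#     --after  -> deleted only (files still on disk are skipped with
#                 "file still exists")
#     default  -> deleted + clean (modified needs -f, added needs
#                 'hg forget')
#
# Files in `added` are never unlinked from disk; they are only dropped from
# tracking (see the "we never unlink added files on remove" branch).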
2611 def _catfmtneedsdata(fm):
2629 def _catfmtneedsdata(fm):
2612 return not fm.datahint() or b'data' in fm.datahint()
2630 return not fm.datahint() or b'data' in fm.datahint()
2613
2631
2614
2632
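# NOTE (editorial note, not part of the upstream file): fm.datahint()
# reports which fields the active formatter/template is known to need. With
# a plain formatter the hint is empty, so _catfmtneedsdata() conservatively
# returns True; with something like `hg cat -T '{path}\n'` the hint may
# contain only b'path', letting the cat code below skip fetching (and
# prefetching) file contents entirely.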
2615 def _updatecatformatter(fm, ctx, matcher, path, decode):
2633 def _updatecatformatter(fm, ctx, matcher, path, decode):
2616 """Hook for adding data to the formatter used by ``hg cat``.
2634 """Hook for adding data to the formatter used by ``hg cat``.
2617
2635
2618 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2636 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2619 this method first."""
2637 this method first."""
2620
2638
2621 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2639 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2622 # wasn't requested.
2640 # wasn't requested.
2623 data = b''
2641 data = b''
2624 if _catfmtneedsdata(fm):
2642 if _catfmtneedsdata(fm):
2625 data = ctx[path].data()
2643 data = ctx[path].data()
2626 if decode:
2644 if decode:
2627 data = ctx.repo().wwritedata(path, data)
2645 data = ctx.repo().wwritedata(path, data)
2628 fm.startitem()
2646 fm.startitem()
2629 fm.context(ctx=ctx)
2647 fm.context(ctx=ctx)
2630 fm.write(b'data', b'%s', data)
2648 fm.write(b'data', b'%s', data)
2631 fm.data(path=path)
2649 fm.data(path=path)
2632
2650
2633
2651
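# NOTE (editorial sketch, not part of the upstream file) of how an extension
# might wrap the hook above, as its docstring suggests; the extra keyword and
# module layout here are hypothetical:
#
#     from mercurial import cmdutil, extensions
#
#     def _wrappedcatformatter(orig, fm, ctx, matcher, path, decode):
#         orig(fm, ctx, matcher, path, decode)  # must run the original first
#         fm.data(oid=b'...')                   # hypothetical extra keyword
#
#     def uisetup(ui):
#         extensions.wrapfunction(
#             cmdutil, '_updatecatformatter', _wrappedcatformatter
#         )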
2634 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2652 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2635 err = 1
2653 err = 1
2636 opts = pycompat.byteskwargs(opts)
2654 opts = pycompat.byteskwargs(opts)
2637
2655
2638 def write(path):
2656 def write(path):
2639 filename = None
2657 filename = None
2640 if fntemplate:
2658 if fntemplate:
2641 filename = makefilename(
2659 filename = makefilename(
2642 ctx, fntemplate, pathname=os.path.join(prefix, path)
2660 ctx, fntemplate, pathname=os.path.join(prefix, path)
2643 )
2661 )
2644 # attempt to create the directory if it does not already exist
2662 # attempt to create the directory if it does not already exist
2645 try:
2663 try:
2646 os.makedirs(os.path.dirname(filename))
2664 os.makedirs(os.path.dirname(filename))
2647 except OSError:
2665 except OSError:
2648 pass
2666 pass
2649 with formatter.maybereopen(basefm, filename) as fm:
2667 with formatter.maybereopen(basefm, filename) as fm:
2650 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2668 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2651
2669
2652 # Automation often uses hg cat on single files, so special case it
2670 # Automation often uses hg cat on single files, so special case it
2653 # for performance to avoid the cost of parsing the manifest.
2671 # for performance to avoid the cost of parsing the manifest.
2654 if len(matcher.files()) == 1 and not matcher.anypats():
2672 if len(matcher.files()) == 1 and not matcher.anypats():
2655 file = matcher.files()[0]
2673 file = matcher.files()[0]
2656 mfl = repo.manifestlog
2674 mfl = repo.manifestlog
2657 mfnode = ctx.manifestnode()
2675 mfnode = ctx.manifestnode()
2658 try:
2676 try:
2659 if mfnode and mfl[mfnode].find(file)[0]:
2677 if mfnode and mfl[mfnode].find(file)[0]:
2660 if _catfmtneedsdata(basefm):
2678 if _catfmtneedsdata(basefm):
2661 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
2679 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
2662 write(file)
2680 write(file)
2663 return 0
2681 return 0
2664 except KeyError:
2682 except KeyError:
2665 pass
2683 pass
2666
2684
2667 if _catfmtneedsdata(basefm):
2685 if _catfmtneedsdata(basefm):
2668 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
2686 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
2669
2687
2670 for abs in ctx.walk(matcher):
2688 for abs in ctx.walk(matcher):
2671 write(abs)
2689 write(abs)
2672 err = 0
2690 err = 0
2673
2691
2674 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2692 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2675 for subpath in sorted(ctx.substate):
2693 for subpath in sorted(ctx.substate):
2676 sub = ctx.sub(subpath)
2694 sub = ctx.sub(subpath)
2677 try:
2695 try:
2678 submatch = matchmod.subdirmatcher(subpath, matcher)
2696 submatch = matchmod.subdirmatcher(subpath, matcher)
2679 subprefix = os.path.join(prefix, subpath)
2697 subprefix = os.path.join(prefix, subpath)
2680 if not sub.cat(
2698 if not sub.cat(
2681 submatch,
2699 submatch,
2682 basefm,
2700 basefm,
2683 fntemplate,
2701 fntemplate,
2684 subprefix,
2702 subprefix,
2685 **pycompat.strkwargs(opts)
2703 **pycompat.strkwargs(opts)
2686 ):
2704 ):
2687 err = 0
2705 err = 0
2688 except error.RepoLookupError:
2706 except error.RepoLookupError:
2689 ui.status(
2707 ui.status(
2690 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2708 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2691 )
2709 )
2692
2710
2693 return err
2711 return err
2694
2712
2695
2713
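# NOTE (editorial note, not part of the upstream file): the single-file fast
# path above only triggers for one exact, pattern-free argument, so something
# like `hg cat -r . setup.py` avoids parsing the full manifest, while
# `hg cat -r . 'glob:*.py'` (anypats() is true) falls through to the regular
# ctx.walk() loop.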
2696 def commit(ui, repo, commitfunc, pats, opts):
2714 def commit(ui, repo, commitfunc, pats, opts):
2697 '''commit the specified files or all outstanding changes'''
2715 '''commit the specified files or all outstanding changes'''
2698 date = opts.get(b'date')
2716 date = opts.get(b'date')
2699 if date:
2717 if date:
2700 opts[b'date'] = dateutil.parsedate(date)
2718 opts[b'date'] = dateutil.parsedate(date)
2701 message = logmessage(ui, opts)
2719 message = logmessage(ui, opts)
2702 matcher = scmutil.match(repo[None], pats, opts)
2720 matcher = scmutil.match(repo[None], pats, opts)
2703
2721
2704 dsguard = None
2722 dsguard = None
2705 # extract addremove carefully -- this function can be called from a command
2723 # extract addremove carefully -- this function can be called from a command
2706 # that doesn't support addremove
2724 # that doesn't support addremove
2707 if opts.get(b'addremove'):
2725 if opts.get(b'addremove'):
2708 dsguard = dirstateguard.dirstateguard(repo, b'commit')
2726 dsguard = dirstateguard.dirstateguard(repo, b'commit')
2709 with dsguard or util.nullcontextmanager():
2727 with dsguard or util.nullcontextmanager():
2710 if dsguard:
2728 if dsguard:
2711 relative = scmutil.anypats(pats, opts)
2729 relative = scmutil.anypats(pats, opts)
2712 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2730 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2713 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
2731 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
2714 raise error.Abort(
2732 raise error.Abort(
2715 _(b"failed to mark all new/missing files as added/removed")
2733 _(b"failed to mark all new/missing files as added/removed")
2716 )
2734 )
2717
2735
2718 return commitfunc(ui, repo, message, matcher, opts)
2736 return commitfunc(ui, repo, message, matcher, opts)
2719
2737
2720
2738
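# NOTE (editorial note, not part of the upstream file): the dirstateguard
# above is only created when --addremove is in effect; it makes the dirstate
# changes performed by scmutil.addremove() effectively transactional, so
# that if the commit itself fails the newly added/removed entries are rolled
# back instead of leaving the working copy half-updated.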
2721 def samefile(f, ctx1, ctx2):
2739 def samefile(f, ctx1, ctx2):
2722 if f in ctx1.manifest():
2740 if f in ctx1.manifest():
2723 a = ctx1.filectx(f)
2741 a = ctx1.filectx(f)
2724 if f in ctx2.manifest():
2742 if f in ctx2.manifest():
2725 b = ctx2.filectx(f)
2743 b = ctx2.filectx(f)
2726 return not a.cmp(b) and a.flags() == b.flags()
2744 return not a.cmp(b) and a.flags() == b.flags()
2727 else:
2745 else:
2728 return False
2746 return False
2729 else:
2747 else:
2730 return f not in ctx2.manifest()
2748 return f not in ctx2.manifest()
2731
2749
2732
2750
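# NOTE (editorial summary of samefile() above, not part of the upstream
# file):
#
#     f in ctx1   f in ctx2   result
#     ---------   ---------   ---------------------------------------
#     yes         yes         contents equal and flags equal
#     yes         no          False
#     no          yes         False
#     no          no          True (absent on both sides counts as same)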
2733 def amend(ui, repo, old, extra, pats, opts):
2751 def amend(ui, repo, old, extra, pats, opts):
2734 # avoid cycle context -> subrepo -> cmdutil
2752 # avoid cycle context -> subrepo -> cmdutil
2735 from . import context
2753 from . import context
2736
2754
2737 # amend will reuse the existing user if not specified, but the obsolete
2755 # amend will reuse the existing user if not specified, but the obsolete
2738 # marker creation requires that the current user's name is specified.
2756 # marker creation requires that the current user's name is specified.
2739 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2757 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2740 ui.username() # raise exception if username not set
2758 ui.username() # raise exception if username not set
2741
2759
2742 ui.note(_(b'amending changeset %s\n') % old)
2760 ui.note(_(b'amending changeset %s\n') % old)
2743 base = old.p1()
2761 base = old.p1()
2744
2762
2745 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2763 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2746 # Participating changesets:
2764 # Participating changesets:
2747 #
2765 #
2748 # wctx o - workingctx that contains changes from working copy
2766 # wctx o - workingctx that contains changes from working copy
2749 # | to go into amending commit
2767 # | to go into amending commit
2750 # |
2768 # |
2751 # old o - changeset to amend
2769 # old o - changeset to amend
2752 # |
2770 # |
2753 # base o - first parent of the changeset to amend
2771 # base o - first parent of the changeset to amend
2754 wctx = repo[None]
2772 wctx = repo[None]
2755
2773
2756 # Copy to avoid mutating input
2774 # Copy to avoid mutating input
2757 extra = extra.copy()
2775 extra = extra.copy()
2758 # Update extra dict from amended commit (e.g. to preserve graft
2776 # Update extra dict from amended commit (e.g. to preserve graft
2759 # source)
2777 # source)
2760 extra.update(old.extra())
2778 extra.update(old.extra())
2761
2779
2762 # Also update it from the wctx
2780 # Also update it from the wctx
2763 extra.update(wctx.extra())
2781 extra.update(wctx.extra())
2764
2782
2765 # date-only change should be ignored?
2783 # date-only change should be ignored?
2766 datemaydiffer = resolvecommitoptions(ui, opts)
2784 datemaydiffer = resolvecommitoptions(ui, opts)
2767
2785
2768 date = old.date()
2786 date = old.date()
2769 if opts.get(b'date'):
2787 if opts.get(b'date'):
2770 date = dateutil.parsedate(opts.get(b'date'))
2788 date = dateutil.parsedate(opts.get(b'date'))
2771 user = opts.get(b'user') or old.user()
2789 user = opts.get(b'user') or old.user()
2772
2790
2773 if len(old.parents()) > 1:
2791 if len(old.parents()) > 1:
2774 # ctx.files() isn't reliable for merges, so fall back to the
2792 # ctx.files() isn't reliable for merges, so fall back to the
2775 # slower repo.status() method
2793 # slower repo.status() method
2776 st = base.status(old)
2794 st = base.status(old)
2777 files = set(st.modified) | set(st.added) | set(st.removed)
2795 files = set(st.modified) | set(st.added) | set(st.removed)
2778 else:
2796 else:
2779 files = set(old.files())
2797 files = set(old.files())
2780
2798
2781 # add/remove the files to the working copy if the "addremove" option
2799 # add/remove the files to the working copy if the "addremove" option
2782 # was specified.
2800 # was specified.
2783 matcher = scmutil.match(wctx, pats, opts)
2801 matcher = scmutil.match(wctx, pats, opts)
2784 relative = scmutil.anypats(pats, opts)
2802 relative = scmutil.anypats(pats, opts)
2785 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2803 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2786 if opts.get(b'addremove') and scmutil.addremove(
2804 if opts.get(b'addremove') and scmutil.addremove(
2787 repo, matcher, b"", uipathfn, opts
2805 repo, matcher, b"", uipathfn, opts
2788 ):
2806 ):
2789 raise error.Abort(
2807 raise error.Abort(
2790 _(b"failed to mark all new/missing files as added/removed")
2808 _(b"failed to mark all new/missing files as added/removed")
2791 )
2809 )
2792
2810
2793 # Check subrepos. This depends on in-place wctx._status update in
2811 # Check subrepos. This depends on in-place wctx._status update in
2794 # subrepo.precommit(). To minimize the risk of this hack, we do
2812 # subrepo.precommit(). To minimize the risk of this hack, we do
2795 # nothing if .hgsub does not exist.
2813 # nothing if .hgsub does not exist.
2796 if b'.hgsub' in wctx or b'.hgsub' in old:
2814 if b'.hgsub' in wctx or b'.hgsub' in old:
2797 subs, commitsubs, newsubstate = subrepoutil.precommit(
2815 subs, commitsubs, newsubstate = subrepoutil.precommit(
2798 ui, wctx, wctx._status, matcher
2816 ui, wctx, wctx._status, matcher
2799 )
2817 )
2800 # amend should abort if commitsubrepos is enabled
2818 # amend should abort if commitsubrepos is enabled
2801 assert not commitsubs
2819 assert not commitsubs
2802 if subs:
2820 if subs:
2803 subrepoutil.writestate(repo, newsubstate)
2821 subrepoutil.writestate(repo, newsubstate)
2804
2822
2805 ms = mergestatemod.mergestate.read(repo)
2823 ms = mergestatemod.mergestate.read(repo)
2806 mergeutil.checkunresolved(ms)
2824 mergeutil.checkunresolved(ms)
2807
2825
2808 filestoamend = {f for f in wctx.files() if matcher(f)}
2826 filestoamend = {f for f in wctx.files() if matcher(f)}
2809
2827
2810 changes = len(filestoamend) > 0
2828 changes = len(filestoamend) > 0
2811 if changes:
2829 if changes:
2812 # Recompute copies (avoid recording a -> b -> a)
2830 # Recompute copies (avoid recording a -> b -> a)
2813 copied = copies.pathcopies(base, wctx, matcher)
2831 copied = copies.pathcopies(base, wctx, matcher)
2814 if old.p2():
2832 if old.p2():
2815 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2833 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2816
2834
2817 # Prune files which were reverted by the updates: if old
2835 # Prune files which were reverted by the updates: if old
2818 # introduced file X and the file was renamed in the working
2836 # introduced file X and the file was renamed in the working
2819 # copy, then those two files are the same and
2837 # copy, then those two files are the same and
2820 # we can discard X from our list of files. Likewise if X
2838 # we can discard X from our list of files. Likewise if X
2821 # was removed, it's no longer relevant. If X is missing (aka
2839 # was removed, it's no longer relevant. If X is missing (aka
2822 # deleted), old X must be preserved.
2840 # deleted), old X must be preserved.
2823 files.update(filestoamend)
2841 files.update(filestoamend)
2824 files = [
2842 files = [
2825 f
2843 f
2826 for f in files
2844 for f in files
2827 if (f not in filestoamend or not samefile(f, wctx, base))
2845 if (f not in filestoamend or not samefile(f, wctx, base))
2828 ]
2846 ]
2829
2847
2830 def filectxfn(repo, ctx_, path):
2848 def filectxfn(repo, ctx_, path):
2831 try:
2849 try:
2832 # If the file being considered is not amongst the files
2850 # If the file being considered is not amongst the files
2833 # to be amended, we should return the file context from the
2851 # to be amended, we should return the file context from the
2834 # old changeset. This avoids issues when only some files in
2852 # old changeset. This avoids issues when only some files in
2835 # the working copy are being amended but there are also
2853 # the working copy are being amended but there are also
2836 # changes to other files from the old changeset.
2854 # changes to other files from the old changeset.
2837 if path not in filestoamend:
2855 if path not in filestoamend:
2838 return old.filectx(path)
2856 return old.filectx(path)
2839
2857
2840 # Return None for removed files.
2858 # Return None for removed files.
2841 if path in wctx.removed():
2859 if path in wctx.removed():
2842 return None
2860 return None
2843
2861
2844 fctx = wctx[path]
2862 fctx = wctx[path]
2845 flags = fctx.flags()
2863 flags = fctx.flags()
2846 mctx = context.memfilectx(
2864 mctx = context.memfilectx(
2847 repo,
2865 repo,
2848 ctx_,
2866 ctx_,
2849 fctx.path(),
2867 fctx.path(),
2850 fctx.data(),
2868 fctx.data(),
2851 islink=b'l' in flags,
2869 islink=b'l' in flags,
2852 isexec=b'x' in flags,
2870 isexec=b'x' in flags,
2853 copysource=copied.get(path),
2871 copysource=copied.get(path),
2854 )
2872 )
2855 return mctx
2873 return mctx
2856 except KeyError:
2874 except KeyError:
2857 return None
2875 return None
2858
2876
2859 else:
2877 else:
2860 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
2878 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
2861
2879
2862 # Use version of files as in the old cset
2880 # Use version of files as in the old cset
2863 def filectxfn(repo, ctx_, path):
2881 def filectxfn(repo, ctx_, path):
2864 try:
2882 try:
2865 return old.filectx(path)
2883 return old.filectx(path)
2866 except KeyError:
2884 except KeyError:
2867 return None
2885 return None
2868
2886
2869 # See if we got a message from -m or -l; if not, open the editor with
2887 # See if we got a message from -m or -l; if not, open the editor with
2870 # the message of the changeset to amend.
2888 # the message of the changeset to amend.
2871 message = logmessage(ui, opts)
2889 message = logmessage(ui, opts)
2872
2890
2873 editform = mergeeditform(old, b'commit.amend')
2891 editform = mergeeditform(old, b'commit.amend')
2874
2892
2875 if not message:
2893 if not message:
2876 message = old.description()
2894 message = old.description()
2877 # If no message was provided and --edit was not passed, the default is to
2895 # If no message was provided and --edit was not passed, the default is to
2878 # invoke the editor, but allow --no-edit. If somehow we don't have any
2896 # invoke the editor, but allow --no-edit. If somehow we don't have any
2879 # description, let's always start the editor.
2897 # description, let's always start the editor.
2880 doedit = not message or opts.get(b'edit') in [True, None]
2898 doedit = not message or opts.get(b'edit') in [True, None]
2881 else:
2899 else:
2882 # If a message was provided, the default is to not invoke the editor, but
2900 # If a message was provided, the default is to not invoke the editor, but
2883 # allow --edit.
2901 # allow --edit.
2884 doedit = opts.get(b'edit') is True
2902 doedit = opts.get(b'edit') is True
2885 editor = getcommiteditor(edit=doedit, editform=editform)
2903 editor = getcommiteditor(edit=doedit, editform=editform)
2886
2904
2887 pureextra = extra.copy()
2905 pureextra = extra.copy()
2888 extra[b'amend_source'] = old.hex()
2906 extra[b'amend_source'] = old.hex()
2889
2907
2890 new = context.memctx(
2908 new = context.memctx(
2891 repo,
2909 repo,
2892 parents=[base.node(), old.p2().node()],
2910 parents=[base.node(), old.p2().node()],
2893 text=message,
2911 text=message,
2894 files=files,
2912 files=files,
2895 filectxfn=filectxfn,
2913 filectxfn=filectxfn,
2896 user=user,
2914 user=user,
2897 date=date,
2915 date=date,
2898 extra=extra,
2916 extra=extra,
2899 editor=editor,
2917 editor=editor,
2900 )
2918 )
2901
2919
2902 newdesc = changelog.stripdesc(new.description())
2920 newdesc = changelog.stripdesc(new.description())
2903 if (
2921 if (
2904 (not changes)
2922 (not changes)
2905 and newdesc == old.description()
2923 and newdesc == old.description()
2906 and user == old.user()
2924 and user == old.user()
2907 and (date == old.date() or datemaydiffer)
2925 and (date == old.date() or datemaydiffer)
2908 and pureextra == old.extra()
2926 and pureextra == old.extra()
2909 ):
2927 ):
2910 # nothing changed. continuing here would create a new node
2928 # nothing changed. continuing here would create a new node
2911 # anyway because of the amend_source noise.
2929 # anyway because of the amend_source noise.
2912 #
2930 #
2913 # This is not what we expect from amend.
2931 # This is not what we expect from amend.
2914 return old.node()
2932 return old.node()
2915
2933
2916 commitphase = None
2934 commitphase = None
2917 if opts.get(b'secret'):
2935 if opts.get(b'secret'):
2918 commitphase = phases.secret
2936 commitphase = phases.secret
2919 newid = repo.commitctx(new)
2937 newid = repo.commitctx(new)
2920 ms.reset()
2938 ms.reset()
2921
2939
2922 # Reroute the working copy parent to the new changeset
2940 # Reroute the working copy parent to the new changeset
2923 repo.setparents(newid, nullid)
2941 repo.setparents(newid, nullid)
2924 mapping = {old.node(): (newid,)}
2942 mapping = {old.node(): (newid,)}
2925 obsmetadata = None
2943 obsmetadata = None
2926 if opts.get(b'note'):
2944 if opts.get(b'note'):
2927 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
2945 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
2928 backup = ui.configbool(b'rewrite', b'backup-bundle')
2946 backup = ui.configbool(b'rewrite', b'backup-bundle')
2929 scmutil.cleanupnodes(
2947 scmutil.cleanupnodes(
2930 repo,
2948 repo,
2931 mapping,
2949 mapping,
2932 b'amend',
2950 b'amend',
2933 metadata=obsmetadata,
2951 metadata=obsmetadata,
2934 fixphase=True,
2952 fixphase=True,
2935 targetphase=commitphase,
2953 targetphase=commitphase,
2936 backup=backup,
2954 backup=backup,
2937 )
2955 )
2938
2956
2939 # Fixing the dirstate because localrepo.commitctx does not update
2957 # Fixing the dirstate because localrepo.commitctx does not update
2940 # it. This is rather convenient because we did not need to update
2958 # it. This is rather convenient because we did not need to update
2941 # the dirstate for all the files in the new commit which commitctx
2959 # the dirstate for all the files in the new commit which commitctx
2942 # could have done if it updated the dirstate. Now, we can
2960 # could have done if it updated the dirstate. Now, we can
2943 # selectively update the dirstate only for the amended files.
2961 # selectively update the dirstate only for the amended files.
2944 dirstate = repo.dirstate
2962 dirstate = repo.dirstate
2945
2963
2946 # Update the state of the files which were added and modified in the
2964 # Update the state of the files which were added and modified in the
2947 # amend to "normal" in the dirstate. We need to use "normallookup" since
2965 # amend to "normal" in the dirstate. We need to use "normallookup" since
2948 # the files may have changed since the command started; using "normal"
2966 # the files may have changed since the command started; using "normal"
2949 # would mark them as clean but with uncommitted contents.
2967 # would mark them as clean but with uncommitted contents.
2950 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2968 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2951 for f in normalfiles:
2969 for f in normalfiles:
2952 dirstate.normallookup(f)
2970 dirstate.normallookup(f)
2953
2971
2954 # Update the state of files which were removed in the amend
2972 # Update the state of files which were removed in the amend
2955 # to "removed" in the dirstate.
2973 # to "removed" in the dirstate.
2956 removedfiles = set(wctx.removed()) & filestoamend
2974 removedfiles = set(wctx.removed()) & filestoamend
2957 for f in removedfiles:
2975 for f in removedfiles:
2958 dirstate.drop(f)
2976 dirstate.drop(f)
2959
2977
2960 return newid
2978 return newid
2961
2979
2962
2980
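# NOTE (editorial note, not part of the upstream file): amend() deliberately
# returns old.node() unchanged -- a no-op -- when nothing about the commit
# would really change: no working-copy file matched for amending, the
# (stripped) description, user and extra are identical, and the date is
# either identical or only differs in a way resolvecommitoptions() said may
# be ignored. Otherwise the amend_source marker alone would force a new,
# otherwise identical node.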
2963 def commiteditor(repo, ctx, subs, editform=b''):
2981 def commiteditor(repo, ctx, subs, editform=b''):
2964 if ctx.description():
2982 if ctx.description():
2965 return ctx.description()
2983 return ctx.description()
2966 return commitforceeditor(
2984 return commitforceeditor(
2967 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
2985 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
2968 )
2986 )
2969
2987
2970
2988
2971 def commitforceeditor(
2989 def commitforceeditor(
2972 repo,
2990 repo,
2973 ctx,
2991 ctx,
2974 subs,
2992 subs,
2975 finishdesc=None,
2993 finishdesc=None,
2976 extramsg=None,
2994 extramsg=None,
2977 editform=b'',
2995 editform=b'',
2978 unchangedmessagedetection=False,
2996 unchangedmessagedetection=False,
2979 ):
2997 ):
2980 if not extramsg:
2998 if not extramsg:
2981 extramsg = _(b"Leave message empty to abort commit.")
2999 extramsg = _(b"Leave message empty to abort commit.")
2982
3000
2983 forms = [e for e in editform.split(b'.') if e]
3001 forms = [e for e in editform.split(b'.') if e]
2984 forms.insert(0, b'changeset')
3002 forms.insert(0, b'changeset')
2985 templatetext = None
3003 templatetext = None
2986 while forms:
3004 while forms:
2987 ref = b'.'.join(forms)
3005 ref = b'.'.join(forms)
2988 if repo.ui.config(b'committemplate', ref):
3006 if repo.ui.config(b'committemplate', ref):
2989 templatetext = committext = buildcommittemplate(
3007 templatetext = committext = buildcommittemplate(
2990 repo, ctx, subs, extramsg, ref
3008 repo, ctx, subs, extramsg, ref
2991 )
3009 )
2992 break
3010 break
2993 forms.pop()
3011 forms.pop()
2994 else:
3012 else:
2995 committext = buildcommittext(repo, ctx, subs, extramsg)
3013 committext = buildcommittext(repo, ctx, subs, extramsg)
2996
3014
2997 # run editor in the repository root
3015 # run editor in the repository root
2998 olddir = encoding.getcwd()
3016 olddir = encoding.getcwd()
2999 os.chdir(repo.root)
3017 os.chdir(repo.root)
3000
3018
3001 # make in-memory changes visible to external process
3019 # make in-memory changes visible to external process
3002 tr = repo.currenttransaction()
3020 tr = repo.currenttransaction()
3003 repo.dirstate.write(tr)
3021 repo.dirstate.write(tr)
3004 pending = tr and tr.writepending() and repo.root
3022 pending = tr and tr.writepending() and repo.root
3005
3023
3006 editortext = repo.ui.edit(
3024 editortext = repo.ui.edit(
3007 committext,
3025 committext,
3008 ctx.user(),
3026 ctx.user(),
3009 ctx.extra(),
3027 ctx.extra(),
3010 editform=editform,
3028 editform=editform,
3011 pending=pending,
3029 pending=pending,
3012 repopath=repo.path,
3030 repopath=repo.path,
3013 action=b'commit',
3031 action=b'commit',
3014 )
3032 )
3015 text = editortext
3033 text = editortext
3016
3034
3017 # strip away anything below this special string (used for editors that want
3035 # strip away anything below this special string (used for editors that want
3018 # to display the diff)
3036 # to display the diff)
3019 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3037 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3020 if stripbelow:
3038 if stripbelow:
3021 text = text[: stripbelow.start()]
3039 text = text[: stripbelow.start()]
3022
3040
3023 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3041 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3024 os.chdir(olddir)
3042 os.chdir(olddir)
3025
3043
3026 if finishdesc:
3044 if finishdesc:
3027 text = finishdesc(text)
3045 text = finishdesc(text)
3028 if not text.strip():
3046 if not text.strip():
3029 raise error.Abort(_(b"empty commit message"))
3047 raise error.Abort(_(b"empty commit message"))
3030 if unchangedmessagedetection and editortext == templatetext:
3048 if unchangedmessagedetection and editortext == templatetext:
3031 raise error.Abort(_(b"commit message unchanged"))
3049 raise error.Abort(_(b"commit message unchanged"))
3032
3050
3033 return text
3051 return text
3034
3052
3035
3053
3036 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3054 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3037 ui = repo.ui
3055 ui = repo.ui
3038 spec = formatter.reference_templatespec(ref)
3056 spec = formatter.reference_templatespec(ref)
3039 t = logcmdutil.changesettemplater(ui, repo, spec)
3057 t = logcmdutil.changesettemplater(ui, repo, spec)
3040 t.t.cache.update(
3058 t.t.cache.update(
3041 (k, templater.unquotestring(v))
3059 (k, templater.unquotestring(v))
3042 for k, v in repo.ui.configitems(b'committemplate')
3060 for k, v in repo.ui.configitems(b'committemplate')
3043 )
3061 )
3044
3062
3045 if not extramsg:
3063 if not extramsg:
3046 extramsg = b'' # ensure that extramsg is a string
3064 extramsg = b'' # ensure that extramsg is a string
3047
3065
3048 ui.pushbuffer()
3066 ui.pushbuffer()
3049 t.show(ctx, extramsg=extramsg)
3067 t.show(ctx, extramsg=extramsg)
3050 return ui.popbuffer()
3068 return ui.popbuffer()
3051
3069
3052
3070
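# NOTE (editorial sketch, not part of the upstream file): buildcommittemplate()
# turns a [committemplate] configuration into the text preloaded into the
# editor, using the editform lookup in commitforceeditor() above. A minimal,
# purely illustrative hgrc could look like:
#
#     [committemplate]
#     changeset = {desc}\n\n
#         HG: {extramsg}\n
#         HG: user: {author}\n
#     changeset.commit.amend = {desc}\n\n
#         HG: amending -- {extramsg}\n
#
# The most specific dotted key matching the editform (for amend that is
# 'changeset.commit.amend') wins, and the lookup falls back component by
# component until plain 'changeset'.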
3053 def hgprefix(msg):
3071 def hgprefix(msg):
3054 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3072 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3055
3073
3056
3074
3057 def buildcommittext(repo, ctx, subs, extramsg):
3075 def buildcommittext(repo, ctx, subs, extramsg):
3058 edittext = []
3076 edittext = []
3059 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3077 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3060 if ctx.description():
3078 if ctx.description():
3061 edittext.append(ctx.description())
3079 edittext.append(ctx.description())
3062 edittext.append(b"")
3080 edittext.append(b"")
3063 edittext.append(b"") # Empty line between message and comments.
3081 edittext.append(b"") # Empty line between message and comments.
3064 edittext.append(
3082 edittext.append(
3065 hgprefix(
3083 hgprefix(
3066 _(
3084 _(
3067 b"Enter commit message."
3085 b"Enter commit message."
3068 b" Lines beginning with 'HG:' are removed."
3086 b" Lines beginning with 'HG:' are removed."
3069 )
3087 )
3070 )
3088 )
3071 )
3089 )
3072 edittext.append(hgprefix(extramsg))
3090 edittext.append(hgprefix(extramsg))
3073 edittext.append(b"HG: --")
3091 edittext.append(b"HG: --")
3074 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3092 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3075 if ctx.p2():
3093 if ctx.p2():
3076 edittext.append(hgprefix(_(b"branch merge")))
3094 edittext.append(hgprefix(_(b"branch merge")))
3077 if ctx.branch():
3095 if ctx.branch():
3078 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3096 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3079 if bookmarks.isactivewdirparent(repo):
3097 if bookmarks.isactivewdirparent(repo):
3080 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3098 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3081 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3099 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3082 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3100 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3083 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3101 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3084 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3102 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3085 if not added and not modified and not removed:
3103 if not added and not modified and not removed:
3086 edittext.append(hgprefix(_(b"no files changed")))
3104 edittext.append(hgprefix(_(b"no files changed")))
3087 edittext.append(b"")
3105 edittext.append(b"")
3088
3106
3089 return b"\n".join(edittext)
3107 return b"\n".join(edittext)
3090
3108
3091
3109
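# NOTE (editorial sketch, not part of the upstream file) of the default,
# non-templated editor text built above, for an ordinary commit touching one
# file (user and file name are illustrative):
#
#     <existing description, if any>
#
#
#     HG: Enter commit message. Lines beginning with 'HG:' are removed.
#     HG: Leave message empty to abort commit.
#     HG: --
#     HG: user: Alice <alice@example.com>
#     HG: branch 'default'
#     HG: changed foo.py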
3092 def commitstatus(repo, node, branch, bheads=None, opts=None):
3110 def commitstatus(repo, node, branch, bheads=None, opts=None):
3093 if opts is None:
3111 if opts is None:
3094 opts = {}
3112 opts = {}
3095 ctx = repo[node]
3113 ctx = repo[node]
3096 parents = ctx.parents()
3114 parents = ctx.parents()
3097
3115
3098 if (
3116 if (
3099 not opts.get(b'amend')
3117 not opts.get(b'amend')
3100 and bheads
3118 and bheads
3101 and node not in bheads
3119 and node not in bheads
3102 and not any(
3120 and not any(
3103 p.node() in bheads and p.branch() == branch for p in parents
3121 p.node() in bheads and p.branch() == branch for p in parents
3104 )
3122 )
3105 ):
3123 ):
3106 repo.ui.status(_(b'created new head\n'))
3124 repo.ui.status(_(b'created new head\n'))
3107 # The message is not printed for initial roots. For the other
3125 # The message is not printed for initial roots. For the other
3108 # changesets, it is printed in the following situations:
3126 # changesets, it is printed in the following situations:
3109 #
3127 #
3110 # Par column: for the 2 parents with ...
3128 # Par column: for the 2 parents with ...
3111 # N: null or no parent
3129 # N: null or no parent
3112 # B: parent is on another named branch
3130 # B: parent is on another named branch
3113 # C: parent is a regular non head changeset
3131 # C: parent is a regular non head changeset
3114 # H: parent was a branch head of the current branch
3132 # H: parent was a branch head of the current branch
3115 # Msg column: whether we print "created new head" message
3133 # Msg column: whether we print "created new head" message
3116 # In the following, it is assumed that there already exists some
3134 # In the following, it is assumed that there already exists some
3117 # initial branch heads of the current branch, otherwise nothing is
3135 # initial branch heads of the current branch, otherwise nothing is
3118 # printed anyway.
3136 # printed anyway.
3119 #
3137 #
3120 # Par Msg Comment
3138 # Par Msg Comment
3121 # N N y additional topo root
3139 # N N y additional topo root
3122 #
3140 #
3123 # B N y additional branch root
3141 # B N y additional branch root
3124 # C N y additional topo head
3142 # C N y additional topo head
3125 # H N n usual case
3143 # H N n usual case
3126 #
3144 #
3127 # B B y weird additional branch root
3145 # B B y weird additional branch root
3128 # C B y branch merge
3146 # C B y branch merge
3129 # H B n merge with named branch
3147 # H B n merge with named branch
3130 #
3148 #
3131 # C C y additional head from merge
3149 # C C y additional head from merge
3132 # C H n merge with a head
3150 # C H n merge with a head
3133 #
3151 #
3134 # H H n head merge: head count decreases
3152 # H H n head merge: head count decreases
3135
3153
3136 if not opts.get(b'close_branch'):
3154 if not opts.get(b'close_branch'):
3137 for r in parents:
3155 for r in parents:
3138 if r.closesbranch() and r.branch() == branch:
3156 if r.closesbranch() and r.branch() == branch:
3139 repo.ui.status(
3157 repo.ui.status(
3140 _(b'reopening closed branch head %d\n') % r.rev()
3158 _(b'reopening closed branch head %d\n') % r.rev()
3141 )
3159 )
3142
3160
3143 if repo.ui.debugflag:
3161 if repo.ui.debugflag:
3144 repo.ui.write(
3162 repo.ui.write(
3145 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3163 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3146 )
3164 )
3147 elif repo.ui.verbose:
3165 elif repo.ui.verbose:
3148 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3166 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3149
3167
3150
3168
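# NOTE (editorial note, not part of the upstream file): in terms of the table
# above, the common "created new head" case is committing with a parent that
# is a regular non-head changeset of the current branch (row "C N"), e.g.
# after `hg update -r <older-rev>` followed by an ordinary `hg commit`.
# Merges whose other parent already was a head of the branch (rows "H B" and
# "C H") stay quiet because the head count does not grow.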
3151 def postcommitstatus(repo, pats, opts):
3169 def postcommitstatus(repo, pats, opts):
3152 return repo.status(match=scmutil.match(repo[None], pats, opts))
3170 return repo.status(match=scmutil.match(repo[None], pats, opts))
3153
3171
3154
3172
3155 def revert(ui, repo, ctx, *pats, **opts):
3173 def revert(ui, repo, ctx, *pats, **opts):
3156 opts = pycompat.byteskwargs(opts)
3174 opts = pycompat.byteskwargs(opts)
3157 parent, p2 = repo.dirstate.parents()
3175 parent, p2 = repo.dirstate.parents()
3158 node = ctx.node()
3176 node = ctx.node()
3159
3177
3160 mf = ctx.manifest()
3178 mf = ctx.manifest()
3161 if node == p2:
3179 if node == p2:
3162 parent = p2
3180 parent = p2
3163
3181
3164 # need all matching names in dirstate and manifest of target rev,
3182 # need all matching names in dirstate and manifest of target rev,
3165 # so have to walk both. do not print errors if files exist in one
3183 # so have to walk both. do not print errors if files exist in one
3166 # but not the other. in both cases, filesets should be evaluated against
3184 # but not the other. in both cases, filesets should be evaluated against
3167 # workingctx to get consistent result (issue4497). this means 'set:**'
3185 # workingctx to get consistent result (issue4497). this means 'set:**'
3168 # cannot be used to select missing files from target rev.
3186 # cannot be used to select missing files from target rev.
3169
3187
3170 # `names` is a mapping for all elements in working copy and target revision
3188 # `names` is a mapping for all elements in working copy and target revision
3171 # The mapping is in the form:
3189 # The mapping is in the form:
3172 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3190 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3173 names = {}
3191 names = {}
3174 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3192 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3175
3193
3176 with repo.wlock():
3194 with repo.wlock():
3177 ## filling of the `names` mapping
3195 ## filling of the `names` mapping
3178 # walk dirstate to fill `names`
3196 # walk dirstate to fill `names`
3179
3197
3180 interactive = opts.get(b'interactive', False)
3198 interactive = opts.get(b'interactive', False)
3181 wctx = repo[None]
3199 wctx = repo[None]
3182 m = scmutil.match(wctx, pats, opts)
3200 m = scmutil.match(wctx, pats, opts)
3183
3201
3184 # we'll need this later
3202 # we'll need this later
3185 targetsubs = sorted(s for s in wctx.substate if m(s))
3203 targetsubs = sorted(s for s in wctx.substate if m(s))
3186
3204
3187 if not m.always():
3205 if not m.always():
3188 matcher = matchmod.badmatch(m, lambda x, y: False)
3206 matcher = matchmod.badmatch(m, lambda x, y: False)
3189 for abs in wctx.walk(matcher):
3207 for abs in wctx.walk(matcher):
3190 names[abs] = m.exact(abs)
3208 names[abs] = m.exact(abs)
3191
3209
3192 # walk target manifest to fill `names`
3210 # walk target manifest to fill `names`
3193
3211
3194 def badfn(path, msg):
3212 def badfn(path, msg):
3195 if path in names:
3213 if path in names:
3196 return
3214 return
3197 if path in ctx.substate:
3215 if path in ctx.substate:
3198 return
3216 return
3199 path_ = path + b'/'
3217 path_ = path + b'/'
3200 for f in names:
3218 for f in names:
3201 if f.startswith(path_):
3219 if f.startswith(path_):
3202 return
3220 return
3203 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3221 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3204
3222
3205 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3223 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3206 if abs not in names:
3224 if abs not in names:
3207 names[abs] = m.exact(abs)
3225 names[abs] = m.exact(abs)
3208
3226
3209 # Find the status of all files in `names`.
3227 # Find the status of all files in `names`.
3210 m = scmutil.matchfiles(repo, names)
3228 m = scmutil.matchfiles(repo, names)
3211
3229
3212 changes = repo.status(
3230 changes = repo.status(
3213 node1=node, match=m, unknown=True, ignored=True, clean=True
3231 node1=node, match=m, unknown=True, ignored=True, clean=True
3214 )
3232 )
3215 else:
3233 else:
3216 changes = repo.status(node1=node, match=m)
3234 changes = repo.status(node1=node, match=m)
3217 for kind in changes:
3235 for kind in changes:
3218 for abs in kind:
3236 for abs in kind:
3219 names[abs] = m.exact(abs)
3237 names[abs] = m.exact(abs)
3220
3238
3221 m = scmutil.matchfiles(repo, names)
3239 m = scmutil.matchfiles(repo, names)
3222
3240
3223 modified = set(changes.modified)
3241 modified = set(changes.modified)
3224 added = set(changes.added)
3242 added = set(changes.added)
3225 removed = set(changes.removed)
3243 removed = set(changes.removed)
3226 _deleted = set(changes.deleted)
3244 _deleted = set(changes.deleted)
3227 unknown = set(changes.unknown)
3245 unknown = set(changes.unknown)
3228 unknown.update(changes.ignored)
3246 unknown.update(changes.ignored)
3229 clean = set(changes.clean)
3247 clean = set(changes.clean)
3230 modadded = set()
3248 modadded = set()
3231
3249
3232 # We need to account for the state of the file in the dirstate,
3250 # We need to account for the state of the file in the dirstate,
3233 # even when we revert against something other than the parent. This will
3251 # even when we revert against something other than the parent. This will
3234 # slightly alter the behavior of revert (doing a backup or not, delete
3252 # slightly alter the behavior of revert (doing a backup or not, delete
3235 # or just forget etc).
3253 # or just forget etc).
3236 if parent == node:
3254 if parent == node:
3237 dsmodified = modified
3255 dsmodified = modified
3238 dsadded = added
3256 dsadded = added
3239 dsremoved = removed
3257 dsremoved = removed
3240 # store all local modifications, useful later for rename detection
3258 # store all local modifications, useful later for rename detection
3241 localchanges = dsmodified | dsadded
3259 localchanges = dsmodified | dsadded
3242 modified, added, removed = set(), set(), set()
3260 modified, added, removed = set(), set(), set()
3243 else:
3261 else:
3244 changes = repo.status(node1=parent, match=m)
3262 changes = repo.status(node1=parent, match=m)
3245 dsmodified = set(changes.modified)
3263 dsmodified = set(changes.modified)
3246 dsadded = set(changes.added)
3264 dsadded = set(changes.added)
3247 dsremoved = set(changes.removed)
3265 dsremoved = set(changes.removed)
3248 # store all local modifications, useful later for rename detection
3266 # store all local modifications, useful later for rename detection
3249 localchanges = dsmodified | dsadded
3267 localchanges = dsmodified | dsadded
3250
3268
3251 # only take removes between the wc and the target into account
3269 # only take removes between the wc and the target into account
3252 clean |= dsremoved - removed
3270 clean |= dsremoved - removed
3253 dsremoved &= removed
3271 dsremoved &= removed
3254 # distinguish between dirstate removes and the others
3272 # distinguish between dirstate removes and the others
3255 removed -= dsremoved
3273 removed -= dsremoved
3256
3274
3257 modadded = added & dsmodified
3275 modadded = added & dsmodified
3258 added -= modadded
3276 added -= modadded
3259
3277
3260 # tell newly modified apart.
3278 # tell newly modified apart.
3261 dsmodified &= modified
3279 dsmodified &= modified
3262 dsmodified |= modified & dsadded # dirstate added may need backup
3280 dsmodified |= modified & dsadded # dirstate added may need backup
3263 modified -= dsmodified
3281 modified -= dsmodified
3264
3282
3265 # We need to wait for some post-processing to update this set
3283 # We need to wait for some post-processing to update this set
3266 # before making the distinction. The dirstate will be used for
3284 # before making the distinction. The dirstate will be used for
3267 # that purpose.
3285 # that purpose.
3268 dsadded = added
3286 dsadded = added
3269
3287
3270 # in case of merge, files that are actually added can be reported as
3288 # in case of merge, files that are actually added can be reported as
3271 # modified; we need to post-process the result
3289 # modified; we need to post-process the result
3272 if p2 != nullid:
3290 if p2 != nullid:
3273 mergeadd = set(dsmodified)
3291 mergeadd = set(dsmodified)
3274 for path in dsmodified:
3292 for path in dsmodified:
3275 if path in mf:
3293 if path in mf:
3276 mergeadd.remove(path)
3294 mergeadd.remove(path)
3277 dsadded |= mergeadd
3295 dsadded |= mergeadd
3278 dsmodified -= mergeadd
3296 dsmodified -= mergeadd
3279
3297
3280 # if f is a rename, update `names` to also revert the source
3298 # if f is a rename, update `names` to also revert the source
3281 for f in localchanges:
3299 for f in localchanges:
3282 src = repo.dirstate.copied(f)
3300 src = repo.dirstate.copied(f)
3283 # XXX should we check for rename down to target node?
3301 # XXX should we check for rename down to target node?
3284 if src and src not in names and repo.dirstate[src] == b'r':
3302 if src and src not in names and repo.dirstate[src] == b'r':
3285 dsremoved.add(src)
3303 dsremoved.add(src)
3286 names[src] = True
3304 names[src] = True
3287
3305
3288 # determine the exact nature of the deleted files
3306 # determine the exact nature of the deleted files
3289 deladded = set(_deleted)
3307 deladded = set(_deleted)
3290 for path in _deleted:
3308 for path in _deleted:
3291 if path in mf:
3309 if path in mf:
3292 deladded.remove(path)
3310 deladded.remove(path)
3293 deleted = _deleted - deladded
3311 deleted = _deleted - deladded
3294
3312
3295 # distinguish between file to forget and the other
3313 # distinguish between file to forget and the other
3296 added = set()
3314 added = set()
3297 for abs in dsadded:
3315 for abs in dsadded:
3298 if repo.dirstate[abs] != b'a':
3316 if repo.dirstate[abs] != b'a':
3299 added.add(abs)
3317 added.add(abs)
3300 dsadded -= added
3318 dsadded -= added
3301
3319
3302 for abs in deladded:
3320 for abs in deladded:
3303 if repo.dirstate[abs] == b'a':
3321 if repo.dirstate[abs] == b'a':
3304 dsadded.add(abs)
3322 dsadded.add(abs)
3305 deladded -= dsadded
3323 deladded -= dsadded
3306
3324
3307 # For files marked as removed, we check if an unknown file is present at
3325 # For files marked as removed, we check if an unknown file is present at
3308 # the same path. If such a file exists, it may need to be backed up.
3326 # the same path. If such a file exists, it may need to be backed up.
3309 # Making the distinction at this stage keeps the backup logic
3327 # Making the distinction at this stage keeps the backup logic
3310 # simpler.
3328 # simpler.
3311 removunk = set()
3329 removunk = set()
3312 for abs in removed:
3330 for abs in removed:
3313 target = repo.wjoin(abs)
3331 target = repo.wjoin(abs)
3314 if os.path.lexists(target):
3332 if os.path.lexists(target):
3315 removunk.add(abs)
3333 removunk.add(abs)
3316 removed -= removunk
3334 removed -= removunk
3317
3335
3318 dsremovunk = set()
3336 dsremovunk = set()
3319 for abs in dsremoved:
3337 for abs in dsremoved:
3320 target = repo.wjoin(abs)
3338 target = repo.wjoin(abs)
3321 if os.path.lexists(target):
3339 if os.path.lexists(target):
3322 dsremovunk.add(abs)
3340 dsremovunk.add(abs)
3323 dsremoved -= dsremovunk
3341 dsremoved -= dsremovunk
3324
3342
3325 # action to be actually performed by revert
3343 # action to be actually performed by revert
3326 # (<list of files>, <message>) tuple
3344 # (<list of files>, <message>) tuple
3327 actions = {
3345 actions = {
3328 b'revert': ([], _(b'reverting %s\n')),
3346 b'revert': ([], _(b'reverting %s\n')),
3329 b'add': ([], _(b'adding %s\n')),
3347 b'add': ([], _(b'adding %s\n')),
3330 b'remove': ([], _(b'removing %s\n')),
3348 b'remove': ([], _(b'removing %s\n')),
3331 b'drop': ([], _(b'removing %s\n')),
3349 b'drop': ([], _(b'removing %s\n')),
3332 b'forget': ([], _(b'forgetting %s\n')),
3350 b'forget': ([], _(b'forgetting %s\n')),
3333 b'undelete': ([], _(b'undeleting %s\n')),
3351 b'undelete': ([], _(b'undeleting %s\n')),
3334 b'noop': (None, _(b'no changes needed to %s\n')),
3352 b'noop': (None, _(b'no changes needed to %s\n')),
3335 b'unknown': (None, _(b'file not managed: %s\n')),
3353 b'unknown': (None, _(b'file not managed: %s\n')),
3336 }
3354 }
3337
3355
3338 # "constant" that convey the backup strategy.
3356 # "constant" that convey the backup strategy.
3339 # All set to `discard` if `no-backup` is set do avoid checking
3357 # All set to `discard` if `no-backup` is set do avoid checking
3340 # no_backup lower in the code.
3358 # no_backup lower in the code.
3341 # These values are ordered for comparison purposes
3359 # These values are ordered for comparison purposes
3342 backupinteractive = 3 # do backup if interactively modified
3360 backupinteractive = 3 # do backup if interactively modified
3343 backup = 2 # unconditionally do backup
3361 backup = 2 # unconditionally do backup
3344 check = 1 # check if the existing file differs from target
3362 check = 1 # check if the existing file differs from target
3345 discard = 0 # never do backup
3363 discard = 0 # never do backup
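# A small illustration (file name made up) of how this ordering is used by
# the dispatch loop further down: when a file's policy is `check`,
# `backup <= dobackup` is False (2 <= 1), so the potentially costly content
# comparison decides whether a .orig copy is written; for the `backup`
# policy the comparison is short-circuited and the copy is unconditional:
#
#     dobackup = check
#     if backup <= dobackup or wctx[b'f.txt'].cmp(ctx[b'f.txt']):
#         pass  # save f.txt as f.txt.orig before touching it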
3346 if opts.get(b'no_backup'):
3364 if opts.get(b'no_backup'):
3347 backupinteractive = backup = check = discard
3365 backupinteractive = backup = check = discard
3348 if interactive:
3366 if interactive:
3349 dsmodifiedbackup = backupinteractive
3367 dsmodifiedbackup = backupinteractive
3350 else:
3368 else:
3351 dsmodifiedbackup = backup
3369 dsmodifiedbackup = backup
3352 tobackup = set()
3370 tobackup = set()
3353
3371
3354 backupanddel = actions[b'remove']
3372 backupanddel = actions[b'remove']
3355 if not opts.get(b'no_backup'):
3373 if not opts.get(b'no_backup'):
3356 backupanddel = actions[b'drop']
3374 backupanddel = actions[b'drop']
3357
3375
3358 disptable = (
3376 disptable = (
3359 # dispatch table:
3377 # dispatch table:
3360 # file state
3378 # file state
3361 # action
3379 # action
3362 # make backup
3380 # make backup
3363 ## Sets that result in changes to files on disk
3381 ## Sets that result in changes to files on disk
3364 # Modified compared to target, no local change
3382 # Modified compared to target, no local change
3365 (modified, actions[b'revert'], discard),
3383 (modified, actions[b'revert'], discard),
3366 # Modified compared to target, but local file is deleted
3384 # Modified compared to target, but local file is deleted
3367 (deleted, actions[b'revert'], discard),
3385 (deleted, actions[b'revert'], discard),
3368 # Modified compared to target, local change
3386 # Modified compared to target, local change
3369 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3387 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3370 # Added since target
3388 # Added since target
3371 (added, actions[b'remove'], discard),
3389 (added, actions[b'remove'], discard),
3372 # Added in working directory
3390 # Added in working directory
3373 (dsadded, actions[b'forget'], discard),
3391 (dsadded, actions[b'forget'], discard),
3374 # Added since target, have local modification
3392 # Added since target, have local modification
3375 (modadded, backupanddel, backup),
3393 (modadded, backupanddel, backup),
3376 # Added since target but file is missing in working directory
3394 # Added since target but file is missing in working directory
3377 (deladded, actions[b'drop'], discard),
3395 (deladded, actions[b'drop'], discard),
3378 # Removed since target, before working copy parent
3396 # Removed since target, before working copy parent
3379 (removed, actions[b'add'], discard),
3397 (removed, actions[b'add'], discard),
3380 # Same as `removed` but an unknown file exists at the same path
3398 # Same as `removed` but an unknown file exists at the same path
3381 (removunk, actions[b'add'], check),
3399 (removunk, actions[b'add'], check),
3382 # Removed since target, marked as such in working copy parent
3400 # Removed since target, marked as such in working copy parent
3383 (dsremoved, actions[b'undelete'], discard),
3401 (dsremoved, actions[b'undelete'], discard),
3384 # Same as `dsremoved` but an unknown file exists at the same path
3402 # Same as `dsremoved` but an unknown file exists at the same path
3385 (dsremovunk, actions[b'undelete'], check),
3403 (dsremovunk, actions[b'undelete'], check),
3386 ## the following sets do not result in any file changes
3404 ## the following sets do not result in any file changes
3387 # File with no modification
3405 # File with no modification
3388 (clean, actions[b'noop'], discard),
3406 (clean, actions[b'noop'], discard),
3389 # Existing file, not tracked anywhere
3407 # Existing file, not tracked anywhere
3390 (unknown, actions[b'unknown'], discard),
3408 (unknown, actions[b'unknown'], discard),
3391 )
3409 )
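# A rough sketch (simplified, file name made up) of how one disptable entry
# is consumed by the loop below: a path found in `dsmodified` is appended to
# actions[b'revert'][0], its status message comes from actions[b'revert'][1],
# and dsmodifiedbackup decides whether a .orig copy is made first:
#
#     table, (xlist, msg), dobackup = dsmodified, actions[b'revert'], dsmodifiedbackup
#     if b'file.txt' in table:
#         xlist.append(b'file.txt')     # later consumed by _performrevert
#         ui.status(msg % b'file.txt')  # "reverting file.txt"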
3392
3410
3393 for abs, exact in sorted(names.items()):
3411 for abs, exact in sorted(names.items()):
3394 # target file to be touched on disk (relative to cwd)
3412 # target file to be touched on disk (relative to cwd)
3395 target = repo.wjoin(abs)
3413 target = repo.wjoin(abs)
3396 # search for the entry in the dispatch table.
3414 # search for the entry in the dispatch table.
3397 # if the file is in any of these sets, it was touched in the working
3415 # if the file is in any of these sets, it was touched in the working
3398 # directory parent and we are sure it needs to be reverted.
3416 # directory parent and we are sure it needs to be reverted.
3399 for table, (xlist, msg), dobackup in disptable:
3417 for table, (xlist, msg), dobackup in disptable:
3400 if abs not in table:
3418 if abs not in table:
3401 continue
3419 continue
3402 if xlist is not None:
3420 if xlist is not None:
3403 xlist.append(abs)
3421 xlist.append(abs)
3404 if dobackup:
3422 if dobackup:
3405 # If in interactive mode, don't automatically create
3423 # If in interactive mode, don't automatically create
3406 # .orig files (issue4793)
3424 # .orig files (issue4793)
3407 if dobackup == backupinteractive:
3425 if dobackup == backupinteractive:
3408 tobackup.add(abs)
3426 tobackup.add(abs)
3409 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3427 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3410 absbakname = scmutil.backuppath(ui, repo, abs)
3428 absbakname = scmutil.backuppath(ui, repo, abs)
3411 bakname = os.path.relpath(
3429 bakname = os.path.relpath(
3412 absbakname, start=repo.root
3430 absbakname, start=repo.root
3413 )
3431 )
3414 ui.note(
3432 ui.note(
3415 _(b'saving current version of %s as %s\n')
3433 _(b'saving current version of %s as %s\n')
3416 % (uipathfn(abs), uipathfn(bakname))
3434 % (uipathfn(abs), uipathfn(bakname))
3417 )
3435 )
3418 if not opts.get(b'dry_run'):
3436 if not opts.get(b'dry_run'):
3419 if interactive:
3437 if interactive:
3420 util.copyfile(target, absbakname)
3438 util.copyfile(target, absbakname)
3421 else:
3439 else:
3422 util.rename(target, absbakname)
3440 util.rename(target, absbakname)
3423 if opts.get(b'dry_run'):
3441 if opts.get(b'dry_run'):
3424 if ui.verbose or not exact:
3442 if ui.verbose or not exact:
3425 ui.status(msg % uipathfn(abs))
3443 ui.status(msg % uipathfn(abs))
3426 elif exact:
3444 elif exact:
3427 ui.warn(msg % uipathfn(abs))
3445 ui.warn(msg % uipathfn(abs))
3428 break
3446 break
3429
3447
3430 if not opts.get(b'dry_run'):
3448 if not opts.get(b'dry_run'):
3431 needdata = (b'revert', b'add', b'undelete')
3449 needdata = (b'revert', b'add', b'undelete')
3432 oplist = [actions[name][0] for name in needdata]
3450 oplist = [actions[name][0] for name in needdata]
3433 prefetch = scmutil.prefetchfiles
3451 prefetch = scmutil.prefetchfiles
3434 matchfiles = scmutil.matchfiles(
3452 matchfiles = scmutil.matchfiles(
3435 repo, [f for sublist in oplist for f in sublist]
3453 repo, [f for sublist in oplist for f in sublist]
3436 )
3454 )
3437 prefetch(
3455 prefetch(
3438 repo, [(ctx.rev(), matchfiles)],
3456 repo, [(ctx.rev(), matchfiles)],
3439 )
3457 )
3440 match = scmutil.match(repo[None], pats)
3458 match = scmutil.match(repo[None], pats)
3441 _performrevert(
3459 _performrevert(
3442 repo,
3460 repo,
3443 ctx,
3461 ctx,
3444 names,
3462 names,
3445 uipathfn,
3463 uipathfn,
3446 actions,
3464 actions,
3447 match,
3465 match,
3448 interactive,
3466 interactive,
3449 tobackup,
3467 tobackup,
3450 )
3468 )
3451
3469
3452 if targetsubs:
3470 if targetsubs:
3453 # Revert the subrepos on the revert list
3471 # Revert the subrepos on the revert list
3454 for sub in targetsubs:
3472 for sub in targetsubs:
3455 try:
3473 try:
3456 wctx.sub(sub).revert(
3474 wctx.sub(sub).revert(
3457 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3475 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3458 )
3476 )
3459 except KeyError:
3477 except KeyError:
3460 raise error.Abort(
3478 raise error.Abort(
3461 b"subrepository '%s' does not exist in %s!"
3479 b"subrepository '%s' does not exist in %s!"
3462 % (sub, short(ctx.node()))
3480 % (sub, short(ctx.node()))
3463 )
3481 )
3464
3482
3465
3483
3466 def _performrevert(
3484 def _performrevert(
3467 repo,
3485 repo,
3468 ctx,
3486 ctx,
3469 names,
3487 names,
3470 uipathfn,
3488 uipathfn,
3471 actions,
3489 actions,
3472 match,
3490 match,
3473 interactive=False,
3491 interactive=False,
3474 tobackup=None,
3492 tobackup=None,
3475 ):
3493 ):
3476 """function that actually perform all the actions computed for revert
3494 """function that actually perform all the actions computed for revert
3477
3495
3478 This is an independent function to let extensions plug in and react to
3496 This is an independent function to let extensions plug in and react to
3479 the imminent revert.
3497 the imminent revert.
3480
3498
3481 Make sure you have the working directory locked when calling this function.
3499 Make sure you have the working directory locked when calling this function.
3482 """
3500 """
3483 parent, p2 = repo.dirstate.parents()
3501 parent, p2 = repo.dirstate.parents()
3484 node = ctx.node()
3502 node = ctx.node()
3485 excluded_files = []
3503 excluded_files = []
3486
3504
3487 def checkout(f):
3505 def checkout(f):
3488 fc = ctx[f]
3506 fc = ctx[f]
3489 repo.wwrite(f, fc.data(), fc.flags())
3507 repo.wwrite(f, fc.data(), fc.flags())
3490
3508
3491 def doremove(f):
3509 def doremove(f):
3492 try:
3510 try:
3493 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3511 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3494 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3512 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3495 except OSError:
3513 except OSError:
3496 pass
3514 pass
3497 repo.dirstate.remove(f)
3515 repo.dirstate.remove(f)
3498
3516
3499 def prntstatusmsg(action, f):
3517 def prntstatusmsg(action, f):
3500 exact = names[f]
3518 exact = names[f]
3501 if repo.ui.verbose or not exact:
3519 if repo.ui.verbose or not exact:
3502 repo.ui.status(actions[action][1] % uipathfn(f))
3520 repo.ui.status(actions[action][1] % uipathfn(f))
3503
3521
3504 audit_path = pathutil.pathauditor(repo.root, cached=True)
3522 audit_path = pathutil.pathauditor(repo.root, cached=True)
3505 for f in actions[b'forget'][0]:
3523 for f in actions[b'forget'][0]:
3506 if interactive:
3524 if interactive:
3507 choice = repo.ui.promptchoice(
3525 choice = repo.ui.promptchoice(
3508 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3526 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3509 )
3527 )
3510 if choice == 0:
3528 if choice == 0:
3511 prntstatusmsg(b'forget', f)
3529 prntstatusmsg(b'forget', f)
3512 repo.dirstate.drop(f)
3530 repo.dirstate.drop(f)
3513 else:
3531 else:
3514 excluded_files.append(f)
3532 excluded_files.append(f)
3515 else:
3533 else:
3516 prntstatusmsg(b'forget', f)
3534 prntstatusmsg(b'forget', f)
3517 repo.dirstate.drop(f)
3535 repo.dirstate.drop(f)
3518 for f in actions[b'remove'][0]:
3536 for f in actions[b'remove'][0]:
3519 audit_path(f)
3537 audit_path(f)
3520 if interactive:
3538 if interactive:
3521 choice = repo.ui.promptchoice(
3539 choice = repo.ui.promptchoice(
3522 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3540 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3523 )
3541 )
3524 if choice == 0:
3542 if choice == 0:
3525 prntstatusmsg(b'remove', f)
3543 prntstatusmsg(b'remove', f)
3526 doremove(f)
3544 doremove(f)
3527 else:
3545 else:
3528 excluded_files.append(f)
3546 excluded_files.append(f)
3529 else:
3547 else:
3530 prntstatusmsg(b'remove', f)
3548 prntstatusmsg(b'remove', f)
3531 doremove(f)
3549 doremove(f)
3532 for f in actions[b'drop'][0]:
3550 for f in actions[b'drop'][0]:
3533 audit_path(f)
3551 audit_path(f)
3534 prntstatusmsg(b'drop', f)
3552 prntstatusmsg(b'drop', f)
3535 repo.dirstate.remove(f)
3553 repo.dirstate.remove(f)
3536
3554
3537 normal = None
3555 normal = None
3538 if node == parent:
3556 if node == parent:
3539 # We're reverting to our parent. If possible, we'd like status
3557 # We're reverting to our parent. If possible, we'd like status
3540 # to report the file as clean. We have to use normallookup for
3558 # to report the file as clean. We have to use normallookup for
3541 # merges to avoid losing information about merged/dirty files.
3559 # merges to avoid losing information about merged/dirty files.
3542 if p2 != nullid:
3560 if p2 != nullid:
3543 normal = repo.dirstate.normallookup
3561 normal = repo.dirstate.normallookup
3544 else:
3562 else:
3545 normal = repo.dirstate.normal
3563 normal = repo.dirstate.normal
3546
3564
3547 newlyaddedandmodifiedfiles = set()
3565 newlyaddedandmodifiedfiles = set()
3548 if interactive:
3566 if interactive:
3549 # Prompt the user for changes to revert
3567 # Prompt the user for changes to revert
3550 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3568 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3551 m = scmutil.matchfiles(repo, torevert)
3569 m = scmutil.matchfiles(repo, torevert)
3552 diffopts = patch.difffeatureopts(
3570 diffopts = patch.difffeatureopts(
3553 repo.ui,
3571 repo.ui,
3554 whitespace=True,
3572 whitespace=True,
3555 section=b'commands',
3573 section=b'commands',
3556 configprefix=b'revert.interactive.',
3574 configprefix=b'revert.interactive.',
3557 )
3575 )
3558 diffopts.nodates = True
3576 diffopts.nodates = True
3559 diffopts.git = True
3577 diffopts.git = True
3560 operation = b'apply'
3578 operation = b'apply'
3561 if node == parent:
3579 if node == parent:
3562 if repo.ui.configbool(
3580 if repo.ui.configbool(
3563 b'experimental', b'revert.interactive.select-to-keep'
3581 b'experimental', b'revert.interactive.select-to-keep'
3564 ):
3582 ):
3565 operation = b'keep'
3583 operation = b'keep'
3566 else:
3584 else:
3567 operation = b'discard'
3585 operation = b'discard'
3568
3586
3569 if operation == b'apply':
3587 if operation == b'apply':
3570 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3588 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3571 else:
3589 else:
3572 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3590 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3573 originalchunks = patch.parsepatch(diff)
3591 originalchunks = patch.parsepatch(diff)
3574
3592
3575 try:
3593 try:
3576
3594
3577 chunks, opts = recordfilter(
3595 chunks, opts = recordfilter(
3578 repo.ui, originalchunks, match, operation=operation
3596 repo.ui, originalchunks, match, operation=operation
3579 )
3597 )
3580 if operation == b'discard':
3598 if operation == b'discard':
3581 chunks = patch.reversehunks(chunks)
3599 chunks = patch.reversehunks(chunks)
3582
3600
3583 except error.PatchError as err:
3601 except error.PatchError as err:
3584 raise error.Abort(_(b'error parsing patch: %s') % err)
3602 raise error.Abort(_(b'error parsing patch: %s') % err)
3585
3603
3586 # FIXME: when doing an interactive revert of a copy, there's no way of
3604 # FIXME: when doing an interactive revert of a copy, there's no way of
3587 # performing a partial revert of the added file, the only option is
3605 # performing a partial revert of the added file, the only option is
3588 # "remove added file <name> (Yn)?", so we don't need to worry about the
3606 # "remove added file <name> (Yn)?", so we don't need to worry about the
3589 # alsorestore value. Ideally we'd be able to partially revert
3607 # alsorestore value. Ideally we'd be able to partially revert
3590 # copied/renamed files.
3608 # copied/renamed files.
3591 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3609 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3592 chunks, originalchunks
3610 chunks, originalchunks
3593 )
3611 )
3594 if tobackup is None:
3612 if tobackup is None:
3595 tobackup = set()
3613 tobackup = set()
3596 # Apply changes
3614 # Apply changes
3597 fp = stringio()
3615 fp = stringio()
3598 # chunks are serialized per file, but files aren't sorted
3616 # chunks are serialized per file, but files aren't sorted
3599 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3617 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3600 prntstatusmsg(b'revert', f)
3618 prntstatusmsg(b'revert', f)
3601 files = set()
3619 files = set()
3602 for c in chunks:
3620 for c in chunks:
3603 if ishunk(c):
3621 if ishunk(c):
3604 abs = c.header.filename()
3622 abs = c.header.filename()
3605 # Create a backup file only if this hunk should be backed up
3623 # Create a backup file only if this hunk should be backed up
3606 if c.header.filename() in tobackup:
3624 if c.header.filename() in tobackup:
3607 target = repo.wjoin(abs)
3625 target = repo.wjoin(abs)
3608 bakname = scmutil.backuppath(repo.ui, repo, abs)
3626 bakname = scmutil.backuppath(repo.ui, repo, abs)
3609 util.copyfile(target, bakname)
3627 util.copyfile(target, bakname)
3610 tobackup.remove(abs)
3628 tobackup.remove(abs)
3611 if abs not in files:
3629 if abs not in files:
3612 files.add(abs)
3630 files.add(abs)
3613 if operation == b'keep':
3631 if operation == b'keep':
3614 checkout(abs)
3632 checkout(abs)
3615 c.write(fp)
3633 c.write(fp)
3616 dopatch = fp.tell()
3634 dopatch = fp.tell()
3617 fp.seek(0)
3635 fp.seek(0)
3618 if dopatch:
3636 if dopatch:
3619 try:
3637 try:
3620 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3638 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3621 except error.PatchError as err:
3639 except error.PatchError as err:
3622 raise error.Abort(pycompat.bytestr(err))
3640 raise error.Abort(pycompat.bytestr(err))
3623 del fp
3641 del fp
3624 else:
3642 else:
3625 for f in actions[b'revert'][0]:
3643 for f in actions[b'revert'][0]:
3626 prntstatusmsg(b'revert', f)
3644 prntstatusmsg(b'revert', f)
3627 checkout(f)
3645 checkout(f)
3628 if normal:
3646 if normal:
3629 normal(f)
3647 normal(f)
3630
3648
3631 for f in actions[b'add'][0]:
3649 for f in actions[b'add'][0]:
3632 # Don't check out modified files; they are already created by the diff
3650 # Don't check out modified files; they are already created by the diff
3633 if f not in newlyaddedandmodifiedfiles:
3651 if f not in newlyaddedandmodifiedfiles:
3634 prntstatusmsg(b'add', f)
3652 prntstatusmsg(b'add', f)
3635 checkout(f)
3653 checkout(f)
3636 repo.dirstate.add(f)
3654 repo.dirstate.add(f)
3637
3655
3638 normal = repo.dirstate.normallookup
3656 normal = repo.dirstate.normallookup
3639 if node == parent and p2 == nullid:
3657 if node == parent and p2 == nullid:
3640 normal = repo.dirstate.normal
3658 normal = repo.dirstate.normal
3641 for f in actions[b'undelete'][0]:
3659 for f in actions[b'undelete'][0]:
3642 if interactive:
3660 if interactive:
3643 choice = repo.ui.promptchoice(
3661 choice = repo.ui.promptchoice(
3644 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3662 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3645 )
3663 )
3646 if choice == 0:
3664 if choice == 0:
3647 prntstatusmsg(b'undelete', f)
3665 prntstatusmsg(b'undelete', f)
3648 checkout(f)
3666 checkout(f)
3649 normal(f)
3667 normal(f)
3650 else:
3668 else:
3651 excluded_files.append(f)
3669 excluded_files.append(f)
3652 else:
3670 else:
3653 prntstatusmsg(b'undelete', f)
3671 prntstatusmsg(b'undelete', f)
3654 checkout(f)
3672 checkout(f)
3655 normal(f)
3673 normal(f)
3656
3674
3657 copied = copies.pathcopies(repo[parent], ctx)
3675 copied = copies.pathcopies(repo[parent], ctx)
3658
3676
3659 for f in (
3677 for f in (
3660 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
3678 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
3661 ):
3679 ):
3662 if f in copied:
3680 if f in copied:
3663 repo.dirstate.copy(copied[f], f)
3681 repo.dirstate.copy(copied[f], f)
3664
3682
3665
3683
3666 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3684 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3667 # commands.outgoing. "missing" is the "missing" attribute of the result of
3685 # commands.outgoing. "missing" is the "missing" attribute of the result of
3668 # "findcommonoutgoing()"
3686 # "findcommonoutgoing()"
3669 outgoinghooks = util.hooks()
3687 outgoinghooks = util.hooks()
3670
3688
3671 # a list of (ui, repo) functions called by commands.summary
3689 # a list of (ui, repo) functions called by commands.summary
3672 summaryhooks = util.hooks()
3690 summaryhooks = util.hooks()
3673
3691
3674 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3692 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3675 #
3693 #
3676 # functions should return tuple of booleans below, if 'changes' is None:
3694 # functions should return tuple of booleans below, if 'changes' is None:
3677 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3695 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3678 #
3696 #
3679 # otherwise, 'changes' is a tuple of tuples below:
3697 # otherwise, 'changes' is a tuple of tuples below:
3680 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3698 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3681 # - (desturl, destbranch, destpeer, outgoing)
3699 # - (desturl, destbranch, destpeer, outgoing)
3682 summaryremotehooks = util.hooks()
3700 summaryremotehooks = util.hooks()
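# A minimal extension-side sketch of plugging into these hook points (the
# extension name and function below are made up; the signature follows the
# comment above):
#
#     def showextra(ui, repo):
#         ui.status(b'myext: nothing to report\n')
#
#     def extsetup(ui):
#         cmdutil.summaryhooks.add(b'myext', showextra)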
3683
3701
3684
3702
3685 def checkunfinished(repo, commit=False, skipmerge=False):
3703 def checkunfinished(repo, commit=False, skipmerge=False):
3686 '''Look for an unfinished multistep operation, like graft, and abort
3704 '''Look for an unfinished multistep operation, like graft, and abort
3687 if found. It's probably good to check this right before
3705 if found. It's probably good to check this right before
3688 bailifchanged().
3706 bailifchanged().
3689 '''
3707 '''
3690 # Check for non-clearable states first, so things like rebase will take
3708 # Check for non-clearable states first, so things like rebase will take
3691 # precedence over update.
3709 # precedence over update.
3692 for state in statemod._unfinishedstates:
3710 for state in statemod._unfinishedstates:
3693 if (
3711 if (
3694 state._clearable
3712 state._clearable
3695 or (commit and state._allowcommit)
3713 or (commit and state._allowcommit)
3696 or state._reportonly
3714 or state._reportonly
3697 ):
3715 ):
3698 continue
3716 continue
3699 if state.isunfinished(repo):
3717 if state.isunfinished(repo):
3700 raise error.Abort(state.msg(), hint=state.hint())
3718 raise error.Abort(state.msg(), hint=state.hint())
3701
3719
3702 for s in statemod._unfinishedstates:
3720 for s in statemod._unfinishedstates:
3703 if (
3721 if (
3704 not s._clearable
3722 not s._clearable
3705 or (commit and s._allowcommit)
3723 or (commit and s._allowcommit)
3706 or (s._opname == b'merge' and skipmerge)
3724 or (s._opname == b'merge' and skipmerge)
3707 or s._reportonly
3725 or s._reportonly
3708 ):
3726 ):
3709 continue
3727 continue
3710 if s.isunfinished(repo):
3728 if s.isunfinished(repo):
3711 raise error.Abort(s.msg(), hint=s.hint())
3729 raise error.Abort(s.msg(), hint=s.hint())
3712
3730
3713
3731
3714 def clearunfinished(repo):
3732 def clearunfinished(repo):
3715 '''Check for unfinished operations (as above), and clear the ones
3733 '''Check for unfinished operations (as above), and clear the ones
3716 that are clearable.
3734 that are clearable.
3717 '''
3735 '''
3718 for state in statemod._unfinishedstates:
3736 for state in statemod._unfinishedstates:
3719 if state._reportonly:
3737 if state._reportonly:
3720 continue
3738 continue
3721 if not state._clearable and state.isunfinished(repo):
3739 if not state._clearable and state.isunfinished(repo):
3722 raise error.Abort(state.msg(), hint=state.hint())
3740 raise error.Abort(state.msg(), hint=state.hint())
3723
3741
3724 for s in statemod._unfinishedstates:
3742 for s in statemod._unfinishedstates:
3725 if s._opname == b'merge' or s._reportonly:
3743 if s._opname == b'merge' or s._reportonly:
3726 continue
3744 continue
3727 if s._clearable and s.isunfinished(repo):
3745 if s._clearable and s.isunfinished(repo):
3728 util.unlink(repo.vfs.join(s._fname))
3746 util.unlink(repo.vfs.join(s._fname))
3729
3747
3730
3748
3731 def getunfinishedstate(repo):
3749 def getunfinishedstate(repo):
3749 '''Check for unfinished operations and return the statecheck object
3767 '''Check for unfinished operations and return the statecheck object
3750 for it, if any.'''
3768 for it, if any.'''
3734 for state in statemod._unfinishedstates:
3752 for state in statemod._unfinishedstates:
3735 if state.isunfinished(repo):
3753 if state.isunfinished(repo):
3736 return state
3754 return state
3737 return None
3755 return None
3738
3756
3739
3757
3740 def howtocontinue(repo):
3758 def howtocontinue(repo):
3741 '''Check for an unfinished operation and return the command to finish
3759 '''Check for an unfinished operation and return the command to finish
3742 it.
3760 it.
3743
3761
3744 statemod._unfinishedstates list is checked for an unfinished operation
3762 statemod._unfinishedstates list is checked for an unfinished operation
3745 and the corresponding message to finish it is generated if a method to
3763 and the corresponding message to finish it is generated if a method to
3746 continue is supported by the operation.
3764 continue is supported by the operation.
3747
3765
3748 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3766 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3749 a boolean.
3767 a boolean.
3750 '''
3768 '''
3751 contmsg = _(b"continue: %s")
3769 contmsg = _(b"continue: %s")
3752 for state in statemod._unfinishedstates:
3770 for state in statemod._unfinishedstates:
3753 if not state._continueflag:
3771 if not state._continueflag:
3754 continue
3772 continue
3755 if state.isunfinished(repo):
3773 if state.isunfinished(repo):
3756 return contmsg % state.continuemsg(), True
3774 return contmsg % state.continuemsg(), True
3757 if repo[None].dirty(missing=True, merge=False, branch=False):
3775 if repo[None].dirty(missing=True, merge=False, branch=False):
3758 return contmsg % _(b"hg commit"), False
3776 return contmsg % _(b"hg commit"), False
3759 return None, None
3777 return None, None
3760
3778
3761
3779
3762 def checkafterresolved(repo):
3780 def checkafterresolved(repo):
3763 '''Inform the user about the next action after completing hg resolve
3781 '''Inform the user about the next action after completing hg resolve
3764
3782
3765 If there's an unfinished operation that supports the continue flag,
3783 If there's an unfinished operation that supports the continue flag,
3766 howtocontinue will yield repo.ui.warn as the reporter.
3784 howtocontinue will yield repo.ui.warn as the reporter.
3767
3785
3768 Otherwise, it will yield repo.ui.note.
3786 Otherwise, it will yield repo.ui.note.
3769 '''
3787 '''
3770 msg, warning = howtocontinue(repo)
3788 msg, warning = howtocontinue(repo)
3771 if msg is not None:
3789 if msg is not None:
3772 if warning:
3790 if warning:
3773 repo.ui.warn(b"%s\n" % msg)
3791 repo.ui.warn(b"%s\n" % msg)
3774 else:
3792 else:
3775 repo.ui.note(b"%s\n" % msg)
3793 repo.ui.note(b"%s\n" % msg)
3776
3794
3777
3795
3778 def wrongtooltocontinue(repo, task):
3796 def wrongtooltocontinue(repo, task):
3779 '''Raise an abort suggesting how to properly continue if there is an
3797 '''Raise an abort suggesting how to properly continue if there is an
3780 active task.
3798 active task.
3781
3799
3782 Uses howtocontinue() to find the active task.
3800 Uses howtocontinue() to find the active task.
3783
3801
3784 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3802 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3785 a hint.
3803 a hint.
3786 '''
3804 '''
3787 after = howtocontinue(repo)
3805 after = howtocontinue(repo)
3788 hint = None
3806 hint = None
3789 if after[1]:
3807 if after[1]:
3790 hint = after[0]
3808 hint = after[0]
3791 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
3809 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
3792
3810
3793
3811
3794 def abortgraft(ui, repo, graftstate):
3812 def abortgraft(ui, repo, graftstate):
3795 """abort the interrupted graft and rollbacks to the state before interrupted
3813 """abort the interrupted graft and rollbacks to the state before interrupted
3796 graft"""
3814 graft"""
3797 if not graftstate.exists():
3815 if not graftstate.exists():
3798 raise error.Abort(_(b"no interrupted graft to abort"))
3816 raise error.Abort(_(b"no interrupted graft to abort"))
3799 statedata = readgraftstate(repo, graftstate)
3817 statedata = readgraftstate(repo, graftstate)
3800 newnodes = statedata.get(b'newnodes')
3818 newnodes = statedata.get(b'newnodes')
3801 if newnodes is None:
3819 if newnodes is None:
3802 # an old graft state which does not have all the data required to abort
3820 # an old graft state which does not have all the data required to abort
3803 # the graft
3821 # the graft
3804 raise error.Abort(_(b"cannot abort using an old graftstate"))
3822 raise error.Abort(_(b"cannot abort using an old graftstate"))
3805
3823
3806 # changeset from which graft operation was started
3824 # changeset from which graft operation was started
3807 if len(newnodes) > 0:
3825 if len(newnodes) > 0:
3808 startctx = repo[newnodes[0]].p1()
3826 startctx = repo[newnodes[0]].p1()
3809 else:
3827 else:
3810 startctx = repo[b'.']
3828 startctx = repo[b'.']
3811 # whether to strip or not
3829 # whether to strip or not
3812 cleanup = False
3830 cleanup = False
3813
3831
3814 if newnodes:
3832 if newnodes:
3815 newnodes = [repo[r].rev() for r in newnodes]
3833 newnodes = [repo[r].rev() for r in newnodes]
3816 cleanup = True
3834 cleanup = True
3817 # check that none of the newnodes has turned public or is already public
3835 # check that none of the newnodes has turned public or is already public
3818 immutable = [c for c in newnodes if not repo[c].mutable()]
3836 immutable = [c for c in newnodes if not repo[c].mutable()]
3819 if immutable:
3837 if immutable:
3820 repo.ui.warn(
3838 repo.ui.warn(
3821 _(b"cannot clean up public changesets %s\n")
3839 _(b"cannot clean up public changesets %s\n")
3822 % b', '.join(bytes(repo[r]) for r in immutable),
3840 % b', '.join(bytes(repo[r]) for r in immutable),
3823 hint=_(b"see 'hg help phases' for details"),
3841 hint=_(b"see 'hg help phases' for details"),
3824 )
3842 )
3825 cleanup = False
3843 cleanup = False
3826
3844
3827 # checking that no new nodes are created on top of grafted revs
3845 # checking that no new nodes are created on top of grafted revs
3828 desc = set(repo.changelog.descendants(newnodes))
3846 desc = set(repo.changelog.descendants(newnodes))
3829 if desc - set(newnodes):
3847 if desc - set(newnodes):
3830 repo.ui.warn(
3848 repo.ui.warn(
3831 _(
3849 _(
3832 b"new changesets detected on destination "
3850 b"new changesets detected on destination "
3833 b"branch, can't strip\n"
3851 b"branch, can't strip\n"
3834 )
3852 )
3835 )
3853 )
3836 cleanup = False
3854 cleanup = False
3837
3855
3838 if cleanup:
3856 if cleanup:
3839 with repo.wlock(), repo.lock():
3857 with repo.wlock(), repo.lock():
3840 mergemod.clean_update(startctx)
3858 mergemod.clean_update(startctx)
3841 # stripping the new nodes created
3859 # stripping the new nodes created
3842 strippoints = [
3860 strippoints = [
3843 c.node() for c in repo.set(b"roots(%ld)", newnodes)
3861 c.node() for c in repo.set(b"roots(%ld)", newnodes)
3844 ]
3862 ]
3845 repair.strip(repo.ui, repo, strippoints, backup=False)
3863 repair.strip(repo.ui, repo, strippoints, backup=False)
3846
3864
3847 if not cleanup:
3865 if not cleanup:
3848 # we don't update to the startnode if we can't strip
3866 # we don't update to the startnode if we can't strip
3849 startctx = repo[b'.']
3867 startctx = repo[b'.']
3850 mergemod.clean_update(startctx)
3868 mergemod.clean_update(startctx)
3851
3869
3852 ui.status(_(b"graft aborted\n"))
3870 ui.status(_(b"graft aborted\n"))
3853 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
3871 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
3854 graftstate.delete()
3872 graftstate.delete()
3855 return 0
3873 return 0
3856
3874
3857
3875
3858 def readgraftstate(repo, graftstate):
3876 def readgraftstate(repo, graftstate):
3859 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
3877 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
3860 """read the graft state file and return a dict of the data stored in it"""
3878 """read the graft state file and return a dict of the data stored in it"""
3861 try:
3879 try:
3862 return graftstate.read()
3880 return graftstate.read()
3863 except error.CorruptedState:
3881 except error.CorruptedState:
3864 nodes = repo.vfs.read(b'graftstate').splitlines()
3882 nodes = repo.vfs.read(b'graftstate').splitlines()
3865 return {b'nodes': nodes}
3883 return {b'nodes': nodes}
3866
3884
3867
3885
3868 def hgabortgraft(ui, repo):
3886 def hgabortgraft(ui, repo):
3869 """ abort logic for aborting graft using 'hg abort'"""
3887 """ abort logic for aborting graft using 'hg abort'"""
3870 with repo.wlock():
3888 with repo.wlock():
3871 graftstate = statemod.cmdstate(repo, b'graftstate')
3889 graftstate = statemod.cmdstate(repo, b'graftstate')
3872 return abortgraft(ui, repo, graftstate)
3890 return abortgraft(ui, repo, graftstate)
@@ -1,1600 +1,1609 b''
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
18
18
19 def loadconfigtable(ui, extname, configtable):
19 def loadconfigtable(ui, extname, configtable):
20 """update config item known to the ui with the extension ones"""
20 """update config item known to the ui with the extension ones"""
21 for section, items in sorted(configtable.items()):
21 for section, items in sorted(configtable.items()):
22 knownitems = ui._knownconfig.setdefault(section, itemregister())
22 knownitems = ui._knownconfig.setdefault(section, itemregister())
23 knownkeys = set(knownitems)
23 knownkeys = set(knownitems)
24 newkeys = set(items)
24 newkeys = set(items)
25 for key in sorted(knownkeys & newkeys):
25 for key in sorted(knownkeys & newkeys):
26 msg = b"extension '%s' overwrite config item '%s.%s'"
26 msg = b"extension '%s' overwrite config item '%s.%s'"
27 msg %= (extname, section, key)
27 msg %= (extname, section, key)
28 ui.develwarn(msg, config=b'warn-config')
28 ui.develwarn(msg, config=b'warn-config')
29
29
30 knownitems.update(items)
30 knownitems.update(items)
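# Typical extension-side table that ends up being merged here (sketch; the
# extension and option names are made up):
#
#     configtable = {}
#     configitem = registrar.configitem(configtable)
#     configitem(b'myext', b'some-flag', default=False)
#
# When the extension is loaded, loadconfigtable(ui, b'myext', configtable)
# folds that table into ui._knownconfig, and any key that collides with an
# already known item triggers the develwarn above.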
31
31
32
32
33 class configitem(object):
33 class configitem(object):
34 """represent a known config item
34 """represent a known config item
35
35
36 :section: the official config section where to find this item,
36 :section: the official config section where to find this item,
37 :name: the official name within the section,
37 :name: the official name within the section,
38 :default: default value for this item,
38 :default: default value for this item,
39 :alias: optional list of tuples as alternatives,
39 :alias: optional list of tuples as alternatives,
40 :generic: this is a generic definition, match name using regular expression.
40 :generic: this is a generic definition, match name using regular expression.
41 """
41 """
42
42
43 def __init__(
43 def __init__(
44 self,
44 self,
45 section,
45 section,
46 name,
46 name,
47 default=None,
47 default=None,
48 alias=(),
48 alias=(),
49 generic=False,
49 generic=False,
50 priority=0,
50 priority=0,
51 experimental=False,
51 experimental=False,
52 ):
52 ):
53 self.section = section
53 self.section = section
54 self.name = name
54 self.name = name
55 self.default = default
55 self.default = default
56 self.alias = list(alias)
56 self.alias = list(alias)
57 self.generic = generic
57 self.generic = generic
58 self.priority = priority
58 self.priority = priority
59 self.experimental = experimental
59 self.experimental = experimental
60 self._re = None
60 self._re = None
61 if generic:
61 if generic:
62 self._re = re.compile(self.name)
62 self._re = re.compile(self.name)
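# Construction sketch (standalone; the section/name values are illustrative):
#
#     plain = configitem(b'ui', b'username', default=None)
#     wild = configitem(b'color', b'.*', default=None, generic=True)
#
# `wild` compiles its name into a regex, which itemregister.get() below uses
# to answer lookups such as b'color.diff.inserted'.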
63
63
64
64
65 class itemregister(dict):
65 class itemregister(dict):
66 """A specialized dictionary that can handle wild-card selection"""
66 """A specialized dictionary that can handle wild-card selection"""
67
67
68 def __init__(self):
68 def __init__(self):
69 super(itemregister, self).__init__()
69 super(itemregister, self).__init__()
70 self._generics = set()
70 self._generics = set()
71
71
72 def update(self, other):
72 def update(self, other):
73 super(itemregister, self).update(other)
73 super(itemregister, self).update(other)
74 self._generics.update(other._generics)
74 self._generics.update(other._generics)
75
75
76 def __setitem__(self, key, item):
76 def __setitem__(self, key, item):
77 super(itemregister, self).__setitem__(key, item)
77 super(itemregister, self).__setitem__(key, item)
78 if item.generic:
78 if item.generic:
79 self._generics.add(item)
79 self._generics.add(item)
80
80
81 def get(self, key):
81 def get(self, key):
82 baseitem = super(itemregister, self).get(key)
82 baseitem = super(itemregister, self).get(key)
83 if baseitem is not None and not baseitem.generic:
83 if baseitem is not None and not baseitem.generic:
84 return baseitem
84 return baseitem
85
85
86 # search for a matching generic item
86 # search for a matching generic item
87 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
87 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
88 for item in generics:
88 for item in generics:
89 # we use 'match' instead of 'search' to make the matching simpler
89 # we use 'match' instead of 'search' to make the matching simpler
90 # for people unfamiliar with regular expressions. Having the match
90 # for people unfamiliar with regular expressions. Having the match
91 # rooted to the start of the string will produce less surprising
91 # rooted to the start of the string will produce less surprising
92 # results for users writing simple regexes for sub-attributes.
92 # results for users writing simple regexes for sub-attributes.
93 #
93 #
94 # For example using "color\..*" match produces an unsurprising
94 # For example using "color\..*" match produces an unsurprising
95 # result, while using search could suddenly match apparently
95 # result, while using search could suddenly match apparently
96 # unrelated configuration that happens to contain "color."
96 # unrelated configuration that happens to contain "color."
97 # anywhere. This is a tradeoff where we favor requiring ".*" on
97 # anywhere. This is a tradeoff where we favor requiring ".*" on
98 # some matches to avoid the need to prefix most patterns with "^".
98 # some matches to avoid the need to prefix most patterns with "^".
99 # The "^" seems more error prone.
99 # The "^" seems more error prone.
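# For instance (standalone illustration of the point above):
#
#     >>> import re
#     >>> bool(re.match(br'color\..*', b'color.mode'))
#     True
#     >>> bool(re.match(br'color\..*', b'fancycolor.mode'))
#     False
#     >>> bool(re.search(br'color\..*', b'fancycolor.mode'))
#     True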
100 if item._re.match(key):
100 if item._re.match(key):
101 return item
101 return item
102
102
103 return None
103 return None
104
104
105
105
106 coreitems = {}
106 coreitems = {}
107
107
108
108
109 def _register(configtable, *args, **kwargs):
109 def _register(configtable, *args, **kwargs):
110 item = configitem(*args, **kwargs)
110 item = configitem(*args, **kwargs)
111 section = configtable.setdefault(item.section, itemregister())
111 section = configtable.setdefault(item.section, itemregister())
112 if item.name in section:
112 if item.name in section:
113 msg = b"duplicated config item registration for '%s.%s'"
113 msg = b"duplicated config item registration for '%s.%s'"
114 raise error.ProgrammingError(msg % (item.section, item.name))
114 raise error.ProgrammingError(msg % (item.section, item.name))
115 section[item.name] = item
115 section[item.name] = item
116
116
117
117
118 # special value for case where the default is derived from other values
118 # special value for case where the default is derived from other values
119 dynamicdefault = object()
119 dynamicdefault = object()
120
120
121 # Registering actual config items
121 # Registering actual config items
122
122
123
123
124 def getitemregister(configtable):
124 def getitemregister(configtable):
125 f = functools.partial(_register, configtable)
125 f = functools.partial(_register, configtable)
126 # export pseudo enum as configitem.*
126 # export pseudo enum as configitem.*
127 f.dynamicdefault = dynamicdefault
127 f.dynamicdefault = dynamicdefault
128 return f
128 return f
129
129
130
130
131 coreconfigitem = getitemregister(coreitems)
131 coreconfigitem = getitemregister(coreitems)
132
132
133
133
134 def _registerdiffopts(section, configprefix=b''):
134 def _registerdiffopts(section, configprefix=b''):
135 coreconfigitem(
135 coreconfigitem(
136 section, configprefix + b'nodates', default=False,
136 section, configprefix + b'nodates', default=False,
137 )
137 )
138 coreconfigitem(
138 coreconfigitem(
139 section, configprefix + b'showfunc', default=False,
139 section, configprefix + b'showfunc', default=False,
140 )
140 )
141 coreconfigitem(
141 coreconfigitem(
142 section, configprefix + b'unified', default=None,
142 section, configprefix + b'unified', default=None,
143 )
143 )
144 coreconfigitem(
144 coreconfigitem(
145 section, configprefix + b'git', default=False,
145 section, configprefix + b'git', default=False,
146 )
146 )
147 coreconfigitem(
147 coreconfigitem(
148 section, configprefix + b'ignorews', default=False,
148 section, configprefix + b'ignorews', default=False,
149 )
149 )
150 coreconfigitem(
150 coreconfigitem(
151 section, configprefix + b'ignorewsamount', default=False,
151 section, configprefix + b'ignorewsamount', default=False,
152 )
152 )
153 coreconfigitem(
153 coreconfigitem(
154 section, configprefix + b'ignoreblanklines', default=False,
154 section, configprefix + b'ignoreblanklines', default=False,
155 )
155 )
156 coreconfigitem(
156 coreconfigitem(
157 section, configprefix + b'ignorewseol', default=False,
157 section, configprefix + b'ignorewseol', default=False,
158 )
158 )
159 coreconfigitem(
159 coreconfigitem(
160 section, configprefix + b'nobinary', default=False,
160 section, configprefix + b'nobinary', default=False,
161 )
161 )
162 coreconfigitem(
162 coreconfigitem(
163 section, configprefix + b'noprefix', default=False,
163 section, configprefix + b'noprefix', default=False,
164 )
164 )
165 coreconfigitem(
165 coreconfigitem(
166 section, configprefix + b'word-diff', default=False,
166 section, configprefix + b'word-diff', default=False,
167 )
167 )
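# So the calls further down, e.g.
#
#     _registerdiffopts(section=b'annotate')
#     _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
#
# register b'annotate.nodates', b'annotate.git', ...,
# b'commands.commit.interactive.nodates', and so on, with the defaults given
# above (a summary of this helper, not new behaviour).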
168
168
169
169
170 coreconfigitem(
170 coreconfigitem(
171 b'alias', b'.*', default=dynamicdefault, generic=True,
171 b'alias', b'.*', default=dynamicdefault, generic=True,
172 )
172 )
173 coreconfigitem(
173 coreconfigitem(
174 b'auth', b'cookiefile', default=None,
174 b'auth', b'cookiefile', default=None,
175 )
175 )
176 _registerdiffopts(section=b'annotate')
176 _registerdiffopts(section=b'annotate')
177 # bookmarks.pushing: internal hack for discovery
177 # bookmarks.pushing: internal hack for discovery
178 coreconfigitem(
178 coreconfigitem(
179 b'bookmarks', b'pushing', default=list,
179 b'bookmarks', b'pushing', default=list,
180 )
180 )
181 # bundle.mainreporoot: internal hack for bundlerepo
181 # bundle.mainreporoot: internal hack for bundlerepo
182 coreconfigitem(
182 coreconfigitem(
183 b'bundle', b'mainreporoot', default=b'',
183 b'bundle', b'mainreporoot', default=b'',
184 )
184 )
185 coreconfigitem(
185 coreconfigitem(
186 b'censor', b'policy', default=b'abort', experimental=True,
186 b'censor', b'policy', default=b'abort', experimental=True,
187 )
187 )
188 coreconfigitem(
188 coreconfigitem(
189 b'chgserver', b'idletimeout', default=3600,
189 b'chgserver', b'idletimeout', default=3600,
190 )
190 )
191 coreconfigitem(
191 coreconfigitem(
192 b'chgserver', b'skiphash', default=False,
192 b'chgserver', b'skiphash', default=False,
193 )
193 )
194 coreconfigitem(
194 coreconfigitem(
195 b'cmdserver', b'log', default=None,
195 b'cmdserver', b'log', default=None,
196 )
196 )
197 coreconfigitem(
197 coreconfigitem(
198 b'cmdserver', b'max-log-files', default=7,
198 b'cmdserver', b'max-log-files', default=7,
199 )
199 )
200 coreconfigitem(
200 coreconfigitem(
201 b'cmdserver', b'max-log-size', default=b'1 MB',
201 b'cmdserver', b'max-log-size', default=b'1 MB',
202 )
202 )
203 coreconfigitem(
203 coreconfigitem(
204 b'cmdserver', b'max-repo-cache', default=0, experimental=True,
204 b'cmdserver', b'max-repo-cache', default=0, experimental=True,
205 )
205 )
206 coreconfigitem(
206 coreconfigitem(
207 b'cmdserver', b'message-encodings', default=list,
207 b'cmdserver', b'message-encodings', default=list,
208 )
208 )
209 coreconfigitem(
209 coreconfigitem(
210 b'cmdserver',
210 b'cmdserver',
211 b'track-log',
211 b'track-log',
212 default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
212 default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
213 )
213 )
214 coreconfigitem(
214 coreconfigitem(
215 b'cmdserver', b'shutdown-on-interrupt', default=True,
215 b'cmdserver', b'shutdown-on-interrupt', default=True,
216 )
216 )
217 coreconfigitem(
217 coreconfigitem(
218 b'color', b'.*', default=None, generic=True,
218 b'color', b'.*', default=None, generic=True,
219 )
219 )
220 coreconfigitem(
220 coreconfigitem(
221 b'color', b'mode', default=b'auto',
221 b'color', b'mode', default=b'auto',
222 )
222 )
223 coreconfigitem(
223 coreconfigitem(
224 b'color', b'pagermode', default=dynamicdefault,
224 b'color', b'pagermode', default=dynamicdefault,
225 )
225 )
226 coreconfigitem(
226 coreconfigitem(
227 b'command-templates',
227 b'command-templates',
228 b'graphnode',
228 b'graphnode',
229 default=None,
229 default=None,
230 alias=[(b'ui', b'graphnodetemplate')],
230 alias=[(b'ui', b'graphnodetemplate')],
231 )
231 )
232 coreconfigitem(
232 coreconfigitem(
233 b'command-templates', b'log', default=None, alias=[(b'ui', b'logtemplate')],
233 b'command-templates', b'log', default=None, alias=[(b'ui', b'logtemplate')],
234 )
234 )
235 coreconfigitem(
235 coreconfigitem(
236 b'command-templates',
236 b'command-templates',
237 b'mergemarker',
237 b'mergemarker',
238 default=(
238 default=(
239 b'{node|short} '
239 b'{node|short} '
240 b'{ifeq(tags, "tip", "", '
240 b'{ifeq(tags, "tip", "", '
241 b'ifeq(tags, "", "", "{tags} "))}'
241 b'ifeq(tags, "", "", "{tags} "))}'
242 b'{if(bookmarks, "{bookmarks} ")}'
242 b'{if(bookmarks, "{bookmarks} ")}'
243 b'{ifeq(branch, "default", "", "{branch} ")}'
243 b'{ifeq(branch, "default", "", "{branch} ")}'
244 b'- {author|user}: {desc|firstline}'
244 b'- {author|user}: {desc|firstline}'
245 ),
245 ),
246 alias=[(b'ui', b'mergemarkertemplate')],
246 alias=[(b'ui', b'mergemarkertemplate')],
247 )
247 )
248 coreconfigitem(
248 coreconfigitem(
249 b'command-templates',
249 b'command-templates',
250 b'pre-merge-tool-output',
250 b'pre-merge-tool-output',
251 default=None,
251 default=None,
252 alias=[(b'ui', b'pre-merge-tool-output-template')],
252 alias=[(b'ui', b'pre-merge-tool-output-template')],
253 )
253 )
254 coreconfigitem(
255 b'command-templates', b'oneline-summary', default=None,
256 )
257 coreconfigitem(
258 b'command-templates',
259 b'oneline-summary.*',
260 default=dynamicdefault,
261 generic=True,
262 )
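# A configuration sketch for the two items just registered (hgrc syntax; the
# per-command key below only illustrates the generic b'oneline-summary.*'
# form and is not a documented name):
#
#     [command-templates]
#     oneline-summary = {rev}:{node|short}  {desc|firstline}
#     oneline-summary.rebase = {rev}  {desc|firstline}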
254 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
263 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
255 coreconfigitem(
264 coreconfigitem(
256 b'commands', b'commit.post-status', default=False,
265 b'commands', b'commit.post-status', default=False,
257 )
266 )
258 coreconfigitem(
267 coreconfigitem(
259 b'commands', b'grep.all-files', default=False, experimental=True,
268 b'commands', b'grep.all-files', default=False, experimental=True,
260 )
269 )
261 coreconfigitem(
270 coreconfigitem(
262 b'commands', b'merge.require-rev', default=False,
271 b'commands', b'merge.require-rev', default=False,
263 )
272 )
264 coreconfigitem(
273 coreconfigitem(
265 b'commands', b'push.require-revs', default=False,
274 b'commands', b'push.require-revs', default=False,
266 )
275 )
267 coreconfigitem(
276 coreconfigitem(
268 b'commands', b'resolve.confirm', default=False,
277 b'commands', b'resolve.confirm', default=False,
269 )
278 )
270 coreconfigitem(
279 coreconfigitem(
271 b'commands', b'resolve.explicit-re-merge', default=False,
280 b'commands', b'resolve.explicit-re-merge', default=False,
272 )
281 )
273 coreconfigitem(
282 coreconfigitem(
274 b'commands', b'resolve.mark-check', default=b'none',
283 b'commands', b'resolve.mark-check', default=b'none',
275 )
284 )
276 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
285 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
277 coreconfigitem(
286 coreconfigitem(
278 b'commands', b'show.aliasprefix', default=list,
287 b'commands', b'show.aliasprefix', default=list,
279 )
288 )
280 coreconfigitem(
289 coreconfigitem(
281 b'commands', b'status.relative', default=False,
290 b'commands', b'status.relative', default=False,
282 )
291 )
283 coreconfigitem(
292 coreconfigitem(
284 b'commands', b'status.skipstates', default=[], experimental=True,
293 b'commands', b'status.skipstates', default=[], experimental=True,
285 )
294 )
286 coreconfigitem(
295 coreconfigitem(
287 b'commands', b'status.terse', default=b'',
296 b'commands', b'status.terse', default=b'',
288 )
297 )
289 coreconfigitem(
298 coreconfigitem(
290 b'commands', b'status.verbose', default=False,
299 b'commands', b'status.verbose', default=False,
291 )
300 )
292 coreconfigitem(
301 coreconfigitem(
293 b'commands', b'update.check', default=None,
302 b'commands', b'update.check', default=None,
294 )
303 )
295 coreconfigitem(
304 coreconfigitem(
296 b'commands', b'update.requiredest', default=False,
305 b'commands', b'update.requiredest', default=False,
297 )
306 )
298 coreconfigitem(
307 coreconfigitem(
299 b'committemplate', b'.*', default=None, generic=True,
308 b'committemplate', b'.*', default=None, generic=True,
300 )
309 )
301 coreconfigitem(
310 coreconfigitem(
302 b'convert', b'bzr.saverev', default=True,
311 b'convert', b'bzr.saverev', default=True,
303 )
312 )
304 coreconfigitem(
313 coreconfigitem(
305 b'convert', b'cvsps.cache', default=True,
314 b'convert', b'cvsps.cache', default=True,
306 )
315 )
307 coreconfigitem(
316 coreconfigitem(
308 b'convert', b'cvsps.fuzz', default=60,
317 b'convert', b'cvsps.fuzz', default=60,
309 )
318 )
310 coreconfigitem(
319 coreconfigitem(
311 b'convert', b'cvsps.logencoding', default=None,
320 b'convert', b'cvsps.logencoding', default=None,
312 )
321 )
313 coreconfigitem(
322 coreconfigitem(
314 b'convert', b'cvsps.mergefrom', default=None,
323 b'convert', b'cvsps.mergefrom', default=None,
315 )
324 )
316 coreconfigitem(
325 coreconfigitem(
317 b'convert', b'cvsps.mergeto', default=None,
326 b'convert', b'cvsps.mergeto', default=None,
318 )
327 )
319 coreconfigitem(
328 coreconfigitem(
320 b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
329 b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
321 )
330 )
322 coreconfigitem(
331 coreconfigitem(
323 b'convert', b'git.extrakeys', default=list,
332 b'convert', b'git.extrakeys', default=list,
324 )
333 )
325 coreconfigitem(
334 coreconfigitem(
326 b'convert', b'git.findcopiesharder', default=False,
335 b'convert', b'git.findcopiesharder', default=False,
327 )
336 )
328 coreconfigitem(
337 coreconfigitem(
    b'convert', b'git.remoteprefix', default=b'remote',
)
coreconfigitem(b'convert', b'git.renamelimit', default=400)
coreconfigitem(b'convert', b'git.saverev', default=True)
coreconfigitem(b'convert', b'git.similarity', default=50)
coreconfigitem(b'convert', b'git.skipsubmodules', default=False)
coreconfigitem(b'convert', b'hg.clonebranches', default=False)
coreconfigitem(b'convert', b'hg.ignoreerrors', default=False)
coreconfigitem(b'convert', b'hg.preserve-hash', default=False)
coreconfigitem(b'convert', b'hg.revs', default=None)
coreconfigitem(b'convert', b'hg.saverev', default=False)
coreconfigitem(b'convert', b'hg.sourcename', default=None)
coreconfigitem(b'convert', b'hg.startrev', default=None)
coreconfigitem(b'convert', b'hg.tagsbranch', default=b'default')
coreconfigitem(b'convert', b'hg.usebranchnames', default=True)
coreconfigitem(b'convert', b'ignoreancestorcheck', default=False, experimental=True)
coreconfigitem(b'convert', b'localtimezone', default=False)
coreconfigitem(b'convert', b'p4.encoding', default=dynamicdefault)
coreconfigitem(b'convert', b'p4.startrev', default=0)
coreconfigitem(b'convert', b'skiptags', default=False)
coreconfigitem(b'convert', b'svn.debugsvnlog', default=True)
coreconfigitem(b'convert', b'svn.trunk', default=None)
coreconfigitem(b'convert', b'svn.tags', default=None)
coreconfigitem(b'convert', b'svn.branches', default=None)
coreconfigitem(b'convert', b'svn.startrev', default=0)
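
# A minimal, illustrative sketch of how an item registered above is read at
# runtime; the helper name and its `ui` argument are assumptions for the
# example only, not part of this module.
def _example_read_git_similarity(ui):
    # With no user setting, ui.configint() is expected to fall back to the
    # default registered above for convert.git.similarity (50).
    return ui.configint(b'convert', b'git.similarity')
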
coreconfigitem(b'debug', b'dirstate.delaywrite', default=0)
coreconfigitem(b'defaults', b'.*', default=None, generic=True)
coreconfigitem(b'devel', b'all-warnings', default=False)
coreconfigitem(b'devel', b'bundle2.debug', default=False)
coreconfigitem(b'devel', b'bundle.delta', default=b'')
coreconfigitem(b'devel', b'cache-vfs', default=None)
coreconfigitem(b'devel', b'check-locks', default=False)
coreconfigitem(b'devel', b'check-relroot', default=False)
coreconfigitem(b'devel', b'default-date', default=None)
coreconfigitem(b'devel', b'deprec-warn', default=False)
coreconfigitem(b'devel', b'disableloaddefaultcerts', default=False)
coreconfigitem(b'devel', b'warn-empty-changegroup', default=False)
coreconfigitem(b'devel', b'legacy.exchange', default=list)
coreconfigitem(b'devel', b'persistent-nodemap', default=False)
coreconfigitem(b'devel', b'servercafile', default=b'')
coreconfigitem(b'devel', b'serverexactprotocol', default=b'')
coreconfigitem(b'devel', b'serverrequirecert', default=False)
coreconfigitem(b'devel', b'strip-obsmarkers', default=True)
coreconfigitem(b'devel', b'warn-config', default=None)
coreconfigitem(b'devel', b'warn-config-default', default=None)
coreconfigitem(b'devel', b'user.obsmarker', default=None)
coreconfigitem(b'devel', b'warn-config-unknown', default=None)
coreconfigitem(b'devel', b'debug.copies', default=False)
coreconfigitem(b'devel', b'debug.extensions', default=False)
coreconfigitem(b'devel', b'debug.repo-filters', default=False)
coreconfigitem(b'devel', b'debug.peer-request', default=False)
coreconfigitem(b'devel', b'discovery.randomize', default=True)
_registerdiffopts(section=b'diff')
coreconfigitem(b'email', b'bcc', default=None)
coreconfigitem(b'email', b'cc', default=None)
coreconfigitem(b'email', b'charsets', default=list)
coreconfigitem(b'email', b'from', default=None)
coreconfigitem(b'email', b'method', default=b'smtp')
coreconfigitem(b'email', b'reply-to', default=None)
coreconfigitem(b'email', b'to', default=None)
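
# An illustrative sketch of reading a list-valued item such as email.charsets;
# the helper name and `ui` argument are assumptions for the example only.
def _example_read_email_charsets(ui):
    # A callable default (`default=list` above) registers an empty list, so
    # this should return [] when the user configures nothing.
    return ui.configlist(b'email', b'charsets')
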
coreconfigitem(b'experimental', b'archivemetatemplate', default=dynamicdefault)
coreconfigitem(b'experimental', b'auto-publish', default=b'publish')
coreconfigitem(b'experimental', b'bundle-phases', default=False)
coreconfigitem(b'experimental', b'bundle2-advertise', default=True)
coreconfigitem(b'experimental', b'bundle2-output-capture', default=False)
coreconfigitem(b'experimental', b'bundle2.pushback', default=False)
coreconfigitem(b'experimental', b'bundle2lazylocking', default=False)
coreconfigitem(b'experimental', b'bundlecomplevel', default=None)
coreconfigitem(b'experimental', b'bundlecomplevel.bzip2', default=None)
coreconfigitem(b'experimental', b'bundlecomplevel.gzip', default=None)
coreconfigitem(b'experimental', b'bundlecomplevel.none', default=None)
coreconfigitem(b'experimental', b'bundlecomplevel.zstd', default=None)
coreconfigitem(b'experimental', b'changegroup3', default=False)
coreconfigitem(b'experimental', b'cleanup-as-archived', default=False)
coreconfigitem(b'experimental', b'clientcompressionengines', default=list)
coreconfigitem(b'experimental', b'copytrace', default=b'on')
coreconfigitem(b'experimental', b'copytrace.movecandidateslimit', default=100)
coreconfigitem(b'experimental', b'copytrace.sourcecommitlimit', default=100)
coreconfigitem(b'experimental', b'copies.read-from', default=b"filelog-only")
coreconfigitem(b'experimental', b'copies.write-to', default=b'filelog-only')
coreconfigitem(b'experimental', b'crecordtest', default=None)
coreconfigitem(b'experimental', b'directaccess', default=False)
coreconfigitem(b'experimental', b'directaccess.revnums', default=False)
coreconfigitem(b'experimental', b'editortmpinhg', default=False)
coreconfigitem(b'experimental', b'evolution', default=list)
coreconfigitem(
    b'experimental',
    b'evolution.allowdivergence',
    default=False,
    alias=[(b'experimental', b'allowdivergence')],
)
coreconfigitem(b'experimental', b'evolution.allowunstable', default=None)
coreconfigitem(b'experimental', b'evolution.createmarkers', default=None)
coreconfigitem(
    b'experimental',
    b'evolution.effect-flags',
    default=True,
    alias=[(b'experimental', b'effect-flags')],
)
coreconfigitem(b'experimental', b'evolution.exchange', default=None)
coreconfigitem(b'experimental', b'evolution.bundle-obsmarker', default=False)
coreconfigitem(b'experimental', b'log.topo', default=False)
coreconfigitem(b'experimental', b'evolution.report-instabilities', default=True)
coreconfigitem(b'experimental', b'evolution.track-operation', default=True)
# repo-level config to exclude a revset visibility
#
# The target use case is to use `share` to expose different subsets of the same
# repository, especially server side. See also `server.view`.
coreconfigitem(b'experimental', b'extra-filter-revs', default=None)
coreconfigitem(b'experimental', b'maxdeltachainspan', default=-1)
# tracks files which were undeleted (merge might delete them but we explicitly
# kept/undeleted them) and creates new filenodes for them
coreconfigitem(b'experimental', b'merge-track-salvaged', default=False)
coreconfigitem(b'experimental', b'mergetempdirprefix', default=None)
coreconfigitem(b'experimental', b'mmapindexthreshold', default=None)
coreconfigitem(b'experimental', b'narrow', default=False)
coreconfigitem(b'experimental', b'nonnormalparanoidcheck', default=False)
coreconfigitem(b'experimental', b'exportableenviron', default=list)
coreconfigitem(b'experimental', b'extendedheader.index', default=None)
coreconfigitem(b'experimental', b'extendedheader.similarity', default=False)
coreconfigitem(b'experimental', b'graphshorten', default=False)
coreconfigitem(b'experimental', b'graphstyle.parent', default=dynamicdefault)
coreconfigitem(b'experimental', b'graphstyle.missing', default=dynamicdefault)
coreconfigitem(b'experimental', b'graphstyle.grandparent', default=dynamicdefault)
coreconfigitem(b'experimental', b'hook-track-tags', default=False)
coreconfigitem(b'experimental', b'httppeer.advertise-v2', default=False)
coreconfigitem(b'experimental', b'httppeer.v2-encoder-order', default=None)
coreconfigitem(b'experimental', b'httppostargs', default=False)
coreconfigitem(b'experimental', b'nointerrupt', default=False)
coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)

coreconfigitem(b'experimental', b'obsmarkers-exchange-debug', default=False)
coreconfigitem(b'experimental', b'remotenames', default=False)
coreconfigitem(b'experimental', b'removeemptydirs', default=True)
coreconfigitem(b'experimental', b'revert.interactive.select-to-keep', default=False)
coreconfigitem(b'experimental', b'revisions.prefixhexnode', default=False)
coreconfigitem(b'experimental', b'revlogv2', default=None)
coreconfigitem(b'experimental', b'revisions.disambiguatewithin', default=None)
coreconfigitem(b'experimental', b'rust.index', default=False)
coreconfigitem(
    b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
)
coreconfigitem(
    b'experimental',
    b'server.manifestdata.recommended-batch-size',
    default=100000,
)
coreconfigitem(b'experimental', b'server.stream-narrow-clones', default=False)
coreconfigitem(b'experimental', b'single-head-per-branch', default=False)
coreconfigitem(
    b'experimental',
    b'single-head-per-branch:account-closed-heads',
    default=False,
)
coreconfigitem(b'experimental', b'sshserver.support-v2', default=False)
coreconfigitem(b'experimental', b'sparse-read', default=False)
coreconfigitem(b'experimental', b'sparse-read.density-threshold', default=0.50)
coreconfigitem(b'experimental', b'sparse-read.min-gap-size', default=b'65K')
coreconfigitem(b'experimental', b'treemanifest', default=False)
coreconfigitem(b'experimental', b'update.atomic-file', default=False)
coreconfigitem(b'experimental', b'sshpeer.advertise-v2', default=False)
coreconfigitem(b'experimental', b'web.apiserver', default=False)
coreconfigitem(b'experimental', b'web.api.http-v2', default=False)
coreconfigitem(b'experimental', b'web.api.debugreflect', default=False)
coreconfigitem(b'experimental', b'worker.wdir-get-thread-safe', default=False)
coreconfigitem(b'experimental', b'worker.repository-upgrade', default=False)
coreconfigitem(b'experimental', b'xdiff', default=False)
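
# An illustrative sketch of probing a boolean experimental knob registered
# above; the helper name and `ui` argument are assumptions for the example.
def _example_graph_is_shortened(ui):
    # experimental.graphshorten defaults to False above, so this should be
    # False unless the user opts in.
    return ui.configbool(b'experimental', b'graphshorten')
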
coreconfigitem(b'extensions', b'.*', default=None, generic=True)
coreconfigitem(b'extdata', b'.*', default=None, generic=True)
coreconfigitem(b'format', b'bookmarks-in-store', default=False)
coreconfigitem(b'format', b'chunkcachesize', default=None, experimental=True)
coreconfigitem(b'format', b'dotencode', default=True)
coreconfigitem(b'format', b'generaldelta', default=False, experimental=True)
coreconfigitem(b'format', b'manifestcachesize', default=None, experimental=True)
coreconfigitem(b'format', b'maxchainlen', default=dynamicdefault, experimental=True)
coreconfigitem(b'format', b'obsstore-version', default=None)
coreconfigitem(b'format', b'sparse-revlog', default=True)
coreconfigitem(
    b'format',
    b'revlog-compression',
    default=lambda: [b'zlib'],
    alias=[(b'experimental', b'format.compression')],
)
coreconfigitem(b'format', b'usefncache', default=True)
coreconfigitem(b'format', b'usegeneraldelta', default=True)
coreconfigitem(b'format', b'usestore', default=True)
# Right now, the only efficient implementation of the nodemap logic is in Rust,
# so the persistent nodemap feature needs to stay experimental as long as the
# Rust extensions are an experimental feature.
coreconfigitem(b'format', b'use-persistent-nodemap', default=False, experimental=True)
coreconfigitem(
    b'format',
    b'exp-use-copies-side-data-changeset',
    default=False,
    experimental=True,
)
coreconfigitem(b'format', b'exp-use-side-data', default=False, experimental=True)
coreconfigitem(b'format', b'exp-share-safe', default=False, experimental=True)
coreconfigitem(b'format', b'internal-phase', default=False, experimental=True)
coreconfigitem(b'fsmonitor', b'warn_when_unused', default=True)
coreconfigitem(b'fsmonitor', b'warn_update_file_count', default=50000)
coreconfigitem(b'fsmonitor', b'warn_update_file_count_rust', default=400000)
coreconfigitem(b'help', br'hidden-command\..*', default=False, generic=True)
coreconfigitem(b'help', br'hidden-topic\..*', default=False, generic=True)
coreconfigitem(b'hooks', b'.*', default=dynamicdefault, generic=True)
coreconfigitem(b'hgweb-paths', b'.*', default=list, generic=True)
coreconfigitem(b'hostfingerprints', b'.*', default=list, generic=True)
coreconfigitem(b'hostsecurity', b'ciphers', default=None)
coreconfigitem(b'hostsecurity', b'minimumprotocol', default=dynamicdefault)
coreconfigitem(
    b'hostsecurity',
    b'.*:minimumprotocol$',
    default=dynamicdefault,
    generic=True,
)
coreconfigitem(b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True)
coreconfigitem(b'hostsecurity', b'.*:fingerprints$', default=list, generic=True)
coreconfigitem(b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True)

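
# Items registered with generic=True above use a regular expression as the
# item name, so per-key lookups such as hostsecurity.<host>:fingerprints are
# covered by a single registration. A sketch, with `example.com` standing in
# as an assumed host name purely for illustration:
def _example_host_fingerprints(ui):
    # Matches the br'.*:fingerprints$' registration above, whose default is an
    # empty list.
    return ui.configlist(b'hostsecurity', b'example.com:fingerprints')
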
coreconfigitem(b'http_proxy', b'always', default=False)
coreconfigitem(b'http_proxy', b'host', default=None)
coreconfigitem(b'http_proxy', b'no', default=list)
coreconfigitem(b'http_proxy', b'passwd', default=None)
coreconfigitem(b'http_proxy', b'user', default=None)

coreconfigitem(b'http', b'timeout', default=None)

coreconfigitem(b'logtoprocess', b'commandexception', default=None)
coreconfigitem(b'logtoprocess', b'commandfinish', default=None)
coreconfigitem(b'logtoprocess', b'command', default=None)
coreconfigitem(b'logtoprocess', b'develwarn', default=None)
coreconfigitem(b'logtoprocess', b'uiblocked', default=None)
coreconfigitem(b'merge', b'checkunknown', default=b'abort')
coreconfigitem(b'merge', b'checkignored', default=b'abort')
coreconfigitem(b'experimental', b'merge.checkpathconflicts', default=False)
coreconfigitem(b'merge', b'followcopies', default=True)
coreconfigitem(b'merge', b'on-failure', default=b'continue')
coreconfigitem(b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True)
coreconfigitem(b'merge', b'strict-capability-check', default=False)
coreconfigitem(b'merge-tools', b'.*', default=None, generic=True)
coreconfigitem(
    b'merge-tools',
    br'.*\.args$',
    default=b"$local $base $other",
    generic=True,
    priority=-1,
)
coreconfigitem(b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1)
coreconfigitem(b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1)
coreconfigitem(
    b'merge-tools',
    br'.*\.checkchanged$',
    default=False,
    generic=True,
    priority=-1,
)
coreconfigitem(
    b'merge-tools',
    br'.*\.executable$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem(b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1)
coreconfigitem(b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1)
coreconfigitem(
    b'merge-tools',
    br'.*\.mergemarkers$',
    default=b'basic',
    generic=True,
    priority=-1,
)
coreconfigitem(
    b'merge-tools',
    br'.*\.mergemarkertemplate$',
    default=dynamicdefault,  # take from command-templates.mergemarker
    generic=True,
    priority=-1,
)
coreconfigitem(b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1)
coreconfigitem(
    b'merge-tools',
    br'.*\.premerge$',
    default=dynamicdefault,
    generic=True,
    priority=-1,
)
coreconfigitem(b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1)
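
# The merge-tools items above are likewise generic: each configured tool gets
# its own <tool>.args, <tool>.priority, and similar suboptions. A sketch using
# an assumed tool name (`kdiff3`) purely for illustration:
def _example_merge_tool_args(ui):
    # Should fall back to the registered default b"$local $base $other" when
    # the tool does not override its .args suboption.
    return ui.config(b'merge-tools', b'kdiff3.args')
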
coreconfigitem(b'pager', b'attend-.*', default=dynamicdefault, generic=True)
coreconfigitem(b'pager', b'ignore', default=list)
coreconfigitem(b'pager', b'pager', default=dynamicdefault)
coreconfigitem(b'patch', b'eol', default=b'strict')
coreconfigitem(b'patch', b'fuzz', default=2)
coreconfigitem(b'paths', b'default', default=None)
coreconfigitem(b'paths', b'default-push', default=None)
coreconfigitem(b'paths', b'.*', default=None, generic=True)
coreconfigitem(b'phases', b'checksubrepos', default=b'follow')
coreconfigitem(b'phases', b'new-commit', default=b'draft')
coreconfigitem(b'phases', b'publish', default=True)
coreconfigitem(b'profiling', b'enabled', default=False)
coreconfigitem(b'profiling', b'format', default=b'text')
coreconfigitem(b'profiling', b'freq', default=1000)
coreconfigitem(b'profiling', b'limit', default=30)
coreconfigitem(b'profiling', b'nested', default=0)
coreconfigitem(b'profiling', b'output', default=None)
coreconfigitem(b'profiling', b'showmax', default=0.999)
coreconfigitem(b'profiling', b'showmin', default=dynamicdefault)
coreconfigitem(b'profiling', b'showtime', default=True)
coreconfigitem(b'profiling', b'sort', default=b'inlinetime')
coreconfigitem(b'profiling', b'statformat', default=b'hotpath')
coreconfigitem(b'profiling', b'time-track', default=dynamicdefault)
coreconfigitem(b'profiling', b'type', default=b'stat')
coreconfigitem(b'progress', b'assume-tty', default=False)
coreconfigitem(b'progress', b'changedelay', default=1)
coreconfigitem(b'progress', b'clear-complete', default=True)
coreconfigitem(b'progress', b'debug', default=False)
coreconfigitem(b'progress', b'delay', default=3)
coreconfigitem(b'progress', b'disable', default=False)
coreconfigitem(b'progress', b'estimateinterval', default=60.0)
coreconfigitem(
    b'progress',
    b'format',
    default=lambda: [b'topic', b'bar', b'number', b'estimate'],
)
coreconfigitem(b'progress', b'refresh', default=0.1)
coreconfigitem(b'progress', b'width', default=dynamicdefault)
coreconfigitem(b'pull', b'confirm', default=False)
coreconfigitem(b'push', b'pushvars.server', default=False)
coreconfigitem(
    b'rewrite',
    b'backup-bundle',
    default=True,
    alias=[(b'ui', b'history-editing-backup')],
)
coreconfigitem(b'rewrite', b'update-timestamp', default=False)
coreconfigitem(b'rewrite', b'empty-successor', default=b'skip', experimental=True)
coreconfigitem(b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True)
coreconfigitem(
    b'storage',
    b'revlog.optimize-delta-parent-choice',
    default=True,
    alias=[(b'format', b'aggressivemergedeltas')],
)
# experimental as long as rust is experimental (or a C version is implemented)
coreconfigitem(b'storage', b'revlog.nodemap.mmap', default=True, experimental=True)
# experimental as long as format.use-persistent-nodemap is.
coreconfigitem(b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True)
coreconfigitem(b'storage', b'revlog.reuse-external-delta', default=True)
coreconfigitem(b'storage', b'revlog.reuse-external-delta-parent', default=None)
coreconfigitem(b'storage', b'revlog.zlib.level', default=None)
coreconfigitem(b'storage', b'revlog.zstd.level', default=None)
coreconfigitem(b'server', b'bookmarks-pushkey-compat', default=True)
coreconfigitem(b'server', b'bundle1', default=True)
coreconfigitem(b'server', b'bundle1gd', default=None)
coreconfigitem(b'server', b'bundle1.pull', default=None)
coreconfigitem(b'server', b'bundle1gd.pull', default=None)
coreconfigitem(b'server', b'bundle1.push', default=None)
coreconfigitem(b'server', b'bundle1gd.push', default=None)
coreconfigitem(
    b'server',
    b'bundle2.stream',
    default=True,
    alias=[(b'experimental', b'bundle2.stream')],
)
coreconfigitem(b'server', b'compressionengines', default=list)
coreconfigitem(b'server', b'concurrent-push-mode', default=b'check-related')
coreconfigitem(b'server', b'disablefullbundle', default=False)
coreconfigitem(b'server', b'maxhttpheaderlen', default=1024)
coreconfigitem(b'server', b'pullbundle', default=False)
coreconfigitem(b'server', b'preferuncompressed', default=False)
coreconfigitem(b'server', b'streamunbundle', default=False)
coreconfigitem(b'server', b'uncompressed', default=True)
coreconfigitem(b'server', b'uncompressedallowsecret', default=False)
coreconfigitem(b'server', b'view', default=b'served')
coreconfigitem(b'server', b'validate', default=False)
coreconfigitem(b'server', b'zliblevel', default=-1)
coreconfigitem(b'server', b'zstdlevel', default=3)
coreconfigitem(b'share', b'pool', default=None)
coreconfigitem(b'share', b'poolnaming', default=b'identity')
coreconfigitem(b'shelve', b'maxbackups', default=10)
coreconfigitem(b'smtp', b'host', default=None)
coreconfigitem(b'smtp', b'local_hostname', default=None)
coreconfigitem(b'smtp', b'password', default=None)
coreconfigitem(b'smtp', b'port', default=dynamicdefault)
coreconfigitem(b'smtp', b'tls', default=b'none')
coreconfigitem(b'smtp', b'username', default=None)
coreconfigitem(b'sparse', b'missingwarning', default=True, experimental=True)
coreconfigitem(
    b'subrepos',
    b'allowed',
    default=dynamicdefault,  # to make backporting simpler
)
coreconfigitem(b'subrepos', b'hg:allowed', default=dynamicdefault)
coreconfigitem(b'subrepos', b'git:allowed', default=dynamicdefault)
coreconfigitem(b'subrepos', b'svn:allowed', default=dynamicdefault)
coreconfigitem(b'templates', b'.*', default=None, generic=True)
coreconfigitem(b'templateconfig', b'.*', default=dynamicdefault, generic=True)
coreconfigitem(b'trusted', b'groups', default=list)
coreconfigitem(b'trusted', b'users', default=list)
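
# Several items above carry alias=[...] so that an older section/name keeps
# working. A sketch of reading such an item; the helper name and `ui` argument
# are assumptions for the example only.
def _example_backup_bundle_enabled(ui):
    # rewrite.backup-bundle is aliased above to the legacy
    # ui.history-editing-backup name; reading the new name should honour
    # either spelling in the user's configuration.
    return ui.configbool(b'rewrite', b'backup-bundle')
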
coreconfigitem(b'ui', b'_usedassubrepo', default=False)
coreconfigitem(b'ui', b'allowemptycommit', default=False)
coreconfigitem(b'ui', b'archivemeta', default=True)
coreconfigitem(b'ui', b'askusername', default=False)
coreconfigitem(b'ui', b'available-memory', default=None)

coreconfigitem(b'ui', b'clonebundlefallback', default=False)
coreconfigitem(b'ui', b'clonebundleprefers', default=list)
coreconfigitem(b'ui', b'clonebundles', default=True)
coreconfigitem(b'ui', b'color', default=b'auto')
coreconfigitem(b'ui', b'commitsubrepos', default=False)
coreconfigitem(b'ui', b'debug', default=False)
coreconfigitem(b'ui', b'debugger', default=None)
coreconfigitem(b'ui', b'editor', default=dynamicdefault)
coreconfigitem(b'ui', b'fallbackencoding', default=None)
coreconfigitem(b'ui', b'forcecwd', default=None)
coreconfigitem(b'ui', b'forcemerge', default=None)
coreconfigitem(b'ui', b'formatdebug', default=False)
coreconfigitem(b'ui', b'formatjson', default=False)
coreconfigitem(b'ui', b'formatted', default=None)
coreconfigitem(b'ui', b'interactive', default=None)
coreconfigitem(b'ui', b'interface', default=None)
coreconfigitem(b'ui', b'interface.chunkselector', default=None)
coreconfigitem(b'ui', b'large-file-limit', default=10000000)
coreconfigitem(b'ui', b'logblockedtimes', default=False)
coreconfigitem(b'ui', b'merge', default=None)
coreconfigitem(b'ui', b'mergemarkers', default=b'basic')
coreconfigitem(b'ui', b'message-output', default=b'stdio')
coreconfigitem(b'ui', b'nontty', default=False)
coreconfigitem(b'ui', b'origbackuppath', default=None)
coreconfigitem(b'ui', b'paginate', default=True)
coreconfigitem(b'ui', b'patch', default=None)
coreconfigitem(b'ui', b'portablefilenames', default=b'warn')
coreconfigitem(b'ui', b'promptecho', default=False)
coreconfigitem(b'ui', b'quiet', default=False)
coreconfigitem(b'ui', b'quietbookmarkmove', default=False)
coreconfigitem(b'ui', b'relative-paths', default=b'legacy')
coreconfigitem(b'ui', b'remotecmd', default=b'hg')
coreconfigitem(b'ui', b'report_untrusted', default=True)
coreconfigitem(b'ui', b'rollback', default=True)
coreconfigitem(b'ui', b'signal-safe-lock', default=True)
coreconfigitem(b'ui', b'slash', default=False)
coreconfigitem(b'ui', b'ssh', default=b'ssh')
coreconfigitem(b'ui', b'ssherrorhint', default=None)
coreconfigitem(b'ui', b'statuscopies', default=False)
coreconfigitem(b'ui', b'strict', default=False)
coreconfigitem(b'ui', b'style', default=b'')
coreconfigitem(b'ui', b'supportcontact', default=None)
coreconfigitem(b'ui', b'textwidth', default=78)
coreconfigitem(b'ui', b'timeout', default=b'600')
coreconfigitem(b'ui', b'timeout.warn', default=0)
coreconfigitem(b'ui', b'timestamp-output', default=False)
coreconfigitem(b'ui', b'traceback', default=False)
coreconfigitem(b'ui', b'tweakdefaults', default=False)
coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
coreconfigitem(b'ui', b'verbose', default=False)
coreconfigitem(b'verify', b'skipflags', default=None)
coreconfigitem(b'web', b'allowbz2', default=False)
coreconfigitem(b'web', b'allowgz', default=False)
coreconfigitem(b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True)
coreconfigitem(b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list)
coreconfigitem(b'web', b'allowzip', default=False)
coreconfigitem(b'web', b'archivesubrepos', default=False)
coreconfigitem(b'web', b'cache', default=True)
coreconfigitem(b'web', b'comparisoncontext', default=5)
coreconfigitem(b'web', b'contact', default=None)
coreconfigitem(b'web', b'deny_push', default=list)
coreconfigitem(b'web', b'guessmime', default=False)
coreconfigitem(b'web', b'hidden', default=False)
coreconfigitem(b'web', b'labels', default=list)
coreconfigitem(b'web', b'logoimg', default=b'hglogo.png')
coreconfigitem(b'web', b'logourl', default=b'https://mercurial-scm.org/')
coreconfigitem(b'web', b'accesslog', default=b'-')
coreconfigitem(b'web', b'address', default=b'')
coreconfigitem(
    b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
)
coreconfigitem(b'web', b'allow_read', default=list)
coreconfigitem(b'web', b'baseurl', default=None)
coreconfigitem(b'web', b'cacerts', default=None)
coreconfigitem(b'web', b'certificate', default=None)
coreconfigitem(b'web', b'collapse', default=False)
coreconfigitem(b'web', b'csp', default=None)
coreconfigitem(b'web', b'deny_read', default=list)
coreconfigitem(b'web', b'descend', default=True)
coreconfigitem(b'web', b'description', default=b"")
coreconfigitem(b'web', b'encoding', default=lambda: encoding.encoding)
coreconfigitem(b'web', b'errorlog', default=b'-')
coreconfigitem(b'web', b'ipv6', default=False)
coreconfigitem(b'web', b'maxchanges', default=10)
coreconfigitem(b'web', b'maxfiles', default=10)
coreconfigitem(b'web', b'maxshortchanges', default=60)
coreconfigitem(b'web', b'motd', default=b'')
coreconfigitem(b'web', b'name', default=dynamicdefault)
coreconfigitem(b'web', b'port', default=8000)
coreconfigitem(b'web', b'prefix', default=b'')
coreconfigitem(b'web', b'push_ssl', default=True)
coreconfigitem(b'web', b'refreshinterval', default=20)
coreconfigitem(b'web', b'server-header', default=None)
coreconfigitem(b'web', b'static', default=None)
coreconfigitem(b'web', b'staticurl', default=None)
coreconfigitem(
    b'web', b'stripes', default=1,
1563 b'web', b'stripes', default=1,
1555 )
1564 )
1556 coreconfigitem(
1565 coreconfigitem(
1557 b'web', b'style', default=b'paper',
1566 b'web', b'style', default=b'paper',
1558 )
1567 )
1559 coreconfigitem(
1568 coreconfigitem(
1560 b'web', b'templates', default=None,
1569 b'web', b'templates', default=None,
1561 )
1570 )
1562 coreconfigitem(
1571 coreconfigitem(
1563 b'web', b'view', default=b'served', experimental=True,
1572 b'web', b'view', default=b'served', experimental=True,
1564 )
1573 )
1565 coreconfigitem(
1574 coreconfigitem(
1566 b'worker', b'backgroundclose', default=dynamicdefault,
1575 b'worker', b'backgroundclose', default=dynamicdefault,
1567 )
1576 )
1568 # Windows defaults to a limit of 512 open files. A buffer of 128
1577 # Windows defaults to a limit of 512 open files. A buffer of 128
1569 # should give us enough headway.
1578 # should give us enough headway.
1570 coreconfigitem(
1579 coreconfigitem(
1571 b'worker', b'backgroundclosemaxqueue', default=384,
1580 b'worker', b'backgroundclosemaxqueue', default=384,
1572 )
1581 )
1573 coreconfigitem(
1582 coreconfigitem(
1574 b'worker', b'backgroundcloseminfilecount', default=2048,
1583 b'worker', b'backgroundcloseminfilecount', default=2048,
1575 )
1584 )
1576 coreconfigitem(
1585 coreconfigitem(
1577 b'worker', b'backgroundclosethreadcount', default=4,
1586 b'worker', b'backgroundclosethreadcount', default=4,
1578 )
1587 )
1579 coreconfigitem(
1588 coreconfigitem(
1580 b'worker', b'enabled', default=True,
1589 b'worker', b'enabled', default=True,
1581 )
1590 )
1582 coreconfigitem(
1591 coreconfigitem(
1583 b'worker', b'numcpus', default=None,
1592 b'worker', b'numcpus', default=None,
1584 )
1593 )
1585
1594
# Rebase-related configuration moved to core because other extensions are doing
# strange things. For example, shelve imports the extension to reuse some bits
# without formally loading it.
coreconfigitem(
    b'commands', b'rebase.requiredest', default=False,
)
coreconfigitem(
    b'experimental', b'rebaseskipobsolete', default=True,
)
coreconfigitem(
    b'rebase', b'singletransaction', default=False,
)
coreconfigitem(
    b'rebase', b'experimental.inmemory', default=False,
)
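
The registrations above only declare each key and its default; callers read the values back through the ui configuration API, which falls back to the registered default when the user has not set anything. A minimal sketch of how one of the rebase items above is typically consumed (illustrative only, not part of this changeset; it assumes a repo object is already in scope, as inside a command function):

def uses_single_transaction(repo):
    # configbool() returns the registered default (False here) unless the
    # user has set [rebase] singletransaction in an hgrc file.
    return repo.ui.configbool(b'rebase', b'singletransaction')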
@@ -1,84 +1,102 @@
Testing templating for rebase command

Setup

$ cat >> $HGRCPATH <<EOF
> [extensions]
> rebase=
> [experimental]
> evolution=createmarkers
> EOF

$ hg init repo
$ cd repo
$ for ch in a b c d; do echo foo > $ch; hg commit -Aqm "Added "$ch; done

$ hg log -G -T "{rev}:{node|short} {desc}"
@ 3:62615734edd5 Added d
|
o 2:28ad74487de9 Added c
|
o 1:29becc82797a Added b
|
o 0:18d04c59bb5d Added a

Getting the JSON output for nodechanges

$ hg rebase -s 2 -d 0 -q -Tjson
[
 {
  "nodechanges": {"28ad74487de9599d00d81085be739c61fc340652": ["849767420fd5519cf0026232411a943ed03cc9fb"], "62615734edd52f06b6fb9c2beb429e4fe30d57b8": ["df21b32134ba85d86bca590cbe9b8b7cbc346c53"]}
 }
]

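Because -Tjson emits well-formed JSON (a list with one object per invocation), the old-to-new node mapping above is straightforward to post-process in a script. A minimal sketch, not part of the test file, assuming the rebase output has been captured to a file named out.json (a hypothetical path):

import json

# Print each rebased node and its replacement(s).
with open('out.json') as f:
    entries = json.load(f)

for entry in entries:
    for oldnode, newnodes in entry['nodechanges'].items():
        print('%s -> %s' % (oldnode, ', '.join(newnodes)))
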
$ hg log -G -T "{rev}:{node|short} {desc}"
@ 5:df21b32134ba Added d
|
o 4:849767420fd5 Added c
|
| o 1:29becc82797a Added b
|/
o 0:18d04c59bb5d Added a

$ hg rebase -s 1 -d 5 -q -T "{nodechanges|json}"
{"29becc82797a4bc11ec8880b58eaecd2ab3e7760": ["d9d6773efc831c274eace04bc13e8e6412517139"]} (no-eol)

$ hg log -G -T "{rev}:{node|short} {desc}"
o 6:d9d6773efc83 Added b
|
@ 5:df21b32134ba Added d
|
o 4:849767420fd5 Added c
|
o 0:18d04c59bb5d Added a


$ hg rebase -s 6 -d 4 -q -T "{nodechanges % '{oldnode}:{newnodes % ' {node} '}'}"
d9d6773efc831c274eace04bc13e8e6412517139: f48cd65c6dc3d2acb55da54402a5b029546e546f (no-eol)

$ hg log -G -T "{rev}:{node|short} {desc}"
o 7:f48cd65c6dc3 Added b
|
| @ 5:df21b32134ba Added d
|/
o 4:849767420fd5 Added c
|
o 0:18d04c59bb5d Added a



$ hg rebase -s 7 -d 5 -q --keep -T "{nodechanges % '{oldnode}:{newnodes % ' {node} '}'}"
f48cd65c6dc3d2acb55da54402a5b029546e546f: 6f7dda91e55e728fb798f3e44dbecf0ebaa83267 (no-eol)

$ hg log -G -T "{rev}:{node|short} {desc}"
o 8:6f7dda91e55e Added b
|
| o 7:f48cd65c6dc3 Added b
| |
@ | 5:df21b32134ba Added d
|/
o 4:849767420fd5 Added c
|
o 0:18d04c59bb5d Added a


Respects command-templates.oneline-summary

$ hg rebase -r 7 -d 8 -n --config command-templates.oneline-summary='rev: {rev}'
starting dry-run rebase; repository will not be changed
rebasing rev: 7
note: not rebasing rev: 7, its destination already has all its changes
dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase


command-templates.oneline-summary.rebase overrides

$ hg rebase -r 7 -d 8 -n \
> --config command-templates.oneline-summary='global: {rev}' \
> --config command-templates.oneline-summary.rebase='override: {rev}'
starting dry-run rebase; repository will not be changed
rebasing override: 7
note: not rebasing override: 7, its destination already has all its changes
dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
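
The --config flags used above correspond to the [command-templates] section of an hgrc file, so the same override can be made persistent. One possible snippet, written in the same style as the test's setup block (the template strings are illustrative, not taken from the test output):

$ cat >> $HGRCPATH <<EOF
> [command-templates]
> oneline-summary = {rev}: {desc|firstline}
> oneline-summary.rebase = rebasing {rev}:{node|short}
> EOF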