##// END OF EJS Templates
rebase: use cmdutil.check_at_most_one_arg() for --confirm/--dry-run...
Martin von Zweigbergk -
r44378:daed70e9 default
parent child Browse files
Show More
@@ -1,2309 +1,2308 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 nullrev,
24 nullrev,
25 short,
25 short,
26 )
26 )
27 from mercurial.pycompat import open
27 from mercurial.pycompat import open
28 from mercurial import (
28 from mercurial import (
29 bookmarks,
29 bookmarks,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 destutil,
33 destutil,
34 dirstateguard,
34 dirstateguard,
35 error,
35 error,
36 extensions,
36 extensions,
37 hg,
37 hg,
38 merge as mergemod,
38 merge as mergemod,
39 mergeutil,
39 mergeutil,
40 obsolete,
40 obsolete,
41 obsutil,
41 obsutil,
42 patch,
42 patch,
43 phases,
43 phases,
44 pycompat,
44 pycompat,
45 registrar,
45 registrar,
46 repair,
46 repair,
47 revset,
47 revset,
48 revsetlang,
48 revsetlang,
49 scmutil,
49 scmutil,
50 smartset,
50 smartset,
51 state as statemod,
51 state as statemod,
52 util,
52 util,
53 )
53 )
54
54
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased (sentinel in the state map;
# real rebased revisions are stored as their non-negative rev numbers)
revtodo = -1
# on-disk (rebasestate) spelling of the revtodo sentinel
revtodostr = b'-1'

# legacy revstates no longer needed in current code
# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
legacystates = {b'-2', b'-3', b'-4', b'-5'}

# command table populated by the @command decorator used further below
cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
73
73
74
74
75 def _nothingtorebase():
75 def _nothingtorebase():
76 return 1
76 return 1
77
77
78
78
79 def _savegraft(ctx, extra):
79 def _savegraft(ctx, extra):
80 s = ctx.extra().get(b'source', None)
80 s = ctx.extra().get(b'source', None)
81 if s is not None:
81 if s is not None:
82 extra[b'source'] = s
82 extra[b'source'] = s
83 s = ctx.extra().get(b'intermediate-source', None)
83 s = ctx.extra().get(b'intermediate-source', None)
84 if s is not None:
84 if s is not None:
85 extra[b'intermediate-source'] = s
85 extra[b'intermediate-source'] = s
86
86
87
87
88 def _savebranch(ctx, extra):
88 def _savebranch(ctx, extra):
89 extra[b'branch'] = ctx.branch()
89 extra[b'branch'] = ctx.branch()
90
90
91
91
def _destrebase(repo, sourceset, destspace=None):
    """small wrapper around destmerge to pass the right extra args

    Please wrap destutil.destmerge instead."""
    return destutil.destmerge(
        repo,
        action=b'rebase',
        onheadcheck=False,
        sourceset=sourceset,
        destspace=destspace,
    )
103
103
104
104
105 revsetpredicate = registrar.revsetpredicate()
105 revsetpredicate = registrar.revsetpredicate()
106
106
107
107
@revsetpredicate(b'_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    dest = _destrebase(repo, sourceset)
    return subset & smartset.baseset([dest])
120
120
121
121
@revsetpredicate(b'_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
    # ``_destautoorphanrebase()``

    # automatic rebase destination for a single orphan revision.
    unfi = repo.unfiltered()
    obsoleted = unfi.revs(b'obsolete()')

    src = revset.getset(repo, subset, x).first()

    # Empty src or already obsoleted - Do not return a destination
    if not src or src in obsoleted:
        return smartset.baseset()

    dests = destutil.orphanpossibledestination(repo, src)
    if len(dests) > 1:
        raise error.Abort(
            _(b"ambiguous automatic rebase: %r could end up on any of %r")
            % (src, dests)
        )
    # zero or one destination remains; hand it back directly
    return smartset.baseset(dests)
143
143
144
144
def _ctxdesc(ctx):
    """short description for a context"""
    firstline = ctx.description().split(b'\n', 1)[0]
    desc = b'%d:%s "%s"' % (ctx.rev(), ctx, firstline)
    repo = ctx.repo()
    labels = []
    for nsname, ns in pycompat.iteritems(repo.names):
        # branch names are implied by the description already
        if nsname == b'branches':
            continue
        labels.extend(ns.names(repo, ctx.node()))
    if labels:
        desc += b' (%s)' % b' '.join(labels)
    return desc
161
161
162
162
class rebaseruntime(object):
    """This class is a container for rebase runtime state"""

    def __init__(self, repo, ui, inmemory=False, opts=None):
        """Initialize runtime state from parsed command options.

        repo: local repository the rebase operates on
        ui: ui object used for config lookups and status output
        inmemory: if True, rebase with in-memory merging (overlay wctx)
        opts: dict of command options with byte-string keys (may be None)
        """
        if opts is None:
            opts = {}

        # prepared: whether we have rebasestate prepared or not. Currently it
        # decides whether "self.repo" is unfiltered or not.
        # The rebasestate has explicit hash to hash instructions not depending
        # on visibility. If rebasestate exists (in-memory or on-disk), use
        # unfiltered repo to avoid visibility issues.
        # Before knowing rebasestate (i.e. when starting a new rebase (not
        # --continue or --abort)), the original repo should be used so
        # visibility-dependent revsets are correct.
        self.prepared = False
        self._repo = repo

        self.ui = ui
        self.opts = opts
        # rev of the working directory parent before the rebase started
        self.originalwd = None
        self.external = nullrev
        # Mapping between the old revision id and either what is the new rebased
        # revision or what needs to be done with the old revision. The state
        # dict will be what contains most of the rebase progress state.
        self.state = {}
        self.activebookmark = None
        # {srcrev: destrev} destination chosen for each rebased revision
        self.destmap = {}
        self.skipped = set()

        self.collapsef = opts.get(b'collapse', False)
        self.collapsemsg = cmdutil.logmessage(ui, opts)
        self.date = opts.get(b'date', None)

        e = opts.get(b'extrafn')  # internal, used by e.g. hgsubversion
        self.extrafns = [_savegraft]
        if e:
            self.extrafns = [e]

        self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
        self.keepf = opts.get(b'keep', False)
        self.keepbranchesf = opts.get(b'keepbranches', False)
        self.obsoletenotrebased = {}
        self.obsoletewithoutsuccessorindestination = set()
        self.inmemory = inmemory
        # persistent state accessor for .hg/rebasestate
        self.stateobj = statemod.cmdstate(repo, b'rebasestate')
209
209
210 @property
210 @property
211 def repo(self):
211 def repo(self):
212 if self.prepared:
212 if self.prepared:
213 return self._repo.unfiltered()
213 return self._repo.unfiltered()
214 else:
214 else:
215 return self._repo
215 return self._repo
216
216
217 def storestatus(self, tr=None):
217 def storestatus(self, tr=None):
218 """Store the current status to allow recovery"""
218 """Store the current status to allow recovery"""
219 if tr:
219 if tr:
220 tr.addfilegenerator(
220 tr.addfilegenerator(
221 b'rebasestate',
221 b'rebasestate',
222 (b'rebasestate',),
222 (b'rebasestate',),
223 self._writestatus,
223 self._writestatus,
224 location=b'plain',
224 location=b'plain',
225 )
225 )
226 else:
226 else:
227 with self.repo.vfs(b"rebasestate", b"w") as f:
227 with self.repo.vfs(b"rebasestate", b"w") as f:
228 self._writestatus(f)
228 self._writestatus(f)
229
229
    def _writestatus(self, f):
        """Serialize the rebase state to file object ``f``.

        The format is line-oriented: originalwd hash, legacy destination
        (now always written empty), external hash, collapse flag, keep
        flag, keepbranches flag, active bookmark name, followed by one
        oldrev:newrev:destnode triple per revision in the state map.
        ``_read`` below parses this format back.
        """
        repo = self.repo
        assert repo.filtername is None
        f.write(repo[self.originalwd].hex() + b'\n')
        # was "dest". we now write dest per src root below.
        f.write(b'\n')
        f.write(repo[self.external].hex() + b'\n')
        f.write(b'%d\n' % int(self.collapsef))
        f.write(b'%d\n' % int(self.keepf))
        f.write(b'%d\n' % int(self.keepbranchesf))
        f.write(b'%s\n' % (self.activebookmark or b''))
        destmap = self.destmap
        for d, v in pycompat.iteritems(self.state):
            oldrev = repo[d].hex()
            if v >= 0:
                newrev = repo[v].hex()
            else:
                # negative sentinel (e.g. revtodo) stored as decimal text
                newrev = b"%d" % v
            destnode = repo[destmap[d]].hex()
            f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
        repo.ui.debug(b'rebase status stored\n')
251
251
252 def restorestatus(self):
252 def restorestatus(self):
253 """Restore a previously stored status"""
253 """Restore a previously stored status"""
254 if not self.stateobj.exists():
254 if not self.stateobj.exists():
255 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
255 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
256
256
257 data = self._read()
257 data = self._read()
258 self.repo.ui.debug(b'rebase status resumed\n')
258 self.repo.ui.debug(b'rebase status resumed\n')
259
259
260 self.originalwd = data[b'originalwd']
260 self.originalwd = data[b'originalwd']
261 self.destmap = data[b'destmap']
261 self.destmap = data[b'destmap']
262 self.state = data[b'state']
262 self.state = data[b'state']
263 self.skipped = data[b'skipped']
263 self.skipped = data[b'skipped']
264 self.collapsef = data[b'collapse']
264 self.collapsef = data[b'collapse']
265 self.keepf = data[b'keep']
265 self.keepf = data[b'keep']
266 self.keepbranchesf = data[b'keepbranches']
266 self.keepbranchesf = data[b'keepbranches']
267 self.external = data[b'external']
267 self.external = data[b'external']
268 self.activebookmark = data[b'activebookmark']
268 self.activebookmark = data[b'activebookmark']
269
269
270 def _read(self):
270 def _read(self):
271 self.prepared = True
271 self.prepared = True
272 repo = self.repo
272 repo = self.repo
273 assert repo.filtername is None
273 assert repo.filtername is None
274 data = {
274 data = {
275 b'keepbranches': None,
275 b'keepbranches': None,
276 b'collapse': None,
276 b'collapse': None,
277 b'activebookmark': None,
277 b'activebookmark': None,
278 b'external': nullrev,
278 b'external': nullrev,
279 b'keep': None,
279 b'keep': None,
280 b'originalwd': None,
280 b'originalwd': None,
281 }
281 }
282 legacydest = None
282 legacydest = None
283 state = {}
283 state = {}
284 destmap = {}
284 destmap = {}
285
285
286 if True:
286 if True:
287 f = repo.vfs(b"rebasestate")
287 f = repo.vfs(b"rebasestate")
288 for i, l in enumerate(f.read().splitlines()):
288 for i, l in enumerate(f.read().splitlines()):
289 if i == 0:
289 if i == 0:
290 data[b'originalwd'] = repo[l].rev()
290 data[b'originalwd'] = repo[l].rev()
291 elif i == 1:
291 elif i == 1:
292 # this line should be empty in newer version. but legacy
292 # this line should be empty in newer version. but legacy
293 # clients may still use it
293 # clients may still use it
294 if l:
294 if l:
295 legacydest = repo[l].rev()
295 legacydest = repo[l].rev()
296 elif i == 2:
296 elif i == 2:
297 data[b'external'] = repo[l].rev()
297 data[b'external'] = repo[l].rev()
298 elif i == 3:
298 elif i == 3:
299 data[b'collapse'] = bool(int(l))
299 data[b'collapse'] = bool(int(l))
300 elif i == 4:
300 elif i == 4:
301 data[b'keep'] = bool(int(l))
301 data[b'keep'] = bool(int(l))
302 elif i == 5:
302 elif i == 5:
303 data[b'keepbranches'] = bool(int(l))
303 data[b'keepbranches'] = bool(int(l))
304 elif i == 6 and not (len(l) == 81 and b':' in l):
304 elif i == 6 and not (len(l) == 81 and b':' in l):
305 # line 6 is a recent addition, so for backwards
305 # line 6 is a recent addition, so for backwards
306 # compatibility check that the line doesn't look like the
306 # compatibility check that the line doesn't look like the
307 # oldrev:newrev lines
307 # oldrev:newrev lines
308 data[b'activebookmark'] = l
308 data[b'activebookmark'] = l
309 else:
309 else:
310 args = l.split(b':')
310 args = l.split(b':')
311 oldrev = repo[args[0]].rev()
311 oldrev = repo[args[0]].rev()
312 newrev = args[1]
312 newrev = args[1]
313 if newrev in legacystates:
313 if newrev in legacystates:
314 continue
314 continue
315 if len(args) > 2:
315 if len(args) > 2:
316 destrev = repo[args[2]].rev()
316 destrev = repo[args[2]].rev()
317 else:
317 else:
318 destrev = legacydest
318 destrev = legacydest
319 destmap[oldrev] = destrev
319 destmap[oldrev] = destrev
320 if newrev == revtodostr:
320 if newrev == revtodostr:
321 state[oldrev] = revtodo
321 state[oldrev] = revtodo
322 # Legacy compat special case
322 # Legacy compat special case
323 else:
323 else:
324 state[oldrev] = repo[newrev].rev()
324 state[oldrev] = repo[newrev].rev()
325
325
326 if data[b'keepbranches'] is None:
326 if data[b'keepbranches'] is None:
327 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
327 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
328
328
329 data[b'destmap'] = destmap
329 data[b'destmap'] = destmap
330 data[b'state'] = state
330 data[b'state'] = state
331 skipped = set()
331 skipped = set()
332 # recompute the set of skipped revs
332 # recompute the set of skipped revs
333 if not data[b'collapse']:
333 if not data[b'collapse']:
334 seen = set(destmap.values())
334 seen = set(destmap.values())
335 for old, new in sorted(state.items()):
335 for old, new in sorted(state.items()):
336 if new != revtodo and new in seen:
336 if new != revtodo and new in seen:
337 skipped.add(old)
337 skipped.add(old)
338 seen.add(new)
338 seen.add(new)
339 data[b'skipped'] = skipped
339 data[b'skipped'] = skipped
340 repo.ui.debug(
340 repo.ui.debug(
341 b'computed skipped revs: %s\n'
341 b'computed skipped revs: %s\n'
342 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
342 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
343 )
343 )
344
344
345 return data
345 return data
346
346
    def _handleskippingobsolete(self, obsoleterevs, destmap):
        """Compute structures necessary for skipping obsolete revisions

        obsoleterevs: iterable of all obsolete revisions in rebaseset
        destmap: {srcrev: destrev} destination revisions
        """
        self.obsoletenotrebased = {}
        # feature gate: leave the skip structures empty unless enabled
        if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
            return
        obsoleteset = set(obsoleterevs)
        (
            self.obsoletenotrebased,
            self.obsoletewithoutsuccessorindestination,
            obsoleteextinctsuccessors,
        ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
        # union of everything that will be skipped, for the safety check
        skippedset = set(self.obsoletenotrebased)
        skippedset.update(self.obsoletewithoutsuccessorindestination)
        skippedset.update(obsoleteextinctsuccessors)
        _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
366
366
    def _prepareabortorcontinue(self, isabort, backup=True, suppwarns=False):
        """Restore persisted rebase state for --abort or --continue.

        isabort: True for --abort (clean up), False for --continue
        backup: whether a backup bundle may be saved when aborting
            (combined with the rewrite.backup-bundle config)
        suppwarns: forwarded to _abort to suppress warnings
        Returns a command exit status when aborting, None otherwise.
        """
        try:
            self.restorestatus()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            # stored state references revisions that no longer exist
            if isabort:
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(
                    _(
                        b'rebase aborted (no revision is removed,'
                        b' only broken state is cleared)\n'
                    )
                )
                return 0
            else:
                msg = _(b'cannot continue inconsistent rebase')
                hint = _(b'use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)

        if isabort:
            backup = backup and self.backupf
            return self._abort(backup=backup, suppwarns=suppwarns)
390
390
    def _preparenewrebase(self, destmap):
        """Validate destmap and build the state for a fresh rebase.

        destmap: {srcrev: destrev} of revisions to rebase
        Returns _nothingtorebase()'s status when there is nothing to do;
        otherwise populates originalwd/destmap/state (and external when
        collapsing) and marks the runtime prepared.
        Raises error.Abort on unrebasable configurations.
        """
        if not destmap:
            return _nothingtorebase()

        rebaseset = destmap.keys()
        # refuse to orphan unrebased descendants unless --keep was given
        # or the repo allows unstable changesets
        allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
        if not (self.keepf or allowunstable) and self.repo.revs(
            b'first(children(%ld) - %ld)', rebaseset, rebaseset
        ):
            raise error.Abort(
                _(
                    b"can't remove original changesets with"
                    b" unrebased descendants"
                ),
                hint=_(b'use --keep to keep original changesets'),
            )

        result = buildstate(self.repo, destmap, self.collapsef)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_(b'nothing to rebase\n'))
            return _nothingtorebase()

        # public changesets are immutable and can only be rebased with --keep
        for root in self.repo.set(b'roots(%ld)', rebaseset):
            if not self.keepf and not root.mutable():
                raise error.Abort(
                    _(b"can't rebase public changeset %s") % root,
                    hint=_(b"see 'hg help phases' for details"),
                )

        (self.originalwd, self.destmap, self.state) = result
        if self.collapsef:
            # collapsing folds everything onto exactly one destination
            dests = set(self.destmap.values())
            if len(dests) != 1:
                raise error.Abort(
                    _(b'--collapse does not work with multiple destinations')
                )
            destrev = next(iter(dests))
            destancestors = self.repo.changelog.ancestors(
                [destrev], inclusive=True
            )
            self.external = externalparent(self.repo, self.state, destancestors)

        for destrev in sorted(set(destmap.values())):
            dest = self.repo[destrev]
            if dest.closesbranch() and not self.keepbranchesf:
                self.ui.status(_(b'reopening closed branch head %s\n') % dest)

        # from here on the repo property serves the unfiltered view
        self.prepared = True
441
441
442 def _assignworkingcopy(self):
442 def _assignworkingcopy(self):
443 if self.inmemory:
443 if self.inmemory:
444 from mercurial.context import overlayworkingctx
444 from mercurial.context import overlayworkingctx
445
445
446 self.wctx = overlayworkingctx(self.repo)
446 self.wctx = overlayworkingctx(self.repo)
447 self.repo.ui.debug(b"rebasing in-memory\n")
447 self.repo.ui.debug(b"rebasing in-memory\n")
448 else:
448 else:
449 self.wctx = self.repo[None]
449 self.wctx = self.repo[None]
450 self.repo.ui.debug(b"rebasing on disk\n")
450 self.repo.ui.debug(b"rebasing on disk\n")
451 self.repo.ui.log(
451 self.repo.ui.log(
452 b"rebase",
452 b"rebase",
453 b"using in-memory rebase: %r\n",
453 b"using in-memory rebase: %r\n",
454 self.inmemory,
454 self.inmemory,
455 rebase_imm_used=self.inmemory,
455 rebase_imm_used=self.inmemory,
456 )
456 )
457
457
    def _performrebase(self, tr):
        """Rebase every pending revision, in topological order.

        tr: optional single transaction spanning the whole rebase; when
        provided, state is additionally stored via the transaction so it
        lands when the transaction commits.
        """
        self._assignworkingcopy()
        repo, ui = self.repo, self.ui
        if self.keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            self.extrafns.insert(0, _savebranch)
            if self.collapsef:
                branches = set()
                for rev in self.state:
                    branches.add(repo[rev].branch())
                if len(branches) > 1:
                    raise error.Abort(
                        _(b'cannot collapse multiple named branches')
                    )

        # Calculate self.obsoletenotrebased
        obsrevs = _filterobsoleterevs(self.repo, self.state)
        self._handleskippingobsolete(obsrevs, self.destmap)

        # Keep track of the active bookmarks in order to reset them later
        self.activebookmark = self.activebookmark or repo._activebookmark
        if self.activebookmark:
            bookmarks.deactivate(repo)

        # Store the state before we begin so users can run 'hg rebase --abort'
        # if we fail before the transaction closes.
        self.storestatus()
        if tr:
            # When using single transaction, store state when transaction
            # commits.
            self.storestatus(tr)

        cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
        p = repo.ui.makeprogress(
            _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
        )

        def progress(ctx):
            # progress callback invoked per rebased changeset
            p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))

        allowdivergence = self.ui.configbool(
            b'experimental', b'evolution.allowdivergence'
        )
        for subset in sortsource(self.destmap):
            sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
            if not allowdivergence:
                # leave out descendants of obsolete revisions that have no
                # successor in the destination, to avoid creating divergence
                sortedrevs -= self.repo.revs(
                    b'descendants(%ld) and not %ld',
                    self.obsoletewithoutsuccessorindestination,
                    self.obsoletewithoutsuccessorindestination,
                )
            for rev in sortedrevs:
                self._rebasenode(tr, rev, allowdivergence, progress)
        p.complete()
        ui.note(_(b'rebase merging completed\n'))
515
515
    def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
        '''Commit the wd changes with parents p1 and p2.

        Reuse commit info from rev but also store useful information in extra.
        Return node of committed revision.'''
        repo = self.repo
        ctx = repo[rev]
        if commitmsg is None:
            commitmsg = ctx.description()
        # --date overrides the original changeset date when given
        date = self.date
        if date is None:
            date = ctx.date()
        extra = {b'rebase_source': ctx.hex()}
        # let registered extra functions (e.g. _savebranch) amend extras
        for c in self.extrafns:
            c(ctx, extra)
        keepbranch = self.keepbranchesf and repo[p1].branch() != ctx.branch()
        # never demote below draft; secret stays secret
        destphase = max(ctx.phase(), phases.draft)
        overrides = {(b'phases', b'new-commit'): destphase}
        if keepbranch:
            # a branch-only change would otherwise be dropped as empty
            overrides[(b'ui', b'allowemptycommit')] = True
        with repo.ui.configoverride(overrides, b'rebase'):
            if self.inmemory:
                newnode = commitmemorynode(
                    repo,
                    p1,
                    p2,
                    wctx=self.wctx,
                    extra=extra,
                    commitmsg=commitmsg,
                    editor=editor,
                    user=ctx.user(),
                    date=date,
                )
                mergemod.mergestate.clean(repo)
            else:
                newnode = commitnode(
                    repo,
                    p1,
                    p2,
                    extra=extra,
                    commitmsg=commitmsg,
                    editor=editor,
                    user=ctx.user(),
                    date=date,
                )

            if newnode is None:
                # If it ended up being a no-op commit, then the normal
                # merge state clean-up path doesn't happen, so do it
                # here. Fix issue5494
                mergemod.mergestate.clean(repo)
        return newnode
568
568
    def _rebasenode(self, tr, rev, allowdivergence, progressfn):
        """Rebase a single revision ``rev`` onto its destination.

        Dispatches on the revision's current entry in ``self.state``:
        already-rebased revisions are only reported, obsolete revisions
        without a rebased successor are skipped (or refused when they would
        create divergence), and revisions still marked ``revtodo`` are merged
        and committed.  ``tr`` is the active transaction or None; when None,
        status is persisted after each revision so an interrupted rebase can
        resume.  ``progressfn`` is called with the changectx before work
        starts on it.
        """
        repo, ui, opts = self.repo, self.ui, self.opts
        dest = self.destmap[rev]
        ctx = repo[rev]
        desc = _ctxdesc(ctx)
        if self.state[rev] == rev:
            # state maps the revision to itself: nothing to do.
            ui.status(_(b'already rebased %s\n') % desc)
        elif (
            not allowdivergence
            and rev in self.obsoletewithoutsuccessorindestination
        ):
            # Rebasing an obsolete changeset whose successor is not in the
            # destination would create content divergence; refuse unless the
            # user opted in via experimental.evolution.allowdivergence.
            msg = (
                _(
                    b'note: not rebasing %s and its descendants as '
                    b'this would cause divergence\n'
                )
                % desc
            )
            repo.ui.status(msg)
            self.skipped.add(rev)
        elif rev in self.obsoletenotrebased:
            # Obsolete revision that should not be rebased: either it has no
            # successor at all, or its successor is already in destination.
            succ = self.obsoletenotrebased[rev]
            if succ is None:
                msg = _(b'note: not rebasing %s, it has no successor\n') % desc
            else:
                succdesc = _ctxdesc(repo[succ])
                msg = _(
                    b'note: not rebasing %s, already in destination as %s\n'
                ) % (desc, succdesc)
            repo.ui.status(msg)
            # Make clearrebased aware state[rev] is not a true successor
            self.skipped.add(rev)
            # Record rev as moved to its desired destination in self.state.
            # This helps bookmark and working parent movement.
            dest = max(
                adjustdest(repo, rev, self.destmap, self.state, self.skipped)
            )
            self.state[rev] = dest
        elif self.state[rev] == revtodo:
            # The normal case: actually rebase this revision.
            ui.status(_(b'rebasing %s\n') % desc)
            progressfn(ctx)
            p1, p2, base = defineparents(
                repo,
                rev,
                self.destmap,
                self.state,
                self.skipped,
                self.obsoletenotrebased,
            )
            if not self.inmemory and len(repo[None].parents()) == 2:
                # Two dirstate parents mean a merge was left in progress by a
                # previously interrupted rebase; skip re-merging.
                repo.ui.debug(b'resuming interrupted rebase\n')
            else:
                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                with ui.configoverride(overrides, b'rebase'):
                    stats = rebasenode(
                        repo,
                        rev,
                        p1,
                        base,
                        self.collapsef,
                        dest,
                        wctx=self.wctx,
                    )
                    if stats.unresolvedcount > 0:
                        if self.inmemory:
                            # In-memory merge cannot pause for manual conflict
                            # resolution; the caller falls back to on-disk.
                            raise error.InMemoryMergeConflictsError()
                        else:
                            raise error.InterventionRequired(
                                _(
                                    b'unresolved conflicts (see hg '
                                    b'resolve, then hg rebase --continue)'
                                )
                            )
            if not self.collapsef:
                merging = p2 != nullrev
                editform = cmdutil.mergeeditform(merging, b'rebase')
                editor = cmdutil.getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
                newnode = self._concludenode(rev, p1, p2, editor)
            else:
                # Skip commit if we are collapsing
                if self.inmemory:
                    self.wctx.setbase(repo[p1])
                else:
                    repo.setparents(repo[p1].node())
                newnode = None
            # Update the state
            if newnode is not None:
                self.state[rev] = repo[newnode].rev()
                ui.debug(b'rebased as %s\n' % short(newnode))
            else:
                if not self.collapsef:
                    # The commit was a no-op (destination already has these
                    # changes); record the revision as skipped.
                    ui.warn(
                        _(
                            b'note: not rebasing %s, its destination already '
                            b'has all its changes\n'
                        )
                        % desc
                    )
                    self.skipped.add(rev)
                self.state[rev] = p1
                ui.debug(b'next revision set to %d\n' % p1)
        else:
            # state already points at the rebased copy of this revision.
            ui.status(
                _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
            )
        if not tr:
            # When not using single transaction, store state after each
            # commit is completely done. On InterventionRequired, we thus
            # won't store the status. Instead, we'll hit the "len(parents) == 2"
            # case and realize that the commit was in progress.
            self.storestatus()
682
682
    def _finishrebase(self):
        """Finalize a completed rebase.

        Creates the single collapsed commit when ``--collapse`` was used,
        updates mq state if applicable, moves the working directory back to
        (the rebased copy of) its original parent, strips or obsoletes the
        original changesets via ``clearrebased``, clears on-disk rebase
        state, and reactivates the previously active bookmark when it still
        points at the working directory parent.
        """
        repo, ui, opts = self.repo, self.ui, self.opts
        fm = ui.formatter(b'rebase', opts)
        fm.startitem()
        if self.collapsef:
            # Compute parents for the single collapsed commit from the first
            # rebased revision.
            p1, p2, _base = defineparents(
                repo,
                min(self.state),
                self.destmap,
                self.state,
                self.skipped,
                self.obsoletenotrebased,
            )
            editopt = opts.get(b'edit')
            editform = b'rebase.collapse'
            if self.collapsemsg:
                commitmsg = self.collapsemsg
            else:
                # No -m/--message given: synthesize a message listing every
                # collapsed description and force the editor open.
                commitmsg = b'Collapsed revision'
                for rebased in sorted(self.state):
                    if rebased not in self.skipped:
                        commitmsg += b'\n* %s' % repo[rebased].description()
                editopt = True
            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
            # Reuse metadata (user, date, extras) from the newest revision.
            revtoreuse = max(self.state)

            newnode = self._concludenode(
                revtoreuse, p1, self.external, editor, commitmsg=commitmsg
            )

            if newnode is not None:
                # Point every rebased revision at the collapsed commit.
                newrev = repo[newnode].rev()
                for oldrev in self.state:
                    self.state[oldrev] = newrev

        if b'qtip' in repo.tags():
            updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))

        # restore original working directory
        # (we do this before stripping)
        newwd = self.state.get(self.originalwd, self.originalwd)
        if newwd < 0:
            # original directory is a parent of rebase set root or ignored
            newwd = self.originalwd
        if newwd not in [c.rev() for c in repo[None].parents()]:
            ui.note(_(b"update back to initial working directory parent\n"))
            hg.updaterepo(repo, newwd, overwrite=False)

        collapsedas = None
        if self.collapsef and not self.keepf:
            # Tell clearrebased which node replaced the collapsed set so
            # obsolescence markers can be written accordingly.
            collapsedas = newnode
        clearrebased(
            ui,
            repo,
            self.destmap,
            self.state,
            self.skipped,
            collapsedas,
            self.keepf,
            fm=fm,
            backup=self.backupf,
        )

        clearstatus(repo)
        clearcollapsemsg(repo)

        ui.note(_(b"rebase completed\n"))
        # Drop the rollback point; rolling back past a rebase is unsafe.
        util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
        if self.skipped:
            skippedlen = len(self.skipped)
            ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
        fm.end()

        if (
            self.activebookmark
            and self.activebookmark in repo._bookmarks
            and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
        ):
            bookmarks.activate(repo, self.activebookmark)
    def _abort(self, backup=True, suppwarns=False):
        '''Restore the repository to its original state.

        Strips the partially-rebased changesets (when safe), moves the
        working directory back to where it started, reactivates the original
        bookmark, and always clears on-disk rebase state.  ``backup``
        controls whether the strip keeps a bundle backup; ``suppwarns``
        suppresses the final "rebase aborted" warning.  Returns 0.
        '''

        repo = self.repo
        try:
            # If the first commits in the rebased set get skipped during the
            # rebase, their values within the state mapping will be the dest
            # rev id. The rebased list must not contain the dest rev
            # (issue4896)
            rebased = [
                s
                for r, s in self.state.items()
                if s >= 0 and s != r and s != self.destmap[r]
            ]
            immutable = [d for d in rebased if not repo[d].mutable()]
            cleanup = True
            if immutable:
                # Public changesets cannot be stripped; warn and keep them.
                repo.ui.warn(
                    _(b"warning: can't clean up public changesets %s\n")
                    % b', '.join(bytes(repo[r]) for r in immutable),
                    hint=_(b"see 'hg help phases' for details"),
                )
                cleanup = False

            descendants = set()
            if rebased:
                descendants = set(repo.changelog.descendants(rebased))
            if descendants - set(rebased):
                # Someone committed on top of the rebased changesets while
                # the rebase was interrupted; stripping would destroy their
                # work, so leave everything in place.
                repo.ui.warn(
                    _(
                        b"warning: new changesets detected on "
                        b"destination branch, can't strip\n"
                    )
                )
                cleanup = False

            if cleanup:
                shouldupdate = False
                if rebased:
                    strippoints = [
                        c.node() for c in repo.set(b'roots(%ld)', rebased)
                    ]

                # Only move the working directory if it currently sits on a
                # revision involved in the rebase.
                updateifonnodes = set(rebased)
                updateifonnodes.update(self.destmap.values())
                updateifonnodes.add(self.originalwd)
                shouldupdate = repo[b'.'].rev() in updateifonnodes

                # Update away from the rebase if necessary
                if shouldupdate or needupdate(repo, self.state):
                    mergemod.update(
                        repo, self.originalwd, branchmerge=False, force=True
                    )

                # Strip from the first rebased revision
                if rebased:
                    repair.strip(repo.ui, repo, strippoints, backup=backup)

            if self.activebookmark and self.activebookmark in repo._bookmarks:
                bookmarks.activate(repo, self.activebookmark)

        finally:
            # Always clear on-disk rebase state, even if restore failed.
            clearstatus(repo)
            clearcollapsemsg(repo)
            if not suppwarns:
                repo.ui.warn(_(b'rebase aborted\n'))
        return 0
830
830
831
831
832 @command(
832 @command(
833 b'rebase',
833 b'rebase',
834 [
834 [
835 (
835 (
836 b's',
836 b's',
837 b'source',
837 b'source',
838 b'',
838 b'',
839 _(b'rebase the specified changeset and descendants'),
839 _(b'rebase the specified changeset and descendants'),
840 _(b'REV'),
840 _(b'REV'),
841 ),
841 ),
842 (
842 (
843 b'b',
843 b'b',
844 b'base',
844 b'base',
845 b'',
845 b'',
846 _(b'rebase everything from branching point of specified changeset'),
846 _(b'rebase everything from branching point of specified changeset'),
847 _(b'REV'),
847 _(b'REV'),
848 ),
848 ),
849 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
849 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
850 (
850 (
851 b'd',
851 b'd',
852 b'dest',
852 b'dest',
853 b'',
853 b'',
854 _(b'rebase onto the specified changeset'),
854 _(b'rebase onto the specified changeset'),
855 _(b'REV'),
855 _(b'REV'),
856 ),
856 ),
857 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
857 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
858 (
858 (
859 b'm',
859 b'm',
860 b'message',
860 b'message',
861 b'',
861 b'',
862 _(b'use text as collapse commit message'),
862 _(b'use text as collapse commit message'),
863 _(b'TEXT'),
863 _(b'TEXT'),
864 ),
864 ),
865 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
865 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
866 (
866 (
867 b'l',
867 b'l',
868 b'logfile',
868 b'logfile',
869 b'',
869 b'',
870 _(b'read collapse commit message from file'),
870 _(b'read collapse commit message from file'),
871 _(b'FILE'),
871 _(b'FILE'),
872 ),
872 ),
873 (b'k', b'keep', False, _(b'keep original changesets')),
873 (b'k', b'keep', False, _(b'keep original changesets')),
874 (b'', b'keepbranches', False, _(b'keep original branch names')),
874 (b'', b'keepbranches', False, _(b'keep original branch names')),
875 (b'D', b'detach', False, _(b'(DEPRECATED)')),
875 (b'D', b'detach', False, _(b'(DEPRECATED)')),
876 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
876 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
877 (b't', b'tool', b'', _(b'specify merge tool')),
877 (b't', b'tool', b'', _(b'specify merge tool')),
878 (b'', b'stop', False, _(b'stop interrupted rebase')),
878 (b'', b'stop', False, _(b'stop interrupted rebase')),
879 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
879 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
880 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
880 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
881 (
881 (
882 b'',
882 b'',
883 b'auto-orphans',
883 b'auto-orphans',
884 b'',
884 b'',
885 _(
885 _(
886 b'automatically rebase orphan revisions '
886 b'automatically rebase orphan revisions '
887 b'in the specified revset (EXPERIMENTAL)'
887 b'in the specified revset (EXPERIMENTAL)'
888 ),
888 ),
889 ),
889 ),
890 ]
890 ]
891 + cmdutil.dryrunopts
891 + cmdutil.dryrunopts
892 + cmdutil.formatteropts
892 + cmdutil.formatteropts
893 + cmdutil.confirmopts,
893 + cmdutil.confirmopts,
894 _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
894 _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
895 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
895 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
896 )
896 )
897 def rebase(ui, repo, **opts):
897 def rebase(ui, repo, **opts):
898 """move changeset (and descendants) to a different branch
898 """move changeset (and descendants) to a different branch
899
899
900 Rebase uses repeated merging to graft changesets from one part of
900 Rebase uses repeated merging to graft changesets from one part of
901 history (the source) onto another (the destination). This can be
901 history (the source) onto another (the destination). This can be
902 useful for linearizing *local* changes relative to a master
902 useful for linearizing *local* changes relative to a master
903 development tree.
903 development tree.
904
904
905 Published commits cannot be rebased (see :hg:`help phases`).
905 Published commits cannot be rebased (see :hg:`help phases`).
906 To copy commits, see :hg:`help graft`.
906 To copy commits, see :hg:`help graft`.
907
907
908 If you don't specify a destination changeset (``-d/--dest``), rebase
908 If you don't specify a destination changeset (``-d/--dest``), rebase
909 will use the same logic as :hg:`merge` to pick a destination. if
909 will use the same logic as :hg:`merge` to pick a destination. if
910 the current branch contains exactly one other head, the other head
910 the current branch contains exactly one other head, the other head
911 is merged with by default. Otherwise, an explicit revision with
911 is merged with by default. Otherwise, an explicit revision with
912 which to merge with must be provided. (destination changeset is not
912 which to merge with must be provided. (destination changeset is not
913 modified by rebasing, but new changesets are added as its
913 modified by rebasing, but new changesets are added as its
914 descendants.)
914 descendants.)
915
915
916 Here are the ways to select changesets:
916 Here are the ways to select changesets:
917
917
918 1. Explicitly select them using ``--rev``.
918 1. Explicitly select them using ``--rev``.
919
919
920 2. Use ``--source`` to select a root changeset and include all of its
920 2. Use ``--source`` to select a root changeset and include all of its
921 descendants.
921 descendants.
922
922
923 3. Use ``--base`` to select a changeset; rebase will find ancestors
923 3. Use ``--base`` to select a changeset; rebase will find ancestors
924 and their descendants which are not also ancestors of the destination.
924 and their descendants which are not also ancestors of the destination.
925
925
926 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
926 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
927 rebase will use ``--base .`` as above.
927 rebase will use ``--base .`` as above.
928
928
929 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
929 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
930 can be used in ``--dest``. Destination would be calculated per source
930 can be used in ``--dest``. Destination would be calculated per source
931 revision with ``SRC`` substituted by that single source revision and
931 revision with ``SRC`` substituted by that single source revision and
932 ``ALLSRC`` substituted by all source revisions.
932 ``ALLSRC`` substituted by all source revisions.
933
933
934 Rebase will destroy original changesets unless you use ``--keep``.
934 Rebase will destroy original changesets unless you use ``--keep``.
935 It will also move your bookmarks (even if you do).
935 It will also move your bookmarks (even if you do).
936
936
937 Some changesets may be dropped if they do not contribute changes
937 Some changesets may be dropped if they do not contribute changes
938 (e.g. merges from the destination branch).
938 (e.g. merges from the destination branch).
939
939
940 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
940 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
941 a named branch with two heads. You will need to explicitly specify source
941 a named branch with two heads. You will need to explicitly specify source
942 and/or destination.
942 and/or destination.
943
943
944 If you need to use a tool to automate merge/conflict decisions, you
944 If you need to use a tool to automate merge/conflict decisions, you
945 can specify one with ``--tool``, see :hg:`help merge-tools`.
945 can specify one with ``--tool``, see :hg:`help merge-tools`.
946 As a caveat: the tool will not be used to mediate when a file was
946 As a caveat: the tool will not be used to mediate when a file was
947 deleted, there is no hook presently available for this.
947 deleted, there is no hook presently available for this.
948
948
949 If a rebase is interrupted to manually resolve a conflict, it can be
949 If a rebase is interrupted to manually resolve a conflict, it can be
950 continued with --continue/-c, aborted with --abort/-a, or stopped with
950 continued with --continue/-c, aborted with --abort/-a, or stopped with
951 --stop.
951 --stop.
952
952
953 .. container:: verbose
953 .. container:: verbose
954
954
955 Examples:
955 Examples:
956
956
957 - move "local changes" (current commit back to branching point)
957 - move "local changes" (current commit back to branching point)
958 to the current branch tip after a pull::
958 to the current branch tip after a pull::
959
959
960 hg rebase
960 hg rebase
961
961
962 - move a single changeset to the stable branch::
962 - move a single changeset to the stable branch::
963
963
964 hg rebase -r 5f493448 -d stable
964 hg rebase -r 5f493448 -d stable
965
965
966 - splice a commit and all its descendants onto another part of history::
966 - splice a commit and all its descendants onto another part of history::
967
967
968 hg rebase --source c0c3 --dest 4cf9
968 hg rebase --source c0c3 --dest 4cf9
969
969
970 - rebase everything on a branch marked by a bookmark onto the
970 - rebase everything on a branch marked by a bookmark onto the
971 default branch::
971 default branch::
972
972
973 hg rebase --base myfeature --dest default
973 hg rebase --base myfeature --dest default
974
974
975 - collapse a sequence of changes into a single commit::
975 - collapse a sequence of changes into a single commit::
976
976
977 hg rebase --collapse -r 1520:1525 -d .
977 hg rebase --collapse -r 1520:1525 -d .
978
978
979 - move a named branch while preserving its name::
979 - move a named branch while preserving its name::
980
980
981 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
981 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
982
982
983 - stabilize orphaned changesets so history looks linear::
983 - stabilize orphaned changesets so history looks linear::
984
984
985 hg rebase -r 'orphan()-obsolete()'\
985 hg rebase -r 'orphan()-obsolete()'\
986 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
986 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
987 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
987 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
988
988
989 Configuration Options:
989 Configuration Options:
990
990
991 You can make rebase require a destination if you set the following config
991 You can make rebase require a destination if you set the following config
992 option::
992 option::
993
993
994 [commands]
994 [commands]
995 rebase.requiredest = True
995 rebase.requiredest = True
996
996
997 By default, rebase will close the transaction after each commit. For
997 By default, rebase will close the transaction after each commit. For
998 performance purposes, you can configure rebase to use a single transaction
998 performance purposes, you can configure rebase to use a single transaction
999 across the entire rebase. WARNING: This setting introduces a significant
999 across the entire rebase. WARNING: This setting introduces a significant
1000 risk of losing the work you've done in a rebase if the rebase aborts
1000 risk of losing the work you've done in a rebase if the rebase aborts
1001 unexpectedly::
1001 unexpectedly::
1002
1002
1003 [rebase]
1003 [rebase]
1004 singletransaction = True
1004 singletransaction = True
1005
1005
1006 By default, rebase writes to the working copy, but you can configure it to
1006 By default, rebase writes to the working copy, but you can configure it to
1007 run in-memory for better performance. When the rebase is not moving the
1007 run in-memory for better performance. When the rebase is not moving the
1008 parent(s) of the working copy (AKA the "currently checked out changesets"),
1008 parent(s) of the working copy (AKA the "currently checked out changesets"),
1009 this may also allow it to run even if the working copy is dirty::
1009 this may also allow it to run even if the working copy is dirty::
1010
1010
1011 [rebase]
1011 [rebase]
1012 experimental.inmemory = True
1012 experimental.inmemory = True
1013
1013
1014 Return Values:
1014 Return Values:
1015
1015
1016 Returns 0 on success, 1 if nothing to rebase or there are
1016 Returns 0 on success, 1 if nothing to rebase or there are
1017 unresolved conflicts.
1017 unresolved conflicts.
1018
1018
1019 """
1019 """
1020 opts = pycompat.byteskwargs(opts)
1020 opts = pycompat.byteskwargs(opts)
1021 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1021 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1022 dryrun = opts.get(b'dry_run')
1022 dryrun = opts.get(b'dry_run')
1023 confirm = opts.get(b'confirm')
1023 confirm = opts.get(b'confirm')
1024 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1024 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1025 if dryrun and action:
1025 if dryrun and action:
1026 raise error.Abort(_(b'cannot specify both --dry-run and --%s') % action)
1026 raise error.Abort(_(b'cannot specify both --dry-run and --%s') % action)
1027 if confirm and action:
1027 if confirm and action:
1028 raise error.Abort(_(b'cannot specify both --confirm and --%s') % action)
1028 raise error.Abort(_(b'cannot specify both --confirm and --%s') % action)
1029 if dryrun and confirm:
1029 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1030 raise error.Abort(_(b'cannot specify both --confirm and --dry-run'))
1031
1030
1032 if action or repo.currenttransaction() is not None:
1031 if action or repo.currenttransaction() is not None:
1033 # in-memory rebase is not compatible with resuming rebases.
1032 # in-memory rebase is not compatible with resuming rebases.
1034 # (Or if it is run within a transaction, since the restart logic can
1033 # (Or if it is run within a transaction, since the restart logic can
1035 # fail the entire transaction.)
1034 # fail the entire transaction.)
1036 inmemory = False
1035 inmemory = False
1037
1036
1038 if opts.get(b'auto_orphans'):
1037 if opts.get(b'auto_orphans'):
1039 for key in opts:
1038 for key in opts:
1040 if key != b'auto_orphans' and opts.get(key):
1039 if key != b'auto_orphans' and opts.get(key):
1041 raise error.Abort(
1040 raise error.Abort(
1042 _(b'--auto-orphans is incompatible with %s') % (b'--' + key)
1041 _(b'--auto-orphans is incompatible with %s') % (b'--' + key)
1043 )
1042 )
1044 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1043 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1045 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1044 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1046 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1045 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1047
1046
1048 if dryrun or confirm:
1047 if dryrun or confirm:
1049 return _dryrunrebase(ui, repo, action, opts)
1048 return _dryrunrebase(ui, repo, action, opts)
1050 elif action == b'stop':
1049 elif action == b'stop':
1051 rbsrt = rebaseruntime(repo, ui)
1050 rbsrt = rebaseruntime(repo, ui)
1052 with repo.wlock(), repo.lock():
1051 with repo.wlock(), repo.lock():
1053 rbsrt.restorestatus()
1052 rbsrt.restorestatus()
1054 if rbsrt.collapsef:
1053 if rbsrt.collapsef:
1055 raise error.Abort(_(b"cannot stop in --collapse session"))
1054 raise error.Abort(_(b"cannot stop in --collapse session"))
1056 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1055 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1057 if not (rbsrt.keepf or allowunstable):
1056 if not (rbsrt.keepf or allowunstable):
1058 raise error.Abort(
1057 raise error.Abort(
1059 _(
1058 _(
1060 b"cannot remove original changesets with"
1059 b"cannot remove original changesets with"
1061 b" unrebased descendants"
1060 b" unrebased descendants"
1062 ),
1061 ),
1063 hint=_(
1062 hint=_(
1064 b'either enable obsmarkers to allow unstable '
1063 b'either enable obsmarkers to allow unstable '
1065 b'revisions or use --keep to keep original '
1064 b'revisions or use --keep to keep original '
1066 b'changesets'
1065 b'changesets'
1067 ),
1066 ),
1068 )
1067 )
1069 if needupdate(repo, rbsrt.state):
1068 if needupdate(repo, rbsrt.state):
1070 # update to the current working revision
1069 # update to the current working revision
1071 # to clear interrupted merge
1070 # to clear interrupted merge
1072 hg.updaterepo(repo, rbsrt.originalwd, overwrite=True)
1071 hg.updaterepo(repo, rbsrt.originalwd, overwrite=True)
1073 rbsrt._finishrebase()
1072 rbsrt._finishrebase()
1074 return 0
1073 return 0
1075 elif inmemory:
1074 elif inmemory:
1076 try:
1075 try:
1077 # in-memory merge doesn't support conflicts, so if we hit any, abort
1076 # in-memory merge doesn't support conflicts, so if we hit any, abort
1078 # and re-run as an on-disk merge.
1077 # and re-run as an on-disk merge.
1079 overrides = {(b'rebase', b'singletransaction'): True}
1078 overrides = {(b'rebase', b'singletransaction'): True}
1080 with ui.configoverride(overrides, b'rebase'):
1079 with ui.configoverride(overrides, b'rebase'):
1081 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1080 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1082 except error.InMemoryMergeConflictsError:
1081 except error.InMemoryMergeConflictsError:
1083 ui.warn(
1082 ui.warn(
1084 _(
1083 _(
1085 b'hit merge conflicts; re-running rebase without in-memory'
1084 b'hit merge conflicts; re-running rebase without in-memory'
1086 b' merge\n'
1085 b' merge\n'
1087 )
1086 )
1088 )
1087 )
1089 # TODO: Make in-memory merge not use the on-disk merge state, so
1088 # TODO: Make in-memory merge not use the on-disk merge state, so
1090 # we don't have to clean it here
1089 # we don't have to clean it here
1091 mergemod.mergestate.clean(repo)
1090 mergemod.mergestate.clean(repo)
1092 clearstatus(repo)
1091 clearstatus(repo)
1093 clearcollapsemsg(repo)
1092 clearcollapsemsg(repo)
1094 return _dorebase(ui, repo, action, opts, inmemory=False)
1093 return _dorebase(ui, repo, action, opts, inmemory=False)
1095 else:
1094 else:
1096 return _dorebase(ui, repo, action, opts)
1095 return _dorebase(ui, repo, action, opts)
1097
1096
1098
1097
1099 def _dryrunrebase(ui, repo, action, opts):
1098 def _dryrunrebase(ui, repo, action, opts):
1100 rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
1099 rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
1101 confirm = opts.get(b'confirm')
1100 confirm = opts.get(b'confirm')
1102 if confirm:
1101 if confirm:
1103 ui.status(_(b'starting in-memory rebase\n'))
1102 ui.status(_(b'starting in-memory rebase\n'))
1104 else:
1103 else:
1105 ui.status(
1104 ui.status(
1106 _(b'starting dry-run rebase; repository will not be changed\n')
1105 _(b'starting dry-run rebase; repository will not be changed\n')
1107 )
1106 )
1108 with repo.wlock(), repo.lock():
1107 with repo.wlock(), repo.lock():
1109 needsabort = True
1108 needsabort = True
1110 try:
1109 try:
1111 overrides = {(b'rebase', b'singletransaction'): True}
1110 overrides = {(b'rebase', b'singletransaction'): True}
1112 with ui.configoverride(overrides, b'rebase'):
1111 with ui.configoverride(overrides, b'rebase'):
1113 _origrebase(
1112 _origrebase(
1114 ui,
1113 ui,
1115 repo,
1114 repo,
1116 action,
1115 action,
1117 opts,
1116 opts,
1118 rbsrt,
1117 rbsrt,
1119 inmemory=True,
1118 inmemory=True,
1120 leaveunfinished=True,
1119 leaveunfinished=True,
1121 )
1120 )
1122 except error.InMemoryMergeConflictsError:
1121 except error.InMemoryMergeConflictsError:
1123 ui.status(_(b'hit a merge conflict\n'))
1122 ui.status(_(b'hit a merge conflict\n'))
1124 return 1
1123 return 1
1125 except error.Abort:
1124 except error.Abort:
1126 needsabort = False
1125 needsabort = False
1127 raise
1126 raise
1128 else:
1127 else:
1129 if confirm:
1128 if confirm:
1130 ui.status(_(b'rebase completed successfully\n'))
1129 ui.status(_(b'rebase completed successfully\n'))
1131 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1130 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1132 # finish unfinished rebase
1131 # finish unfinished rebase
1133 rbsrt._finishrebase()
1132 rbsrt._finishrebase()
1134 else:
1133 else:
1135 rbsrt._prepareabortorcontinue(
1134 rbsrt._prepareabortorcontinue(
1136 isabort=True, backup=False, suppwarns=True
1135 isabort=True, backup=False, suppwarns=True
1137 )
1136 )
1138 needsabort = False
1137 needsabort = False
1139 else:
1138 else:
1140 ui.status(
1139 ui.status(
1141 _(
1140 _(
1142 b'dry-run rebase completed successfully; run without'
1141 b'dry-run rebase completed successfully; run without'
1143 b' -n/--dry-run to perform this rebase\n'
1142 b' -n/--dry-run to perform this rebase\n'
1144 )
1143 )
1145 )
1144 )
1146 return 0
1145 return 0
1147 finally:
1146 finally:
1148 if needsabort:
1147 if needsabort:
1149 # no need to store backup in case of dryrun
1148 # no need to store backup in case of dryrun
1150 rbsrt._prepareabortorcontinue(
1149 rbsrt._prepareabortorcontinue(
1151 isabort=True, backup=False, suppwarns=True
1150 isabort=True, backup=False, suppwarns=True
1152 )
1151 )
1153
1152
1154
1153
1155 def _dorebase(ui, repo, action, opts, inmemory=False):
1154 def _dorebase(ui, repo, action, opts, inmemory=False):
1156 rbsrt = rebaseruntime(repo, ui, inmemory, opts)
1155 rbsrt = rebaseruntime(repo, ui, inmemory, opts)
1157 return _origrebase(ui, repo, action, opts, rbsrt, inmemory=inmemory)
1156 return _origrebase(ui, repo, action, opts, rbsrt, inmemory=inmemory)
1158
1157
1159
1158
1160 def _origrebase(
1159 def _origrebase(
1161 ui, repo, action, opts, rbsrt, inmemory=False, leaveunfinished=False
1160 ui, repo, action, opts, rbsrt, inmemory=False, leaveunfinished=False
1162 ):
1161 ):
1163 assert action != b'stop'
1162 assert action != b'stop'
1164 with repo.wlock(), repo.lock():
1163 with repo.wlock(), repo.lock():
1165 # Validate input and define rebasing points
1164 # Validate input and define rebasing points
1166 destf = opts.get(b'dest', None)
1165 destf = opts.get(b'dest', None)
1167 srcf = opts.get(b'source', None)
1166 srcf = opts.get(b'source', None)
1168 basef = opts.get(b'base', None)
1167 basef = opts.get(b'base', None)
1169 revf = opts.get(b'rev', [])
1168 revf = opts.get(b'rev', [])
1170 # search default destination in this space
1169 # search default destination in this space
1171 # used in the 'hg pull --rebase' case, see issue 5214.
1170 # used in the 'hg pull --rebase' case, see issue 5214.
1172 destspace = opts.get(b'_destspace')
1171 destspace = opts.get(b'_destspace')
1173 if opts.get(b'interactive'):
1172 if opts.get(b'interactive'):
1174 try:
1173 try:
1175 if extensions.find(b'histedit'):
1174 if extensions.find(b'histedit'):
1176 enablehistedit = b''
1175 enablehistedit = b''
1177 except KeyError:
1176 except KeyError:
1178 enablehistedit = b" --config extensions.histedit="
1177 enablehistedit = b" --config extensions.histedit="
1179 help = b"hg%s help -e histedit" % enablehistedit
1178 help = b"hg%s help -e histedit" % enablehistedit
1180 msg = (
1179 msg = (
1181 _(
1180 _(
1182 b"interactive history editing is supported by the "
1181 b"interactive history editing is supported by the "
1183 b"'histedit' extension (see \"%s\")"
1182 b"'histedit' extension (see \"%s\")"
1184 )
1183 )
1185 % help
1184 % help
1186 )
1185 )
1187 raise error.Abort(msg)
1186 raise error.Abort(msg)
1188
1187
1189 if rbsrt.collapsemsg and not rbsrt.collapsef:
1188 if rbsrt.collapsemsg and not rbsrt.collapsef:
1190 raise error.Abort(_(b'message can only be specified with collapse'))
1189 raise error.Abort(_(b'message can only be specified with collapse'))
1191
1190
1192 if action:
1191 if action:
1193 if rbsrt.collapsef:
1192 if rbsrt.collapsef:
1194 raise error.Abort(
1193 raise error.Abort(
1195 _(b'cannot use collapse with continue or abort')
1194 _(b'cannot use collapse with continue or abort')
1196 )
1195 )
1197 if srcf or basef or destf:
1196 if srcf or basef or destf:
1198 raise error.Abort(
1197 raise error.Abort(
1199 _(b'abort and continue do not allow specifying revisions')
1198 _(b'abort and continue do not allow specifying revisions')
1200 )
1199 )
1201 if action == b'abort' and opts.get(b'tool', False):
1200 if action == b'abort' and opts.get(b'tool', False):
1202 ui.warn(_(b'tool option will be ignored\n'))
1201 ui.warn(_(b'tool option will be ignored\n'))
1203 if action == b'continue':
1202 if action == b'continue':
1204 ms = mergemod.mergestate.read(repo)
1203 ms = mergemod.mergestate.read(repo)
1205 mergeutil.checkunresolved(ms)
1204 mergeutil.checkunresolved(ms)
1206
1205
1207 retcode = rbsrt._prepareabortorcontinue(
1206 retcode = rbsrt._prepareabortorcontinue(
1208 isabort=(action == b'abort')
1207 isabort=(action == b'abort')
1209 )
1208 )
1210 if retcode is not None:
1209 if retcode is not None:
1211 return retcode
1210 return retcode
1212 else:
1211 else:
1213 destmap = _definedestmap(
1212 destmap = _definedestmap(
1214 ui,
1213 ui,
1215 repo,
1214 repo,
1216 inmemory,
1215 inmemory,
1217 destf,
1216 destf,
1218 srcf,
1217 srcf,
1219 basef,
1218 basef,
1220 revf,
1219 revf,
1221 destspace=destspace,
1220 destspace=destspace,
1222 )
1221 )
1223 retcode = rbsrt._preparenewrebase(destmap)
1222 retcode = rbsrt._preparenewrebase(destmap)
1224 if retcode is not None:
1223 if retcode is not None:
1225 return retcode
1224 return retcode
1226 storecollapsemsg(repo, rbsrt.collapsemsg)
1225 storecollapsemsg(repo, rbsrt.collapsemsg)
1227
1226
1228 tr = None
1227 tr = None
1229
1228
1230 singletr = ui.configbool(b'rebase', b'singletransaction')
1229 singletr = ui.configbool(b'rebase', b'singletransaction')
1231 if singletr:
1230 if singletr:
1232 tr = repo.transaction(b'rebase')
1231 tr = repo.transaction(b'rebase')
1233
1232
1234 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1233 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1235 # one transaction here. Otherwise, transactions are obtained when
1234 # one transaction here. Otherwise, transactions are obtained when
1236 # committing each node, which is slower but allows partial success.
1235 # committing each node, which is slower but allows partial success.
1237 with util.acceptintervention(tr):
1236 with util.acceptintervention(tr):
1238 # Same logic for the dirstate guard, except we don't create one when
1237 # Same logic for the dirstate guard, except we don't create one when
1239 # rebasing in-memory (it's not needed).
1238 # rebasing in-memory (it's not needed).
1240 dsguard = None
1239 dsguard = None
1241 if singletr and not inmemory:
1240 if singletr and not inmemory:
1242 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1241 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1243 with util.acceptintervention(dsguard):
1242 with util.acceptintervention(dsguard):
1244 rbsrt._performrebase(tr)
1243 rbsrt._performrebase(tr)
1245 if not leaveunfinished:
1244 if not leaveunfinished:
1246 rbsrt._finishrebase()
1245 rbsrt._finishrebase()
1247
1246
1248
1247
1249 def _definedestmap(
1248 def _definedestmap(
1250 ui,
1249 ui,
1251 repo,
1250 repo,
1252 inmemory,
1251 inmemory,
1253 destf=None,
1252 destf=None,
1254 srcf=None,
1253 srcf=None,
1255 basef=None,
1254 basef=None,
1256 revf=None,
1255 revf=None,
1257 destspace=None,
1256 destspace=None,
1258 ):
1257 ):
1259 """use revisions argument to define destmap {srcrev: destrev}"""
1258 """use revisions argument to define destmap {srcrev: destrev}"""
1260 if revf is None:
1259 if revf is None:
1261 revf = []
1260 revf = []
1262
1261
1263 # destspace is here to work around issues with `hg pull --rebase` see
1262 # destspace is here to work around issues with `hg pull --rebase` see
1264 # issue5214 for details
1263 # issue5214 for details
1265 if srcf and basef:
1264 if srcf and basef:
1266 raise error.Abort(_(b'cannot specify both a source and a base'))
1265 raise error.Abort(_(b'cannot specify both a source and a base'))
1267 if revf and basef:
1266 if revf and basef:
1268 raise error.Abort(_(b'cannot specify both a revision and a base'))
1267 raise error.Abort(_(b'cannot specify both a revision and a base'))
1269 if revf and srcf:
1268 if revf and srcf:
1270 raise error.Abort(_(b'cannot specify both a revision and a source'))
1269 raise error.Abort(_(b'cannot specify both a revision and a source'))
1271
1270
1272 cmdutil.checkunfinished(repo)
1271 cmdutil.checkunfinished(repo)
1273 if not inmemory:
1272 if not inmemory:
1274 cmdutil.bailifchanged(repo)
1273 cmdutil.bailifchanged(repo)
1275
1274
1276 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1275 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1277 raise error.Abort(
1276 raise error.Abort(
1278 _(b'you must specify a destination'),
1277 _(b'you must specify a destination'),
1279 hint=_(b'use: hg rebase -d REV'),
1278 hint=_(b'use: hg rebase -d REV'),
1280 )
1279 )
1281
1280
1282 dest = None
1281 dest = None
1283
1282
1284 if revf:
1283 if revf:
1285 rebaseset = scmutil.revrange(repo, revf)
1284 rebaseset = scmutil.revrange(repo, revf)
1286 if not rebaseset:
1285 if not rebaseset:
1287 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1286 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1288 return None
1287 return None
1289 elif srcf:
1288 elif srcf:
1290 src = scmutil.revrange(repo, [srcf])
1289 src = scmutil.revrange(repo, [srcf])
1291 if not src:
1290 if not src:
1292 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1291 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1293 return None
1292 return None
1294 rebaseset = repo.revs(b'(%ld)::', src)
1293 rebaseset = repo.revs(b'(%ld)::', src)
1295 assert rebaseset
1294 assert rebaseset
1296 else:
1295 else:
1297 base = scmutil.revrange(repo, [basef or b'.'])
1296 base = scmutil.revrange(repo, [basef or b'.'])
1298 if not base:
1297 if not base:
1299 ui.status(
1298 ui.status(
1300 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1299 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1301 )
1300 )
1302 return None
1301 return None
1303 if destf:
1302 if destf:
1304 # --base does not support multiple destinations
1303 # --base does not support multiple destinations
1305 dest = scmutil.revsingle(repo, destf)
1304 dest = scmutil.revsingle(repo, destf)
1306 else:
1305 else:
1307 dest = repo[_destrebase(repo, base, destspace=destspace)]
1306 dest = repo[_destrebase(repo, base, destspace=destspace)]
1308 destf = bytes(dest)
1307 destf = bytes(dest)
1309
1308
1310 roots = [] # selected children of branching points
1309 roots = [] # selected children of branching points
1311 bpbase = {} # {branchingpoint: [origbase]}
1310 bpbase = {} # {branchingpoint: [origbase]}
1312 for b in base: # group bases by branching points
1311 for b in base: # group bases by branching points
1313 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1312 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1314 bpbase[bp] = bpbase.get(bp, []) + [b]
1313 bpbase[bp] = bpbase.get(bp, []) + [b]
1315 if None in bpbase:
1314 if None in bpbase:
1316 # emulate the old behavior, showing "nothing to rebase" (a better
1315 # emulate the old behavior, showing "nothing to rebase" (a better
1317 # behavior may be abort with "cannot find branching point" error)
1316 # behavior may be abort with "cannot find branching point" error)
1318 bpbase.clear()
1317 bpbase.clear()
1319 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1318 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1320 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1319 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1321
1320
1322 rebaseset = repo.revs(b'%ld::', roots)
1321 rebaseset = repo.revs(b'%ld::', roots)
1323
1322
1324 if not rebaseset:
1323 if not rebaseset:
1325 # transform to list because smartsets are not comparable to
1324 # transform to list because smartsets are not comparable to
1326 # lists. This should be improved to honor laziness of
1325 # lists. This should be improved to honor laziness of
1327 # smartset.
1326 # smartset.
1328 if list(base) == [dest.rev()]:
1327 if list(base) == [dest.rev()]:
1329 if basef:
1328 if basef:
1330 ui.status(
1329 ui.status(
1331 _(
1330 _(
1332 b'nothing to rebase - %s is both "base"'
1331 b'nothing to rebase - %s is both "base"'
1333 b' and destination\n'
1332 b' and destination\n'
1334 )
1333 )
1335 % dest
1334 % dest
1336 )
1335 )
1337 else:
1336 else:
1338 ui.status(
1337 ui.status(
1339 _(
1338 _(
1340 b'nothing to rebase - working directory '
1339 b'nothing to rebase - working directory '
1341 b'parent is also destination\n'
1340 b'parent is also destination\n'
1342 )
1341 )
1343 )
1342 )
1344 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1343 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1345 if basef:
1344 if basef:
1346 ui.status(
1345 ui.status(
1347 _(
1346 _(
1348 b'nothing to rebase - "base" %s is '
1347 b'nothing to rebase - "base" %s is '
1349 b'already an ancestor of destination '
1348 b'already an ancestor of destination '
1350 b'%s\n'
1349 b'%s\n'
1351 )
1350 )
1352 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1351 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1353 )
1352 )
1354 else:
1353 else:
1355 ui.status(
1354 ui.status(
1356 _(
1355 _(
1357 b'nothing to rebase - working '
1356 b'nothing to rebase - working '
1358 b'directory parent is already an '
1357 b'directory parent is already an '
1359 b'ancestor of destination %s\n'
1358 b'ancestor of destination %s\n'
1360 )
1359 )
1361 % dest
1360 % dest
1362 )
1361 )
1363 else: # can it happen?
1362 else: # can it happen?
1364 ui.status(
1363 ui.status(
1365 _(b'nothing to rebase from %s to %s\n')
1364 _(b'nothing to rebase from %s to %s\n')
1366 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1365 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1367 )
1366 )
1368 return None
1367 return None
1369
1368
1370 rebasingwcp = repo[b'.'].rev() in rebaseset
1369 rebasingwcp = repo[b'.'].rev() in rebaseset
1371 ui.log(
1370 ui.log(
1372 b"rebase",
1371 b"rebase",
1373 b"rebasing working copy parent: %r\n",
1372 b"rebasing working copy parent: %r\n",
1374 rebasingwcp,
1373 rebasingwcp,
1375 rebase_rebasing_wcp=rebasingwcp,
1374 rebase_rebasing_wcp=rebasingwcp,
1376 )
1375 )
1377 if inmemory and rebasingwcp:
1376 if inmemory and rebasingwcp:
1378 # Check these since we did not before.
1377 # Check these since we did not before.
1379 cmdutil.checkunfinished(repo)
1378 cmdutil.checkunfinished(repo)
1380 cmdutil.bailifchanged(repo)
1379 cmdutil.bailifchanged(repo)
1381
1380
1382 if not destf:
1381 if not destf:
1383 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1382 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1384 destf = bytes(dest)
1383 destf = bytes(dest)
1385
1384
1386 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1385 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1387 alias = {b'ALLSRC': allsrc}
1386 alias = {b'ALLSRC': allsrc}
1388
1387
1389 if dest is None:
1388 if dest is None:
1390 try:
1389 try:
1391 # fast path: try to resolve dest without SRC alias
1390 # fast path: try to resolve dest without SRC alias
1392 dest = scmutil.revsingle(repo, destf, localalias=alias)
1391 dest = scmutil.revsingle(repo, destf, localalias=alias)
1393 except error.RepoLookupError:
1392 except error.RepoLookupError:
1394 # multi-dest path: resolve dest for each SRC separately
1393 # multi-dest path: resolve dest for each SRC separately
1395 destmap = {}
1394 destmap = {}
1396 for r in rebaseset:
1395 for r in rebaseset:
1397 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1396 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1398 # use repo.anyrevs instead of scmutil.revsingle because we
1397 # use repo.anyrevs instead of scmutil.revsingle because we
1399 # don't want to abort if destset is empty.
1398 # don't want to abort if destset is empty.
1400 destset = repo.anyrevs([destf], user=True, localalias=alias)
1399 destset = repo.anyrevs([destf], user=True, localalias=alias)
1401 size = len(destset)
1400 size = len(destset)
1402 if size == 1:
1401 if size == 1:
1403 destmap[r] = destset.first()
1402 destmap[r] = destset.first()
1404 elif size == 0:
1403 elif size == 0:
1405 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1404 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1406 else:
1405 else:
1407 raise error.Abort(
1406 raise error.Abort(
1408 _(b'rebase destination for %s is not unique') % repo[r]
1407 _(b'rebase destination for %s is not unique') % repo[r]
1409 )
1408 )
1410
1409
1411 if dest is not None:
1410 if dest is not None:
1412 # single-dest case: assign dest to each rev in rebaseset
1411 # single-dest case: assign dest to each rev in rebaseset
1413 destrev = dest.rev()
1412 destrev = dest.rev()
1414 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1413 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1415
1414
1416 if not destmap:
1415 if not destmap:
1417 ui.status(_(b'nothing to rebase - empty destination\n'))
1416 ui.status(_(b'nothing to rebase - empty destination\n'))
1418 return None
1417 return None
1419
1418
1420 return destmap
1419 return destmap
1421
1420
1422
1421
1423 def externalparent(repo, state, destancestors):
1422 def externalparent(repo, state, destancestors):
1424 """Return the revision that should be used as the second parent
1423 """Return the revision that should be used as the second parent
1425 when the revisions in state is collapsed on top of destancestors.
1424 when the revisions in state is collapsed on top of destancestors.
1426 Abort if there is more than one parent.
1425 Abort if there is more than one parent.
1427 """
1426 """
1428 parents = set()
1427 parents = set()
1429 source = min(state)
1428 source = min(state)
1430 for rev in state:
1429 for rev in state:
1431 if rev == source:
1430 if rev == source:
1432 continue
1431 continue
1433 for p in repo[rev].parents():
1432 for p in repo[rev].parents():
1434 if p.rev() not in state and p.rev() not in destancestors:
1433 if p.rev() not in state and p.rev() not in destancestors:
1435 parents.add(p.rev())
1434 parents.add(p.rev())
1436 if not parents:
1435 if not parents:
1437 return nullrev
1436 return nullrev
1438 if len(parents) == 1:
1437 if len(parents) == 1:
1439 return parents.pop()
1438 return parents.pop()
1440 raise error.Abort(
1439 raise error.Abort(
1441 _(
1440 _(
1442 b'unable to collapse on top of %d, there is more '
1441 b'unable to collapse on top of %d, there is more '
1443 b'than one external parent: %s'
1442 b'than one external parent: %s'
1444 )
1443 )
1445 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1444 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1446 )
1445 )
1447
1446
1448
1447
1449 def commitmemorynode(repo, p1, p2, wctx, editor, extra, user, date, commitmsg):
1448 def commitmemorynode(repo, p1, p2, wctx, editor, extra, user, date, commitmsg):
1450 '''Commit the memory changes with parents p1 and p2.
1449 '''Commit the memory changes with parents p1 and p2.
1451 Return node of committed revision.'''
1450 Return node of committed revision.'''
1452 # Replicates the empty check in ``repo.commit``.
1451 # Replicates the empty check in ``repo.commit``.
1453 if wctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1452 if wctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1454 return None
1453 return None
1455
1454
1456 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1455 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1457 # ``branch`` (used when passing ``--keepbranches``).
1456 # ``branch`` (used when passing ``--keepbranches``).
1458 branch = repo[p1].branch()
1457 branch = repo[p1].branch()
1459 if b'branch' in extra:
1458 if b'branch' in extra:
1460 branch = extra[b'branch']
1459 branch = extra[b'branch']
1461
1460
1462 memctx = wctx.tomemctx(
1461 memctx = wctx.tomemctx(
1463 commitmsg,
1462 commitmsg,
1464 parents=(p1, p2),
1463 parents=(p1, p2),
1465 date=date,
1464 date=date,
1466 extra=extra,
1465 extra=extra,
1467 user=user,
1466 user=user,
1468 branch=branch,
1467 branch=branch,
1469 editor=editor,
1468 editor=editor,
1470 )
1469 )
1471 commitres = repo.commitctx(memctx)
1470 commitres = repo.commitctx(memctx)
1472 wctx.clean() # Might be reused
1471 wctx.clean() # Might be reused
1473 return commitres
1472 return commitres
1474
1473
1475
1474
1476 def commitnode(repo, p1, p2, editor, extra, user, date, commitmsg):
1475 def commitnode(repo, p1, p2, editor, extra, user, date, commitmsg):
1477 '''Commit the wd changes with parents p1 and p2.
1476 '''Commit the wd changes with parents p1 and p2.
1478 Return node of committed revision.'''
1477 Return node of committed revision.'''
1479 dsguard = util.nullcontextmanager()
1478 dsguard = util.nullcontextmanager()
1480 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1479 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1481 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1480 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1482 with dsguard:
1481 with dsguard:
1483 repo.setparents(repo[p1].node(), repo[p2].node())
1482 repo.setparents(repo[p1].node(), repo[p2].node())
1484
1483
1485 # Commit might fail if unresolved files exist
1484 # Commit might fail if unresolved files exist
1486 newnode = repo.commit(
1485 newnode = repo.commit(
1487 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1486 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1488 )
1487 )
1489
1488
1490 repo.dirstate.setbranch(repo[newnode].branch())
1489 repo.dirstate.setbranch(repo[newnode].branch())
1491 return newnode
1490 return newnode
1492
1491
1493
1492
1494 def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
1493 def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
1495 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1494 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1496 # Merge phase
1495 # Merge phase
1497 # Update to destination and merge it with local
1496 # Update to destination and merge it with local
1498 if wctx.isinmemory():
1497 if wctx.isinmemory():
1499 wctx.setbase(repo[p1])
1498 wctx.setbase(repo[p1])
1500 else:
1499 else:
1501 if repo[b'.'].rev() != p1:
1500 if repo[b'.'].rev() != p1:
1502 repo.ui.debug(b" update to %d:%s\n" % (p1, repo[p1]))
1501 repo.ui.debug(b" update to %d:%s\n" % (p1, repo[p1]))
1503 mergemod.update(repo, p1, branchmerge=False, force=True)
1502 mergemod.update(repo, p1, branchmerge=False, force=True)
1504 else:
1503 else:
1505 repo.ui.debug(b" already in destination\n")
1504 repo.ui.debug(b" already in destination\n")
1506 # This is, alas, necessary to invalidate workingctx's manifest cache,
1505 # This is, alas, necessary to invalidate workingctx's manifest cache,
1507 # as well as other data we litter on it in other places.
1506 # as well as other data we litter on it in other places.
1508 wctx = repo[None]
1507 wctx = repo[None]
1509 repo.dirstate.write(repo.currenttransaction())
1508 repo.dirstate.write(repo.currenttransaction())
1510 repo.ui.debug(b" merge against %d:%s\n" % (rev, repo[rev]))
1509 repo.ui.debug(b" merge against %d:%s\n" % (rev, repo[rev]))
1511 if base is not None:
1510 if base is not None:
1512 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1511 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1513 # When collapsing in-place, the parent is the common ancestor, we
1512 # When collapsing in-place, the parent is the common ancestor, we
1514 # have to allow merging with it.
1513 # have to allow merging with it.
1515 stats = mergemod.update(
1514 stats = mergemod.update(
1516 repo,
1515 repo,
1517 rev,
1516 rev,
1518 branchmerge=True,
1517 branchmerge=True,
1519 force=True,
1518 force=True,
1520 ancestor=base,
1519 ancestor=base,
1521 mergeancestor=collapse,
1520 mergeancestor=collapse,
1522 labels=[b'dest', b'source'],
1521 labels=[b'dest', b'source'],
1523 wc=wctx,
1522 wc=wctx,
1524 )
1523 )
1525 if collapse:
1524 if collapse:
1526 copies.duplicatecopies(repo, wctx, rev, dest)
1525 copies.duplicatecopies(repo, wctx, rev, dest)
1527 else:
1526 else:
1528 # If we're not using --collapse, we need to
1527 # If we're not using --collapse, we need to
1529 # duplicate copies between the revision we're
1528 # duplicate copies between the revision we're
1530 # rebasing and its first parent, but *not*
1529 # rebasing and its first parent, but *not*
1531 # duplicate any copies that have already been
1530 # duplicate any copies that have already been
1532 # performed in the destination.
1531 # performed in the destination.
1533 p1rev = repo[rev].p1().rev()
1532 p1rev = repo[rev].p1().rev()
1534 copies.duplicatecopies(repo, wctx, rev, p1rev, skiprev=dest)
1533 copies.duplicatecopies(repo, wctx, rev, p1rev, skiprev=dest)
1535 return stats
1534 return stats
1536
1535
1537
1536
def adjustdest(repo, rev, destmap, state, skipped):
    r"""adjust rebase destination given the current rebase state

    ``rev`` is the revision being rebased. Return a list of two revisions,
    the adjusted destinations for rev's p1 and p2 respectively. For a parent
    that is nullrev, the (unadjusted) destination is returned for that slot.

    Two kinds of adjustment are applied:

    1. If a revision with the same destination was already rebased and is an
       ancestor of one of rev's parents, that parent's destination is moved
       onto the already-rebased copy (e.g. rebasing B+E to F: after B moves
       to B1, E's destination becomes B1 instead of F).
    2. If the destination itself has already been rebased in this run, chase
       it to its new location recorded in ``state``.
    """
    dest = destmap[rev]
    # Already-rebased revisions sharing rev's destination; an adjusted
    # destination may only point into this set.
    rebased = [
        s
        for s, d in state.items()
        if d > 0 and destmap[s] == dest and s not in skipped
    ]

    adjusted = []
    for parent in repo.changelog.parentrevs(rev):
        newdest = dest
        if parent != nullrev:
            # Latest already-rebased ancestor of this parent, if any.
            candidate = repo.revs(
                b'max(%ld and (::%d))', rebased, parent
            ).first()
            if candidate is not None:
                newdest = state[candidate]
        if newdest == dest and dest in state:
            # The destination itself was rebased; follow it.
            newdest = state[dest]
            if newdest == revtodo:
                # sortsource should produce an order that makes this impossible
                raise error.ProgrammingError(
                    b'rev %d should be rebased already at this time' % dest
                )
        adjusted.append(newdest)
    return adjusted
1614
1613
1615
1614
def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
    """
    Abort if rebase will create divergence or rebase is noop because of markers

    `rebaseobsrevs`: set of obsolete revision in source
    `rebaseobsskipped`: set of revisions from source skipped because they have
    successors in destination or no non-obsolete successor.
    """
    # Rebasing an obsolete node whose successors are not in dest would
    # create content divergence; refuse unless explicitly allowed.
    allowdivergence = ui.configbool(
        b'experimental', b'evolution.allowdivergence'
    )
    candidates = rebaseobsrevs - rebaseobsskipped
    if allowdivergence or not candidates:
        return
    divhashes = b",".join(bytes(repo[r]) for r in candidates)
    raise error.Abort(
        _(b"this rebase will cause divergences from: %s") % (divhashes,),
        hint=_(
            b"to force the rebase please set "
            b"experimental.evolution.allowdivergence=True"
        ),
    )
1636
1635
1637
1636
def successorrevs(unfi, rev):
    """yield revision numbers for successors of rev

    ``unfi`` must be an unfiltered repository: successors may be hidden in
    the filtered view. Successor nodes unknown to the local changelog are
    silently skipped.
    """
    assert unfi.filtername is None
    torev = unfi.changelog.index.get_rev
    node = unfi[rev].node()
    for succnode in obsutil.allsuccessors(unfi.obsstore, [node]):
        succrev = torev(succnode)
        if succrev is not None:
            yield succrev
1646
1645
1647
1646
def defineparents(repo, rev, destmap, state, skipped, obsskipped):
    """Return new parents and optionally a merge base for rev being rebased

    Returns a 3-tuple ``(p1, p2, base)`` where ``p1``/``p2`` are revision
    numbers (possibly nullrev) and ``base`` is a revision number or None.

    The destination specified by "dest" cannot always be used directly because
    previously rebase result could affect destination. For example,

          D E    rebase -r C+D+E -d B
          |/     C will be rebased to C'
        B C      D's new destination will be C' instead of B
        |/       E's new destination will be C' instead of B
        A

    The new parents of a merge is slightly more complicated. See the comment
    block below.
    """
    # use unfiltered changelog since successorrevs may return filtered nodes
    assert repo.filtername is None
    cl = repo.changelog
    isancestor = cl.isancestorrev

    dest = destmap[rev]
    oldps = repo.changelog.parentrevs(rev)  # old parents
    newps = [nullrev, nullrev]  # new parents
    dests = adjustdest(repo, rev, destmap, state, skipped)
    bases = list(oldps)  # merge base candidates, initially just old parents

    if all(r == nullrev for r in oldps[1:]):
        # For non-merge changeset, just move p to adjusted dest as requested.
        newps[0] = dests[0]
    else:
        # For merge changeset, if we move p to dests[i] unconditionally, both
        # parents may change and the end result looks like "the merge loses a
        # parent", which is a surprise. This is a limit because "--dest" only
        # accepts one dest per src.
        #
        # Therefore, only move p with reasonable conditions (in this order):
        #   1. use dest, if dest is a descendent of (p or one of p's successors)
        #   2. use p's rebased result, if p is rebased (state[p] > 0)
        #
        # Comparing with adjustdest, the logic here does some additional work:
        #   1. decide which parents will not be moved towards dest
        #   2. if the above decision is "no", should a parent still be moved
        #      because it was rebased?
        #
        # For example:
        #
        #     C    # "rebase -r C -d D" is an error since none of the parents
        #    /|    # can be moved. "rebase -r B+C -d D" will move C's parent
        #   A B D  # B (using rule "2."), since B will be rebased.
        #
        # The loop tries to be not rely on the fact that a Mercurial node has
        # at most 2 parents.
        for i, p in enumerate(oldps):
            np = p  # new parent
            if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
                np = dests[i]
            elif p in state and state[p] > 0:
                np = state[p]

            # "bases" only record "special" merge bases that cannot be
            # calculated from changelog DAG (i.e. isancestor(p, np) is False).
            # For example:
            #
            #   B'   # rebase -s B -d D, when B was rebased to B'. dest for C
            #   | C  # is B', but merge base for C is B, instead of
            #   D |  # changelog.ancestor(C, B') == A. If changelog DAG and
            #   | B  # "state" edges are merged (so there will be an edge from
            #   |/   # B to B'), the merge base is still ancestor(C, B') in
            #   A    # the merged graph.
            #
            # Also see https://bz.mercurial-scm.org/show_bug.cgi?id=1950#c8
            # which uses "virtual null merge" to explain this situation.
            if isancestor(p, np):
                bases[i] = nullrev

            # If one parent becomes an ancestor of the other, drop the ancestor
            for j, x in enumerate(newps[:i]):
                if x == nullrev:
                    continue
                if isancestor(np, x):  # CASE-1
                    np = nullrev
                elif isancestor(x, np):  # CASE-2
                    newps[j] = np
                    np = nullrev
                    # New parents forming an ancestor relationship does not
                    # mean the old parents have a similar relationship. Do not
                    # set bases[x] to nullrev.
                    bases[j], bases[i] = bases[i], bases[j]

            newps[i] = np

        # "rebasenode" updates to new p1, and the old p1 will be used as merge
        # base. If only p2 changes, merging using unchanged p1 as merge base is
        # suboptimal. Therefore swap parents to make the merge sane.
        if newps[1] != nullrev and oldps[0] == newps[0]:
            assert len(newps) == 2 and len(oldps) == 2
            newps.reverse()
            bases.reverse()

        # No parent change might be an error because we fail to make rev a
        # descendent of requested dest. This can happen, for example:
        #
        #     C    # rebase -r C -d D
        #    /|    # None of A and B will be changed to D and rebase fails.
        #   A B D
        if set(newps) == set(oldps) and dest not in newps:
            raise error.Abort(
                _(
                    b'cannot rebase %d:%s without '
                    b'moving at least one of its parents'
                )
                % (rev, repo[rev])
            )

    # Source should not be ancestor of dest. The check here guarantees it's
    # impossible. With multi-dest, the initial check does not cover complex
    # cases since we don't have abstractions to dry-run rebase cheaply.
    if any(p != nullrev and isancestor(rev, p) for p in newps):
        raise error.Abort(_(b'source is ancestor of destination'))

    # "rebasenode" updates to new p1, use the corresponding merge base.
    if bases[0] != nullrev:
        base = bases[0]
    else:
        base = None

    # Check if the merge will contain unwanted changes. That may happen if
    # there are multiple special (non-changelog ancestor) merge bases, which
    # cannot be handled well by the 3-way merge algorithm. For example:
    #
    #     F
    #    /|
    #   D E  # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
    #   | |  # as merge base, the difference between D and F will include
    #   B C  # C, so the rebased F will contain C surprisingly. If "E" was
    #   |/   #  chosen, the rebased F will contain B.
    #   A Z
    #
    # But our merge base candidates (D and E in above case) could still be
    # better than the default (ancestor(F, Z) == null). Therefore still
    # pick one (so choose p1 above).
    if sum(1 for b in set(bases) if b != nullrev) > 1:
        unwanted = [None, None]  # unwanted[i]: unwanted revs if choose bases[i]
        # NOTE(review): the loop variable deliberately rebinds ``base``; the
        # final choice is reassigned from bases[i] after the loop.
        for i, base in enumerate(bases):
            if base == nullrev:
                continue
            # Revisions in the side (not chosen as merge base) branch that
            # might contain "surprising" contents
            siderevs = list(
                repo.revs(b'((%ld-%d) %% (%d+%d))', bases, base, base, dest)
            )

            # If those revisions are covered by rebaseset, the result is good.
            # A merge in rebaseset would be considered to cover its ancestors.
            if siderevs:
                rebaseset = [
                    r for r, d in state.items() if d > 0 and r not in obsskipped
                ]
                merges = [
                    r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
                ]
                unwanted[i] = list(
                    repo.revs(
                        b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
                    )
                )

        # Choose a merge base that has a minimal number of unwanted revs.
        l, i = min(
            (len(revs), i)
            for i, revs in enumerate(unwanted)
            if revs is not None
        )
        base = bases[i]

        # newps[0] should match merge base if possible. Currently, if newps[i]
        # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
        # the other's ancestor. In that case, it's fine to not swap newps here.
        # (see CASE-1 and CASE-2 above)
        if i != 0 and newps[i] != nullrev:
            newps[0], newps[i] = newps[i], newps[0]

        # The merge will include unwanted revisions. Abort now. Revisit this if
        # we have a more advanced merge algorithm that handles multiple bases.
        if l > 0:
            unwanteddesc = _(b' or ').join(
                (
                    b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
                    for revs in unwanted
                    if revs is not None
                )
            )
            raise error.Abort(
                _(b'rebasing %d:%s will include unwanted changes from %s')
                % (rev, repo[rev], unwanteddesc)
            )

    repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))

    return newps[0], newps[1], base
1848
1847
1849
1848
def isagitpatch(repo, patchname):
    """Return true if the given patch is in git format"""
    patchpath = os.path.join(repo.mq.path, patchname)
    # A git-style patch always carries a "diff --git" header line.
    reader = patch.linereader(open(patchpath, b'rb'))
    return any(line.startswith(b'diff --git') for line in reader)
1857
1856
1858
1857
def updatemq(repo, state, skipped, **opts):
    """Update rebased mq patches - finalize and then import them"""
    mq = repo.mq
    original_series = mq.fullseries[:]
    rebasedpatches = {}  # rev -> (patch name, patch is git-format)
    skippedpatches = set()

    for applied in mq.applied:
        rev = repo[applied.node].rev()
        if rev in state:
            repo.ui.debug(
                b'revision %d is an mq patch (%s), finalize it.\n'
                % (rev, applied.name)
            )
            rebasedpatches[rev] = (
                applied.name,
                isagitpatch(repo, applied.name),
            )
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(applied.name)

    if rebasedpatches:
        mq.finish(repo, rebasedpatches.keys())

        # We must start import from the newest revision
        for rev in sorted(rebasedpatches, reverse=True):
            name, isgit = rebasedpatches[rev]
            if rev in skipped:
                # Rebased and skipped
                skippedpatches.add(name)
                continue
            repo.ui.note(
                _(b'updating mq patch %s to %d:%s\n')
                % (name, state[rev], repo[state[rev]])
            )
            mq.qimport(
                repo,
                (),
                patchname=name,
                git=isgit,
                rev=[b"%d" % state[rev]],
            )

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        mq.fullseries[:] = [
            entry
            for entry in original_series
            if mq.guard_re.split(entry, 1)[0] not in skippedpatches
        ]
        mq.seriesdirty = True
        mq.savedirty()
1911
1910
1912
1911
def storecollapsemsg(repo, collapsemsg):
    """Store the collapse message to allow recovery"""
    # A falsy message is normalized to an empty line so the file always exists.
    fp = repo.vfs(b"last-message.txt", b"w")
    fp.write(b"%s\n" % (collapsemsg or b''))
    fp.close()
1919
1918
1920
1919
def clearcollapsemsg(repo):
    """Remove the stored collapse message file, if any.

    A missing file is not an error (ignoremissing=True), so this is safe to
    call whether or not a collapse message was ever stored.
    """
    repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1924
1923
1925
1924
def restorecollapsemsg(repo, isabort):
    """Restore previously stored collapse message

    When the file is missing: during an abort we fall back to an empty
    message; otherwise a missing message is a hard error.
    """
    try:
        fp = repo.vfs(b"last-message.txt")
        collapsemsg = fp.readline().strip()
        fp.close()
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        if not isabort:
            raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
        # Oh well, just abort like normal
        collapsemsg = b''
    return collapsemsg
1941
1940
1942
1941
def clearstatus(repo):
    """Remove the status files"""
    # Make sure the active transaction won't write the state file back.
    transaction = repo.currenttransaction()
    if transaction:
        transaction.removefilegenerator(b'rebasestate')
    repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1950
1949
1951
1950
def needupdate(repo, state):
    '''check whether we should `update --clean` away from a merge, or if
    somehow the working dir got forcibly updated, e.g. by older hg'''
    parentrevs = [p.rev() for p in repo[None].parents()]

    # Not in a merge state at all -> nothing to clean up.
    if len(parentrevs) < 2:
        return False

    # We should be standing on the first as-of-yet unrebased commit.
    unrebased = [
        old for old, new in pycompat.iteritems(state) if new == nullrev
    ]
    return min(unrebased) in parentrevs
1969
1968
1970
1969
def sortsource(destmap):
    """yield source revisions in an order that we only rebase things once

    If source and destination overlaps, we should filter out revisions
    depending on other revisions which hasn't been rebased yet.

    Yield a sorted list of revisions each time.

    For example, when rebasing A to B, B to C. This function yields [B], then
    [A], indicating B needs to be rebased first.

    Raise if there is a cycle so the rebase is impossible.
    """
    remaining = set(destmap)
    while remaining:
        # A revision is ready once its destination no longer waits to be
        # rebased itself.
        batch = [r for r in sorted(remaining) if destmap[r] not in remaining]
        if not batch:
            raise error.Abort(_(b'source and destination form a cycle'))
        remaining.difference_update(batch)
        yield batch
1995
1994
1996
1995
1997 def buildstate(repo, destmap, collapse):
1996 def buildstate(repo, destmap, collapse):
1998 '''Define which revisions are going to be rebased and where
1997 '''Define which revisions are going to be rebased and where
1999
1998
2000 repo: repo
1999 repo: repo
2001 destmap: {srcrev: destrev}
2000 destmap: {srcrev: destrev}
2002 '''
2001 '''
2003 rebaseset = destmap.keys()
2002 rebaseset = destmap.keys()
2004 originalwd = repo[b'.'].rev()
2003 originalwd = repo[b'.'].rev()
2005
2004
2006 # This check isn't strictly necessary, since mq detects commits over an
2005 # This check isn't strictly necessary, since mq detects commits over an
2007 # applied patch. But it prevents messing up the working directory when
2006 # applied patch. But it prevents messing up the working directory when
2008 # a partially completed rebase is blocked by mq.
2007 # a partially completed rebase is blocked by mq.
2009 if b'qtip' in repo.tags():
2008 if b'qtip' in repo.tags():
2010 mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
2009 mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
2011 if set(destmap.values()) & mqapplied:
2010 if set(destmap.values()) & mqapplied:
2012 raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
2011 raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
2013
2012
2014 # Get "cycle" error early by exhausting the generator.
2013 # Get "cycle" error early by exhausting the generator.
2015 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
2014 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
2016 if not sortedsrc:
2015 if not sortedsrc:
2017 raise error.Abort(_(b'no matching revisions'))
2016 raise error.Abort(_(b'no matching revisions'))
2018
2017
2019 # Only check the first batch of revisions to rebase not depending on other
2018 # Only check the first batch of revisions to rebase not depending on other
2020 # rebaseset. This means "source is ancestor of destination" for the second
2019 # rebaseset. This means "source is ancestor of destination" for the second
2021 # (and following) batches of revisions are not checked here. We rely on
2020 # (and following) batches of revisions are not checked here. We rely on
2022 # "defineparents" to do that check.
2021 # "defineparents" to do that check.
2023 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
2022 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
2024 if not roots:
2023 if not roots:
2025 raise error.Abort(_(b'no matching revisions'))
2024 raise error.Abort(_(b'no matching revisions'))
2026
2025
2027 def revof(r):
2026 def revof(r):
2028 return r.rev()
2027 return r.rev()
2029
2028
2030 roots = sorted(roots, key=revof)
2029 roots = sorted(roots, key=revof)
2031 state = dict.fromkeys(rebaseset, revtodo)
2030 state = dict.fromkeys(rebaseset, revtodo)
2032 emptyrebase = len(sortedsrc) == 1
2031 emptyrebase = len(sortedsrc) == 1
2033 for root in roots:
2032 for root in roots:
2034 dest = repo[destmap[root.rev()]]
2033 dest = repo[destmap[root.rev()]]
2035 commonbase = root.ancestor(dest)
2034 commonbase = root.ancestor(dest)
2036 if commonbase == root:
2035 if commonbase == root:
2037 raise error.Abort(_(b'source is ancestor of destination'))
2036 raise error.Abort(_(b'source is ancestor of destination'))
2038 if commonbase == dest:
2037 if commonbase == dest:
2039 wctx = repo[None]
2038 wctx = repo[None]
2040 if dest == wctx.p1():
2039 if dest == wctx.p1():
2041 # when rebasing to '.', it will use the current wd branch name
2040 # when rebasing to '.', it will use the current wd branch name
2042 samebranch = root.branch() == wctx.branch()
2041 samebranch = root.branch() == wctx.branch()
2043 else:
2042 else:
2044 samebranch = root.branch() == dest.branch()
2043 samebranch = root.branch() == dest.branch()
2045 if not collapse and samebranch and dest in root.parents():
2044 if not collapse and samebranch and dest in root.parents():
2046 # mark the revision as done by setting its new revision
2045 # mark the revision as done by setting its new revision
2047 # equal to its old (current) revisions
2046 # equal to its old (current) revisions
2048 state[root.rev()] = root.rev()
2047 state[root.rev()] = root.rev()
2049 repo.ui.debug(b'source is a child of destination\n')
2048 repo.ui.debug(b'source is a child of destination\n')
2050 continue
2049 continue
2051
2050
2052 emptyrebase = False
2051 emptyrebase = False
2053 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2052 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2054 if emptyrebase:
2053 if emptyrebase:
2055 return None
2054 return None
2056 for rev in sorted(state):
2055 for rev in sorted(state):
2057 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2056 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2058 # if all parents of this revision are done, then so is this revision
2057 # if all parents of this revision are done, then so is this revision
2059 if parents and all((state.get(p) == p for p in parents)):
2058 if parents and all((state.get(p) == p for p in parents)):
2060 state[rev] = rev
2059 state[rev] = rev
2061 return originalwd, destmap, state
2060 return originalwd, destmap, state
2062
2061
2063
2062
2064 def clearrebased(
2063 def clearrebased(
2065 ui,
2064 ui,
2066 repo,
2065 repo,
2067 destmap,
2066 destmap,
2068 state,
2067 state,
2069 skipped,
2068 skipped,
2070 collapsedas=None,
2069 collapsedas=None,
2071 keepf=False,
2070 keepf=False,
2072 fm=None,
2071 fm=None,
2073 backup=True,
2072 backup=True,
2074 ):
2073 ):
2075 """dispose of rebased revision at the end of the rebase
2074 """dispose of rebased revision at the end of the rebase
2076
2075
2077 If `collapsedas` is not None, the rebase was a collapse whose result if the
2076 If `collapsedas` is not None, the rebase was a collapse whose result if the
2078 `collapsedas` node.
2077 `collapsedas` node.
2079
2078
2080 If `keepf` is not True, the rebase has --keep set and no nodes should be
2079 If `keepf` is not True, the rebase has --keep set and no nodes should be
2081 removed (but bookmarks still need to be moved).
2080 removed (but bookmarks still need to be moved).
2082
2081
2083 If `backup` is False, no backup will be stored when stripping rebased
2082 If `backup` is False, no backup will be stored when stripping rebased
2084 revisions.
2083 revisions.
2085 """
2084 """
2086 tonode = repo.changelog.node
2085 tonode = repo.changelog.node
2087 replacements = {}
2086 replacements = {}
2088 moves = {}
2087 moves = {}
2089 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2088 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2090
2089
2091 collapsednodes = []
2090 collapsednodes = []
2092 for rev, newrev in sorted(state.items()):
2091 for rev, newrev in sorted(state.items()):
2093 if newrev >= 0 and newrev != rev:
2092 if newrev >= 0 and newrev != rev:
2094 oldnode = tonode(rev)
2093 oldnode = tonode(rev)
2095 newnode = collapsedas or tonode(newrev)
2094 newnode = collapsedas or tonode(newrev)
2096 moves[oldnode] = newnode
2095 moves[oldnode] = newnode
2097 succs = None
2096 succs = None
2098 if rev in skipped:
2097 if rev in skipped:
2099 if stripcleanup or not repo[rev].obsolete():
2098 if stripcleanup or not repo[rev].obsolete():
2100 succs = ()
2099 succs = ()
2101 elif collapsedas:
2100 elif collapsedas:
2102 collapsednodes.append(oldnode)
2101 collapsednodes.append(oldnode)
2103 else:
2102 else:
2104 succs = (newnode,)
2103 succs = (newnode,)
2105 if succs is not None:
2104 if succs is not None:
2106 replacements[(oldnode,)] = succs
2105 replacements[(oldnode,)] = succs
2107 if collapsednodes:
2106 if collapsednodes:
2108 replacements[tuple(collapsednodes)] = (collapsedas,)
2107 replacements[tuple(collapsednodes)] = (collapsedas,)
2109 if fm:
2108 if fm:
2110 hf = fm.hexfunc
2109 hf = fm.hexfunc
2111 fl = fm.formatlist
2110 fl = fm.formatlist
2112 fd = fm.formatdict
2111 fd = fm.formatdict
2113 changes = {}
2112 changes = {}
2114 for oldns, newn in pycompat.iteritems(replacements):
2113 for oldns, newn in pycompat.iteritems(replacements):
2115 for oldn in oldns:
2114 for oldn in oldns:
2116 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2115 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2117 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2116 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2118 fm.data(nodechanges=nodechanges)
2117 fm.data(nodechanges=nodechanges)
2119 if keepf:
2118 if keepf:
2120 replacements = {}
2119 replacements = {}
2121 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2120 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2122
2121
2123
2122
2124 def pullrebase(orig, ui, repo, *args, **opts):
2123 def pullrebase(orig, ui, repo, *args, **opts):
2125 """Call rebase after pull if the latter has been invoked with --rebase"""
2124 """Call rebase after pull if the latter has been invoked with --rebase"""
2126 if opts.get('rebase'):
2125 if opts.get('rebase'):
2127 if ui.configbool(b'commands', b'rebase.requiredest'):
2126 if ui.configbool(b'commands', b'rebase.requiredest'):
2128 msg = _(b'rebase destination required by configuration')
2127 msg = _(b'rebase destination required by configuration')
2129 hint = _(b'use hg pull followed by hg rebase -d DEST')
2128 hint = _(b'use hg pull followed by hg rebase -d DEST')
2130 raise error.Abort(msg, hint=hint)
2129 raise error.Abort(msg, hint=hint)
2131
2130
2132 with repo.wlock(), repo.lock():
2131 with repo.wlock(), repo.lock():
2133 if opts.get('update'):
2132 if opts.get('update'):
2134 del opts['update']
2133 del opts['update']
2135 ui.debug(
2134 ui.debug(
2136 b'--update and --rebase are not compatible, ignoring '
2135 b'--update and --rebase are not compatible, ignoring '
2137 b'the update flag\n'
2136 b'the update flag\n'
2138 )
2137 )
2139
2138
2140 cmdutil.checkunfinished(repo, skipmerge=True)
2139 cmdutil.checkunfinished(repo, skipmerge=True)
2141 cmdutil.bailifchanged(
2140 cmdutil.bailifchanged(
2142 repo,
2141 repo,
2143 hint=_(
2142 hint=_(
2144 b'cannot pull with rebase: '
2143 b'cannot pull with rebase: '
2145 b'please commit or shelve your changes first'
2144 b'please commit or shelve your changes first'
2146 ),
2145 ),
2147 )
2146 )
2148
2147
2149 revsprepull = len(repo)
2148 revsprepull = len(repo)
2150 origpostincoming = commands.postincoming
2149 origpostincoming = commands.postincoming
2151
2150
2152 def _dummy(*args, **kwargs):
2151 def _dummy(*args, **kwargs):
2153 pass
2152 pass
2154
2153
2155 commands.postincoming = _dummy
2154 commands.postincoming = _dummy
2156 try:
2155 try:
2157 ret = orig(ui, repo, *args, **opts)
2156 ret = orig(ui, repo, *args, **opts)
2158 finally:
2157 finally:
2159 commands.postincoming = origpostincoming
2158 commands.postincoming = origpostincoming
2160 revspostpull = len(repo)
2159 revspostpull = len(repo)
2161 if revspostpull > revsprepull:
2160 if revspostpull > revsprepull:
2162 # --rev option from pull conflict with rebase own --rev
2161 # --rev option from pull conflict with rebase own --rev
2163 # dropping it
2162 # dropping it
2164 if 'rev' in opts:
2163 if 'rev' in opts:
2165 del opts['rev']
2164 del opts['rev']
2166 # positional argument from pull conflicts with rebase's own
2165 # positional argument from pull conflicts with rebase's own
2167 # --source.
2166 # --source.
2168 if 'source' in opts:
2167 if 'source' in opts:
2169 del opts['source']
2168 del opts['source']
2170 # revsprepull is the len of the repo, not revnum of tip.
2169 # revsprepull is the len of the repo, not revnum of tip.
2171 destspace = list(repo.changelog.revs(start=revsprepull))
2170 destspace = list(repo.changelog.revs(start=revsprepull))
2172 opts['_destspace'] = destspace
2171 opts['_destspace'] = destspace
2173 try:
2172 try:
2174 rebase(ui, repo, **opts)
2173 rebase(ui, repo, **opts)
2175 except error.NoMergeDestAbort:
2174 except error.NoMergeDestAbort:
2176 # we can maybe update instead
2175 # we can maybe update instead
2177 rev, _a, _b = destutil.destupdate(repo)
2176 rev, _a, _b = destutil.destupdate(repo)
2178 if rev == repo[b'.'].rev():
2177 if rev == repo[b'.'].rev():
2179 ui.status(_(b'nothing to rebase\n'))
2178 ui.status(_(b'nothing to rebase\n'))
2180 else:
2179 else:
2181 ui.status(_(b'nothing to rebase - updating instead\n'))
2180 ui.status(_(b'nothing to rebase - updating instead\n'))
2182 # not passing argument to get the bare update behavior
2181 # not passing argument to get the bare update behavior
2183 # with warning and trumpets
2182 # with warning and trumpets
2184 commands.update(ui, repo)
2183 commands.update(ui, repo)
2185 else:
2184 else:
2186 if opts.get('tool'):
2185 if opts.get('tool'):
2187 raise error.Abort(_(b'--tool can only be used with --rebase'))
2186 raise error.Abort(_(b'--tool can only be used with --rebase'))
2188 ret = orig(ui, repo, *args, **opts)
2187 ret = orig(ui, repo, *args, **opts)
2189
2188
2190 return ret
2189 return ret
2191
2190
2192
2191
2193 def _filterobsoleterevs(repo, revs):
2192 def _filterobsoleterevs(repo, revs):
2194 """returns a set of the obsolete revisions in revs"""
2193 """returns a set of the obsolete revisions in revs"""
2195 return set(r for r in revs if repo[r].obsolete())
2194 return set(r for r in revs if repo[r].obsolete())
2196
2195
2197
2196
2198 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2197 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2199 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2198 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2200
2199
2201 `obsoletenotrebased` is a mapping mapping obsolete => successor for all
2200 `obsoletenotrebased` is a mapping mapping obsolete => successor for all
2202 obsolete nodes to be rebased given in `rebaseobsrevs`.
2201 obsolete nodes to be rebased given in `rebaseobsrevs`.
2203
2202
2204 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2203 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2205 without a successor in destination.
2204 without a successor in destination.
2206
2205
2207 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2206 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2208 obsolete successors.
2207 obsolete successors.
2209 """
2208 """
2210 obsoletenotrebased = {}
2209 obsoletenotrebased = {}
2211 obsoletewithoutsuccessorindestination = set()
2210 obsoletewithoutsuccessorindestination = set()
2212 obsoleteextinctsuccessors = set()
2211 obsoleteextinctsuccessors = set()
2213
2212
2214 assert repo.filtername is None
2213 assert repo.filtername is None
2215 cl = repo.changelog
2214 cl = repo.changelog
2216 get_rev = cl.index.get_rev
2215 get_rev = cl.index.get_rev
2217 extinctrevs = set(repo.revs(b'extinct()'))
2216 extinctrevs = set(repo.revs(b'extinct()'))
2218 for srcrev in rebaseobsrevs:
2217 for srcrev in rebaseobsrevs:
2219 srcnode = cl.node(srcrev)
2218 srcnode = cl.node(srcrev)
2220 # XXX: more advanced APIs are required to handle split correctly
2219 # XXX: more advanced APIs are required to handle split correctly
2221 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2220 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2222 # obsutil.allsuccessors includes node itself
2221 # obsutil.allsuccessors includes node itself
2223 successors.remove(srcnode)
2222 successors.remove(srcnode)
2224 succrevs = {get_rev(s) for s in successors}
2223 succrevs = {get_rev(s) for s in successors}
2225 succrevs.discard(None)
2224 succrevs.discard(None)
2226 if succrevs.issubset(extinctrevs):
2225 if succrevs.issubset(extinctrevs):
2227 # all successors are extinct
2226 # all successors are extinct
2228 obsoleteextinctsuccessors.add(srcrev)
2227 obsoleteextinctsuccessors.add(srcrev)
2229 if not successors:
2228 if not successors:
2230 # no successor
2229 # no successor
2231 obsoletenotrebased[srcrev] = None
2230 obsoletenotrebased[srcrev] = None
2232 else:
2231 else:
2233 dstrev = destmap[srcrev]
2232 dstrev = destmap[srcrev]
2234 for succrev in succrevs:
2233 for succrev in succrevs:
2235 if cl.isancestorrev(succrev, dstrev):
2234 if cl.isancestorrev(succrev, dstrev):
2236 obsoletenotrebased[srcrev] = succrev
2235 obsoletenotrebased[srcrev] = succrev
2237 break
2236 break
2238 else:
2237 else:
2239 # If 'srcrev' has a successor in rebase set but none in
2238 # If 'srcrev' has a successor in rebase set but none in
2240 # destination (which would be catched above), we shall skip it
2239 # destination (which would be catched above), we shall skip it
2241 # and its descendants to avoid divergence.
2240 # and its descendants to avoid divergence.
2242 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2241 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2243 obsoletewithoutsuccessorindestination.add(srcrev)
2242 obsoletewithoutsuccessorindestination.add(srcrev)
2244
2243
2245 return (
2244 return (
2246 obsoletenotrebased,
2245 obsoletenotrebased,
2247 obsoletewithoutsuccessorindestination,
2246 obsoletewithoutsuccessorindestination,
2248 obsoleteextinctsuccessors,
2247 obsoleteextinctsuccessors,
2249 )
2248 )
2250
2249
2251
2250
2252 def abortrebase(ui, repo):
2251 def abortrebase(ui, repo):
2253 with repo.wlock(), repo.lock():
2252 with repo.wlock(), repo.lock():
2254 rbsrt = rebaseruntime(repo, ui)
2253 rbsrt = rebaseruntime(repo, ui)
2255 rbsrt._prepareabortorcontinue(isabort=True)
2254 rbsrt._prepareabortorcontinue(isabort=True)
2256
2255
2257
2256
2258 def continuerebase(ui, repo):
2257 def continuerebase(ui, repo):
2259 with repo.wlock(), repo.lock():
2258 with repo.wlock(), repo.lock():
2260 rbsrt = rebaseruntime(repo, ui)
2259 rbsrt = rebaseruntime(repo, ui)
2261 ms = mergemod.mergestate.read(repo)
2260 ms = mergemod.mergestate.read(repo)
2262 mergeutil.checkunresolved(ms)
2261 mergeutil.checkunresolved(ms)
2263 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2262 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2264 if retcode is not None:
2263 if retcode is not None:
2265 return retcode
2264 return retcode
2266 rbsrt._performrebase(None)
2265 rbsrt._performrebase(None)
2267 rbsrt._finishrebase()
2266 rbsrt._finishrebase()
2268
2267
2269
2268
2270 def summaryhook(ui, repo):
2269 def summaryhook(ui, repo):
2271 if not repo.vfs.exists(b'rebasestate'):
2270 if not repo.vfs.exists(b'rebasestate'):
2272 return
2271 return
2273 try:
2272 try:
2274 rbsrt = rebaseruntime(repo, ui, {})
2273 rbsrt = rebaseruntime(repo, ui, {})
2275 rbsrt.restorestatus()
2274 rbsrt.restorestatus()
2276 state = rbsrt.state
2275 state = rbsrt.state
2277 except error.RepoLookupError:
2276 except error.RepoLookupError:
2278 # i18n: column positioning for "hg summary"
2277 # i18n: column positioning for "hg summary"
2279 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2278 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2280 ui.write(msg)
2279 ui.write(msg)
2281 return
2280 return
2282 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2281 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2283 # i18n: column positioning for "hg summary"
2282 # i18n: column positioning for "hg summary"
2284 ui.write(
2283 ui.write(
2285 _(b'rebase: %s, %s (rebase --continue)\n')
2284 _(b'rebase: %s, %s (rebase --continue)\n')
2286 % (
2285 % (
2287 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2286 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2288 ui.label(_(b'%d remaining'), b'rebase.remaining')
2287 ui.label(_(b'%d remaining'), b'rebase.remaining')
2289 % (len(state) - numrebased),
2288 % (len(state) - numrebased),
2290 )
2289 )
2291 )
2290 )
2292
2291
2293
2292
2294 def uisetup(ui):
2293 def uisetup(ui):
2295 # Replace pull with a decorator to provide --rebase option
2294 # Replace pull with a decorator to provide --rebase option
2296 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2295 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2297 entry[1].append(
2296 entry[1].append(
2298 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2297 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2299 )
2298 )
2300 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2299 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2301 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2300 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2302 statemod.addunfinished(
2301 statemod.addunfinished(
2303 b'rebase',
2302 b'rebase',
2304 fname=b'rebasestate',
2303 fname=b'rebasestate',
2305 stopflag=True,
2304 stopflag=True,
2306 continueflag=True,
2305 continueflag=True,
2307 abortfunc=abortrebase,
2306 abortfunc=abortrebase,
2308 continuefunc=continuerebase,
2307 continuefunc=continuerebase,
2309 )
2308 )
@@ -1,4047 +1,4048 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from .pycompat import (
22 from .pycompat import (
23 getattr,
23 getattr,
24 open,
24 open,
25 setattr,
25 setattr,
26 )
26 )
27 from .thirdparty import attr
27 from .thirdparty import attr
28
28
29 from . import (
29 from . import (
30 bookmarks,
30 bookmarks,
31 changelog,
31 changelog,
32 copies,
32 copies,
33 crecord as crecordmod,
33 crecord as crecordmod,
34 dirstateguard,
34 dirstateguard,
35 encoding,
35 encoding,
36 error,
36 error,
37 formatter,
37 formatter,
38 logcmdutil,
38 logcmdutil,
39 match as matchmod,
39 match as matchmod,
40 merge as mergemod,
40 merge as mergemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 pathutil,
44 pathutil,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 repair,
47 repair,
48 revlog,
48 revlog,
49 rewriteutil,
49 rewriteutil,
50 scmutil,
50 scmutil,
51 smartset,
51 smartset,
52 state as statemod,
52 state as statemod,
53 subrepoutil,
53 subrepoutil,
54 templatekw,
54 templatekw,
55 templater,
55 templater,
56 util,
56 util,
57 vfs as vfsmod,
57 vfs as vfsmod,
58 )
58 )
59
59
60 from .utils import (
60 from .utils import (
61 dateutil,
61 dateutil,
62 stringutil,
62 stringutil,
63 )
63 )
64
64
65 if pycompat.TYPE_CHECKING:
65 if pycompat.TYPE_CHECKING:
66 from typing import (
66 from typing import (
67 Any,
67 Any,
68 Dict,
68 Dict,
69 )
69 )
70
70
71 for t in (Any, Dict):
71 for t in (Any, Dict):
72 assert t
72 assert t
73
73
74 stringio = util.stringio
74 stringio = util.stringio
75
75
76 # templates of common command options
76 # templates of common command options
77
77
78 dryrunopts = [
78 dryrunopts = [
79 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
79 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
80 ]
80 ]
81
81
82 confirmopts = [
82 confirmopts = [
83 (b'', b'confirm', None, _(b'ask before applying actions')),
83 (b'', b'confirm', None, _(b'ask before applying actions')),
84 ]
84 ]
85
85
86 remoteopts = [
86 remoteopts = [
87 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
87 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
88 (
88 (
89 b'',
89 b'',
90 b'remotecmd',
90 b'remotecmd',
91 b'',
91 b'',
92 _(b'specify hg command to run on the remote side'),
92 _(b'specify hg command to run on the remote side'),
93 _(b'CMD'),
93 _(b'CMD'),
94 ),
94 ),
95 (
95 (
96 b'',
96 b'',
97 b'insecure',
97 b'insecure',
98 None,
98 None,
99 _(b'do not verify server certificate (ignoring web.cacerts config)'),
99 _(b'do not verify server certificate (ignoring web.cacerts config)'),
100 ),
100 ),
101 ]
101 ]
102
102
103 walkopts = [
103 walkopts = [
104 (
104 (
105 b'I',
105 b'I',
106 b'include',
106 b'include',
107 [],
107 [],
108 _(b'include names matching the given patterns'),
108 _(b'include names matching the given patterns'),
109 _(b'PATTERN'),
109 _(b'PATTERN'),
110 ),
110 ),
111 (
111 (
112 b'X',
112 b'X',
113 b'exclude',
113 b'exclude',
114 [],
114 [],
115 _(b'exclude names matching the given patterns'),
115 _(b'exclude names matching the given patterns'),
116 _(b'PATTERN'),
116 _(b'PATTERN'),
117 ),
117 ),
118 ]
118 ]
119
119
120 commitopts = [
120 commitopts = [
121 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
121 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
122 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
122 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
123 ]
123 ]
124
124
125 commitopts2 = [
125 commitopts2 = [
126 (
126 (
127 b'd',
127 b'd',
128 b'date',
128 b'date',
129 b'',
129 b'',
130 _(b'record the specified date as commit date'),
130 _(b'record the specified date as commit date'),
131 _(b'DATE'),
131 _(b'DATE'),
132 ),
132 ),
133 (
133 (
134 b'u',
134 b'u',
135 b'user',
135 b'user',
136 b'',
136 b'',
137 _(b'record the specified user as committer'),
137 _(b'record the specified user as committer'),
138 _(b'USER'),
138 _(b'USER'),
139 ),
139 ),
140 ]
140 ]
141
141
142 commitopts3 = [
142 commitopts3 = [
143 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
143 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
144 (b'U', b'currentuser', None, _(b'record the current user as committer')),
144 (b'U', b'currentuser', None, _(b'record the current user as committer')),
145 ]
145 ]
146
146
147 formatteropts = [
147 formatteropts = [
148 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
148 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
149 ]
149 ]
150
150
151 templateopts = [
151 templateopts = [
152 (
152 (
153 b'',
153 b'',
154 b'style',
154 b'style',
155 b'',
155 b'',
156 _(b'display using template map file (DEPRECATED)'),
156 _(b'display using template map file (DEPRECATED)'),
157 _(b'STYLE'),
157 _(b'STYLE'),
158 ),
158 ),
159 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
159 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
160 ]
160 ]
161
161
162 logopts = [
162 logopts = [
163 (b'p', b'patch', None, _(b'show patch')),
163 (b'p', b'patch', None, _(b'show patch')),
164 (b'g', b'git', None, _(b'use git extended diff format')),
164 (b'g', b'git', None, _(b'use git extended diff format')),
165 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
165 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
166 (b'M', b'no-merges', None, _(b'do not show merges')),
166 (b'M', b'no-merges', None, _(b'do not show merges')),
167 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
167 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
168 (b'G', b'graph', None, _(b"show the revision DAG")),
168 (b'G', b'graph', None, _(b"show the revision DAG")),
169 ] + templateopts
169 ] + templateopts
170
170
171 diffopts = [
171 diffopts = [
172 (b'a', b'text', None, _(b'treat all files as text')),
172 (b'a', b'text', None, _(b'treat all files as text')),
173 (b'g', b'git', None, _(b'use git extended diff format')),
173 (b'g', b'git', None, _(b'use git extended diff format')),
174 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
174 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
175 (b'', b'nodates', None, _(b'omit dates from diff headers')),
175 (b'', b'nodates', None, _(b'omit dates from diff headers')),
176 ]
176 ]
177
177
178 diffwsopts = [
178 diffwsopts = [
179 (
179 (
180 b'w',
180 b'w',
181 b'ignore-all-space',
181 b'ignore-all-space',
182 None,
182 None,
183 _(b'ignore white space when comparing lines'),
183 _(b'ignore white space when comparing lines'),
184 ),
184 ),
185 (
185 (
186 b'b',
186 b'b',
187 b'ignore-space-change',
187 b'ignore-space-change',
188 None,
188 None,
189 _(b'ignore changes in the amount of white space'),
189 _(b'ignore changes in the amount of white space'),
190 ),
190 ),
191 (
191 (
192 b'B',
192 b'B',
193 b'ignore-blank-lines',
193 b'ignore-blank-lines',
194 None,
194 None,
195 _(b'ignore changes whose lines are all blank'),
195 _(b'ignore changes whose lines are all blank'),
196 ),
196 ),
197 (
197 (
198 b'Z',
198 b'Z',
199 b'ignore-space-at-eol',
199 b'ignore-space-at-eol',
200 None,
200 None,
201 _(b'ignore changes in whitespace at EOL'),
201 _(b'ignore changes in whitespace at EOL'),
202 ),
202 ),
203 ]
203 ]
204
204
205 diffopts2 = (
205 diffopts2 = (
206 [
206 [
207 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
207 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
208 (
208 (
209 b'p',
209 b'p',
210 b'show-function',
210 b'show-function',
211 None,
211 None,
212 _(b'show which function each change is in'),
212 _(b'show which function each change is in'),
213 ),
213 ),
214 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
214 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
215 ]
215 ]
216 + diffwsopts
216 + diffwsopts
217 + [
217 + [
218 (
218 (
219 b'U',
219 b'U',
220 b'unified',
220 b'unified',
221 b'',
221 b'',
222 _(b'number of lines of context to show'),
222 _(b'number of lines of context to show'),
223 _(b'NUM'),
223 _(b'NUM'),
224 ),
224 ),
225 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
225 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
226 (
226 (
227 b'',
227 b'',
228 b'root',
228 b'root',
229 b'',
229 b'',
230 _(b'produce diffs relative to subdirectory'),
230 _(b'produce diffs relative to subdirectory'),
231 _(b'DIR'),
231 _(b'DIR'),
232 ),
232 ),
233 ]
233 ]
234 )
234 )
235
235
236 mergetoolopts = [
236 mergetoolopts = [
237 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
237 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
238 ]
238 ]
239
239
240 similarityopts = [
240 similarityopts = [
241 (
241 (
242 b's',
242 b's',
243 b'similarity',
243 b'similarity',
244 b'',
244 b'',
245 _(b'guess renamed files by similarity (0<=s<=100)'),
245 _(b'guess renamed files by similarity (0<=s<=100)'),
246 _(b'SIMILARITY'),
246 _(b'SIMILARITY'),
247 )
247 )
248 ]
248 ]
249
249
250 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
250 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
251
251
252 debugrevlogopts = [
252 debugrevlogopts = [
253 (b'c', b'changelog', False, _(b'open changelog')),
253 (b'c', b'changelog', False, _(b'open changelog')),
254 (b'm', b'manifest', False, _(b'open manifest')),
254 (b'm', b'manifest', False, _(b'open manifest')),
255 (b'', b'dir', b'', _(b'open directory manifest')),
255 (b'', b'dir', b'', _(b'open directory manifest')),
256 ]
256 ]
257
257
258 # special string such that everything below this line will be ingored in the
258 # special string such that everything below this line will be ingored in the
259 # editor text
259 # editor text
260 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
260 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
261
261
262
262
263 def check_at_most_one_arg(opts, *args):
263 def check_at_most_one_arg(opts, *args):
264 """abort if more than one of the arguments are in opts
264 """abort if more than one of the arguments are in opts
265
265
266 Returns the unique argument or None if none of them were specified.
266 Returns the unique argument or None if none of them were specified.
267 """
267 """
268 previous = None
268 previous = None
269 for x in args:
269 for x in args:
270 if opts.get(x):
270 if opts.get(x):
271 x = x.replace(b'_', b'-')
271 if previous:
272 if previous:
272 raise error.Abort(
273 raise error.Abort(
273 _(b'cannot specify both --%s and --%s') % (previous, x)
274 _(b'cannot specify both --%s and --%s') % (previous, x)
274 )
275 )
275 previous = x
276 previous = x
276 return previous
277 return previous
277
278
278
279
279 def check_incompatible_arguments(opts, first, *others):
280 def check_incompatible_arguments(opts, first, *others):
280 """abort if the first argument is given along with any of the others
281 """abort if the first argument is given along with any of the others
281
282
282 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
283 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
283 among themselves.
284 among themselves.
284 """
285 """
285 for other in others:
286 for other in others:
286 check_at_most_one_arg(opts, first, other)
287 check_at_most_one_arg(opts, first, other)
287
288
288
289
def resolvecommitoptions(ui, opts):
    """Normalize interdependent commit options in ``opts``.

    Fills in ``date``/``user`` from --currentdate/--currentuser (after
    checking they were not given together).  Returns True when ``date``
    was set only because the ``rewrite.update-timestamp`` config option
    is enabled, i.e. a date-only change should be ignored by callers.
    """
    check_at_most_one_arg(opts, b'date', b'currentdate')
    check_at_most_one_arg(opts, b'user', b'currentuser')

    # did rewrite.update-timestamp (rather than the user) pick the date?
    updatetimestamp = False

    if opts.get(b'currentdate'):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
    elif (
        not opts.get(b'date')
        and ui.configbool(b'rewrite', b'update-timestamp')
        # currentdate=False (explicitly disabled) must not trigger this
        and opts.get(b'currentdate') is None
    ):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
        updatetimestamp = True

    if opts.get(b'currentuser'):
        opts[b'user'] = ui.username()

    return updatetimestamp
314
315
315
316
def checknotesize(ui, opts):
    """Validate the --note option: at most 255 bytes and a single line."""
    note = opts.get(b'note')
    if note:
        if len(note) > 255:
            raise error.Abort(
                _(b"cannot store a note of more than 255 bytes")
            )
        if b'\n' in note:
            raise error.Abort(_(b"note cannot contain a newline"))
327
328
328
329
def ishunk(x):
    """Return True if ``x`` is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
332
333
333
334
def newandmodified(chunks, originalchunks):
    """Collect files that were newly added and then modified while recording.

    Returns a pair of sets: the filenames of hunks that create new files
    and do not appear in ``originalchunks``, plus any other files named
    by those hunks' headers (e.g. rename sources) that must also be
    restored for the patch to apply.
    """
    newlyaddedandmodified = set()
    alsorestore = set()
    for chunk in chunks:
        if not ishunk(chunk):
            continue
        header = chunk.header
        if header.isnewfile() and chunk not in originalchunks:
            newlyaddedandmodified.add(header.filename())
            alsorestore.update(set(header.files()) - {header.filename()})
    return newlyaddedandmodified, alsorestore
348
349
349
350
def parsealiases(cmd):
    """Split a b"name|alias1|alias2" command spec into a list of names."""
    return cmd.split(b"|")
352
353
353
354
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it when done.
    """
    oldwrite = ui.write

    def wrapped(*args, **kw):
        # run every chunk through difflabel so diff lines pick up
        # their color labels before being written out
        label = kw.pop('label', b'')
        for chunk, l in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + l)

    setattr(ui, 'write', wrapped)
    return oldwrite
368
369
369
370
def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    """Filter hunks, using the curses UI when available and requested.

    Falls back to the text-mode filter when curses is not in use, or when
    the curses selector raises fallbackerror.
    """
    try:
        if usecurses:
            if testfile:
                # test mode: drive the chunk selector from a script file
                selector = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                selector = crecordmod.chunkselector

            return crecordmod.filterpatch(
                ui, originalhunks, selector, operation
            )
    except crecordmod.fallbackerror as e:
        ui.warn(b'%s\n' % e.message)  # pytype: disable=attribute-error
        ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)
388
389
389
390
def recordfilter(ui, originalhunks, match, operation=None):
    """Prompt the user to filter ``originalhunks``.

    Returns (selectedhunks, newopts).  *operation* is used to build ui
    messages indicating what kind of filtering the user is doing:
    reverting, committing, shelving, etc. (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config(b'experimental', b'crecordtest')
    # temporarily wrap ui.write so the displayed diff gets colorized
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(
            ui, originalhunks, usecurses, testfile, match, operation
        )
    finally:
        ui.write = oldwrite
407
408
408
409
def dorecord(
    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
):
    """Interactively select changes and delegate the commit to *commitfunc*.

    *filterfn* is used to let the user pick hunks; *cmdsuggest* is the
    command name suggested when the ui is not interactive; *backupall*
    forces backing up every changed file instead of only the ones being
    touched by the selected hunks.
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _(b'running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _(b'running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get(b'user'):
        ui.username()  # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if not opts.get(b'interactive-unshelve'):
            checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(
                _(
                    b'cannot partially commit a merge '
                    b'(use "hg commit" instead)'
                )
            )

        def fail(f, msg):
            raise error.Abort(b'%s: %s' % (f, msg))

        force = opts.get(b'force')
        if not force:
            match = matchmod.badmatch(match, fail)

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status, so work on a copy
            tmpstatus = scmutil.status(
                copymod.copy(status.modified),
                copymod.copy(status.added),
                copymod.copy(status.removed),
                copymod.copy(status.deleted),
                copymod.copy(status.unknown),
                copymod.copy(status.ignored),
                copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
            )

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True
            )
            for s in subs:
                if s in commitsubs:
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, match, status, fail)
        diffopts = patch.difffeatureopts(
            ui,
            opts=opts,
            whitespace=True,
            section=b'commands',
            configprefix=b'commit.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks, match)
        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir. We also will need to restore
        # files that were the sources of renames so that the patch application
        # works.
        newlyaddedandmodifiedfiles, alsorestore = newandmodified(
            chunks, originalchunks
        )
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers without files() (e.g. bare hunks) are skipped
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_(b'no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [
                f
                for f in newfiles
                if f in modified or f in newlyaddedandmodifiedfiles
            ]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join(b'record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the backup directory may survive from a previous run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(
                    prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
                )
                os.close(fd)
                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get(b'review', False):
                patchtext = (
                    crecordmod.diffhelptext
                    + crecordmod.patchhelptext
                    + fp.read()
                )
                reviewedpatch = ui.edit(
                    patchtext, b"", action=b"diff", repopath=repo.path
                )
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
                mergemod.update(
                    repo,
                    repo.dirstate.p1(),
                    branchmerge=False,
                    force=True,
                    matcher=m,
                )

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(b'applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in pycompat.iteritems(backups):
                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == b'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # restoring backups is best-effort; never mask the real
                # outcome of the record operation with cleanup errors
                pass

    def recordinwlock(ui, repo, message, match, opts):
        """Run recordfunc() while holding the working-copy lock."""
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
639
640
640
641
class dirnode(object):
    """A directory in the user's working copy, tracked for status tersing.

    path is the path to the directory, without a trailing '/'

    statuses is a set of statuses of all files in this directory (this
    includes all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs maps a sub-directory name to its own dirnode object
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record a file as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Add a file to this directory or to the right subdirectory.

        When the file is not a direct child, we recurse into (creating if
        necessary) the dirnode of which it is a direct child and add it
        there.
        """
        if b'/' not in filename:
            # no path separator: the file lives right here
            self._addfileindir(filename, status)
        else:
            child, remainder = filename.split(b'/', 1)

            # create the subdirectory's dirnode on first sight
            node = self.subdirs.get(child)
            if node is None:
                node = dirnode(pathutil.join(self.path, child))
                self.subdirs[child] = node

            node.addfile(remainder, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) pairs, tersing this directory if possible.

        terseargs is the string of status abbreviations the user passed
        with the `--terse` flag.

        If every file under this directory (recursively) shares a single
        status and the user asked to terse that status, one entry for the
        whole directory is produced, with the path ending in '/'.
        Otherwise the direct children are yielded individually and each
        subdirectory is walked recursively.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when this status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + b'/'
                return

        # direct children of this directory
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # then recurse into every subdirectory
        for child in self.subdirs.values():
            for st, fpath in child.tersewalk(terseargs):
                yield st, fpath
740
741
741
742
def tersedir(statuslist, terseargs):
    """Terse the status when all files in a directory share that status.

    statuslist is a scmutil.status() object holding a list of files for
    each status.  terseargs is the string the user passed to the
    `--terse` flag.

    Builds a tree of dirnode objects, each of which records enough
    information to decide whether its directory can be tersed, then
    flattens the result back into a scmutil.status().
    """
    # the order matters here as that is used to produce the final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # validate the user-supplied --terse argument
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.Abort(_(b"'%s' not recognized") % ch)

    # dirnode for the root of the repo
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    # feed every file into the tree, keyed by its one-letter status
    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # the root dir itself is never tersed, so add its direct files
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk each sub-directory, tersing wherever possible
    for subdir in rootobj.subdirs.values():
        for st, fpath in subdir.tersewalk(terseargs):
            tersedict[st].append(fpath)

    # emit the per-status lists sorted, in the canonical status order
    tersedlist = [sorted(tersedict[st]) for st in allst]

    return scmutil.status(*tersedlist)
797
798
798
799
799 def _commentlines(raw):
800 def _commentlines(raw):
800 '''Surround lineswith a comment char and a new line'''
801 '''Surround lineswith a comment char and a new line'''
801 lines = raw.splitlines()
802 lines = raw.splitlines()
802 commentedlines = [b'# %s' % line for line in lines]
803 commentedlines = [b'# %s' % line for line in lines]
803 return b'\n'.join(commentedlines) + b'\n'
804 return b'\n'.join(commentedlines) + b'\n'
804
805
805
806
@attr.s(frozen=True)
class morestatus(object):
    """Extra `hg status` output describing an unfinished operation.

    Built by readmorestatus(); rendered through a formatter (`fm`).
    """

    # repository root path, used to print unresolved paths relative to cwd
    reporoot = attr.ib()
    # name of the unfinished operation (e.g. from the repo state files)
    unfinishedop = attr.ib()
    # human-readable hint message for the unfinished operation, or None
    unfinishedmsg = attr.ib()
    # whether a merge state is active
    inmergestate = attr.ib()
    # sorted unresolved paths when in a merge state (presumably None
    # otherwise -- see readmorestatus)
    unresolvedpaths = attr.ib()
    # label applied to all plain-text morestatus output
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Mark ``path`` as unresolved in the formatter output if it is."""
        if self.inmergestate and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Emit the trailing morestatus item: state, conflicts, and hint."""
        fm.startitem()
        fm.data(
            itemtype=b'morestatus',
            unfinished=self.unfinishedop,
            unfinishedmsg=self.unfinishedmsg,
        )

        statemsg = (
            _(b'The repository is in an unfinished *%s* state.')
            % self.unfinishedop
        )
        fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)

        self._formatconflicts(fm)
        if self.unfinishedmsg:
            fm.plain(
                b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
            )

    def _formatconflicts(self, fm):
        """Emit the list of unresolved merge conflicts, when merging."""
        if not self.inmergestate:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b'    %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            msg = (
                _(
                    '''Unresolved merge conflicts:

%s

To mark files as resolved:  hg resolve --mark FILE'''
                )
                % mergeliststr
            )
        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
865
866
866
867
def readmorestatus(repo):
    """Build a morestatus snapshot, or return None when nothing is
    unfinished in the repository."""
    statetuple = statemod.getrepostate(repo)
    if not statetuple:
        return None
    opname, opmsg = statetuple

    # Only collect unresolved paths when a merge is actually in progress.
    mergestate = mergemod.mergestate.read(repo)
    if mergestate.active():
        unresolved = sorted(mergestate.unresolved())
    else:
        unresolved = None

    return morestatus(
        repo.root,
        opname,
        opmsg,
        unresolved is not None,
        unresolved,
    )
885
886
886
887
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept an unambiguous prefix of any alias
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue

        isdebug = aliases[0].startswith(b"debug") or found.startswith(b"debug")
        if isdebug:
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands only surface when no normal command matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
924
925
925
926
def findcmd(cmd, table, strict=True):
    """Look up *cmd* in *table* and return (aliases, command table entry).

    Raises AmbiguousCommand when several commands match and UnknownCommand
    when none does.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    # an exact match wins outright
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))
    if choice:
        # exactly one (prefix) match
        return list(choice.values())[0]
    raise error.UnknownCommand(cmd, allcmds)
941
942
942
943
def changebranch(ui, repo, revs, label):
    """Change the branch name of the given revs to *label*.

    The revisions must be a linear, rewritable stack of topological heads;
    each changeset is recreated on the new branch and the originals are
    cleaned up via scmutil.cleanupnodes (obsmarkers/strip), moving
    bookmarks and the working copy along.
    """

    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort(b"empty revision set")
        roots = repo.revs(b'roots(%ld)', revs)
        # more than one root means the set is not linear
        if len(roots) > 1:
            raise error.Abort(
                _(b"cannot change branch of non-linear revisions")
            )
        # refuse public/immutable changesets up front
        rewriteutil.precheck(repo, revs, b'change branch of')

        root = repo[roots.first()]
        rpb = {parent.branch() for parent in root.parents()}
        # reusing an existing branch name is only allowed when continuing
        # the parent's branch
        if label not in rpb and label in repo.branchmap():
            raise error.Abort(_(b"a branch of the same name already exists"))

        if repo.revs(b'obsolete() and %ld', revs):
            raise error.Abort(
                _(b"cannot change branch of a obsolete changeset")
            )

        # make sure only topological heads
        if repo.revs(b'heads(%ld) - head()', revs):
            raise error.Abort(_(b"cannot change branch in middle of a stack"))

        # maps old node -> (new node,) for everything rewritten so far
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context

        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # nothing to do if ctx is already on the target branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                # reuse file content from the original changeset; a lookup
                # failure means the file was removed there
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug(
                b"changing branch of '%s' from '%s' to '%s'\n"
                % (hex(ctx.node()), oldbranch, label)
            )
            extra = ctx.extra()
            # record provenance of the rewrite in the changeset extras
            extra[b'branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(
                repo,
                (p1, p2),
                ctx.description(),
                ctx.files(),
                filectxfn,
                user=ctx.user(),
                date=ctx.date(),
                extra=extra,
                branch=label,
            )

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug(b'new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(
            repo, replacements, b'branch-change', fixphase=True
        )

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg

                hg.update(repo, newid[0], quietempty=True)

    ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1041
1042
1042
1043
def findrepo(p):
    """Walk upward from *p* and return the first directory containing a
    '.hg' directory, or None when the filesystem root is reached."""
    while True:
        if os.path.isdir(os.path.join(p, b".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point: we hit the root without a repo
            return None
        p = parent
1050
1051
1051
1052
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    # a second dirstate parent means an uncommitted merge is pending
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)

    st = repo.status()
    if any([st.modified, st.added, st.removed, st.deleted]):
        raise error.Abort(_(b'uncommitted changes'), hint=hint)

    # each subrepo gets a chance to veto as well
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
1069
1070
1070
1071
def logmessage(ui, opts):
    """Extract the commit message from the -m/--message or -l/--logfile
    option (at most one of the two may be given)."""
    check_at_most_one_arg(opts, b'message', b'logfile')

    message = opts.get(b'message')
    logfile = opts.get(b'logfile')

    if message or not logfile:
        return message

    # no inline message: read it from the log file ('-' means stdin)
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        return b'\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(
            _(b"can't read commit message '%s': %s")
            % (logfile, encoding.strtolocal(inst.strerror))
        )
1091
1092
1092
1093
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (b".merge" if ismerge else b".normal")
1109
1110
1110
1111
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """Pick the commit-message editor implied by '--edit' and friends.

    'finishdesc' is a function called with the edited commit message
    (the new changeset's description) right after editing, before the
    empty-message check; whatever it returns is what gets stored in
    history. This allows rewriting the description before storing.

    'extramsg' replaces the 'Leave message empty to abort commit' line
    shown in the editor; the 'HG: ' prefix and EOL are added
    automatically.

    'editform' is a dot-separated list of names, to distinguish the
    purpose of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever one of
    'finishdesc' or 'extramsg' is specified, because they are specific
    for usage in MQ.
    """
    if edit or finishdesc or extramsg:

        def forceeditor(r, c, s):
            return commitforceeditor(
                r,
                c,
                s,
                finishdesc=finishdesc,
                extramsg=extramsg,
                editform=editform,
            )

        return forceeditor
    if editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    return commiteditor
1141
1142
1142
1143
def _escapecommandtemplate(tmpl):
    """Escape the literal (string) segments of a template so the result
    is safe to expand for command-line use; template constructs pass
    through untouched."""
    parts = []
    for typ, start, end in templater.scantemplate(tmpl, raw=True):
        segment = tmpl[start:end]
        if typ == b'string':
            segment = stringutil.escapestr(segment)
        parts.append(segment)
    return b''.join(parts)
1151
1152
1152
1153
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        return tmpl
    # escape literal segments first so backslashes survive templating
    templ = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return templ.renderdefault(props)
1170
1171
1171
1172
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset

    Each props item must be a stringify-able value or a callable returning
    such value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(
        repo.ui, tmpl, defaults=templatekw.keywords, resources=resources
    )
    # the changeset itself is always available to the template
    mapping = {b'ctx': ctx}
    if props:
        mapping.update(props)
    return templ.renderdefault(mapping)
1187
1188
1188
1189
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # map of %-escape character -> replacement template fragment
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # some escapes are only meaningful when the corresponding value is known
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # zero-pad the sequence number to the width of the total count
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    # walk the template segments; only literal string parts get %-expansion
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # keep template constructs (e.g. {tags % "{tag}"}) untouched
            newname.append(pat[start:end])
            continue
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                # no more escapes: emit the rest of this literal segment
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            # literal text up to the escape, then the expanded escape
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # a trailing bare '%' has no escape character after it
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            c = pat[n + 1 : n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % c
                )
    return b''.join(newname)
1258
1259
1259
1260
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format output filename *pat* against *ctx*."""
    if not pat:
        return pat
    tmpl = _buildfntemplate(pat, **props)
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1268
1269
1269
1270
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        # empty/None conventionally means the standard streams
        return True
    return pat == b'-'
1273
1274
1274
1275
1275 class _unclosablefile(object):
1276 class _unclosablefile(object):
1276 def __init__(self, fp):
1277 def __init__(self, fp):
1277 self._fp = fp
1278 self._fp = fp
1278
1279
1279 def close(self):
1280 def close(self):
1280 pass
1281 pass
1281
1282
1282 def __iter__(self):
1283 def __iter__(self):
1283 return iter(self._fp)
1284 return iter(self._fp)
1284
1285
1285 def __getattr__(self, attr):
1286 def __getattr__(self, attr):
1286 return getattr(self._fp, attr)
1287 return getattr(self._fp, attr)
1287
1288
1288 def __enter__(self):
1289 def __enter__(self):
1289 return self
1290 return self
1290
1291
1291 def __exit__(self, exc_type, exc_value, exc_tb):
1292 def __exit__(self, exc_type, exc_value, exc_tb):
1292 pass
1293 pass
1293
1294
1294
1295
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open the output/input file described by *pat* for *ctx*.

    '-' or an empty pat yields the ui's stdio wrapped so close() is a
    no-op; anything else is %-expanded and opened with *mode*.
    """
    if not isstdiofilename(pat):
        fn = makefilename(ctx, pat, **props)
        return open(fn, mode)

    repo = ctx.repo()
    writable = mode not in (b'r', b'rb')
    fp = repo.ui.fout if writable else repo.ui.fin
    return _unclosablefile(fp)
1307
1308
1308
1309
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    Exactly one of the --changelog/--manifest/--dir selector options or a
    filename may be used; conflicting combinations abort. With
    'returnrevlog' the underlying revlog is unwrapped (or, with no
    repository, opened straight from the filesystem).
    """
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']
    msg = None
    # validate the mutually exclusive selector flags first
    if cl and mf:
        msg = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered so hidden changesets remain reachable
            r = repo.unfiltered().changelog
        elif dir:
            if b'treemanifest' not in repo.requirements:
                raise error.Abort(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            # tree manifest directories are keyed with a trailing slash
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, b'_revlog'):
            # unwrap storage objects backed by a private revlog
            r = r._revlog  # pytype: disable=attribute-error
        elif r is not None:
            raise error.Abort(_(b'%r does not appear to be a revlog') % r)

    if not r:
        if not returnrevlog:
            raise error.Abort(_(b'cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_(b"revlog '%s' not found") % file_)
        # last resort: open the revlog index directly from the filesystem,
        # replacing the '.d' data suffix with the '.i' index suffix
        r = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
        )
    return r
1376
1377
1377
1378
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    In most cases, a caller cares about the main storage object - not the
    revlog backing it. Therefore, this function should only be used by code
    that needs to examine low-level revlog implementation details. e.g. debug
    commands.
    """
    # thin convenience wrapper: all validation happens in openstorage()
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1389
1390
1390
1391
def copy(ui, repo, pats, opts, rename=False):
    """Copy or rename working-directory files matching ``pats`` to the last
    pattern in ``pats`` (the destination).

    Implements the shared logic behind ``hg copy`` and ``hg rename``
    (``rename=True``). Returns True if any individual copy failed, so the
    caller can turn that into a non-zero exit status.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abstarget -> abssrc, used to detect colliding targets
    after = opts.get(b"after")
    dryrun = opts.get(b"dry_run")
    wctx = repo[None]

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    def walkpat(pat):
        # Expand one source pattern into a list of (abs, rel, exact) tuples,
        # warning about (and skipping) unmanaged or removed files.
        srcs = []
        if after:
            # with --after a removed source is fine; only unknown is bad
            badstates = b'?'
        else:
            badstates = b'?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = uipathfn(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == b'?':
                    ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
                if exact and state == b'r':
                    ui.warn(
                        _(
                            b'%s: not copying - file has been marked for'
                            b' remove\n'
                        )
                        % rel
                    )
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Perform (or record) a single copy/rename. Returns True on failure.
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if b'/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit(b'/', 1)
            abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(
                _(b'%s: not overwriting - %s collides with %s\n')
                % (
                    reltarget,
                    repo.pathto(abssrc, cwd),
                    repo.pathto(prevsrc, cwd),
                )
            )
            return True  # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
                abstarget
            ):
                # case-only rename (a => A on a case-insensitive filesystem)
                if not rename:
                    ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
                    return True  # report a failure
                exists = False
                samefile = True

        if not after and exists or after and state in b'mn':
            if not opts[b'force']:
                if state in b'mn':
                    msg = _(b'%s: not overwriting - file already committed\n')
                    if after:
                        flags = b'--after --force'
                    else:
                        flags = b'--force'
                    if rename:
                        hint = (
                            _(
                                b"('hg rename %s' to replace the file by "
                                b'recording a rename)\n'
                            )
                            % flags
                        )
                    else:
                        hint = (
                            _(
                                b"('hg copy %s' to replace the file by "
                                b'recording a copy)\n'
                            )
                            % flags
                        )
                else:
                    msg = _(b'%s: not overwriting - file exists\n')
                    if rename:
                        hint = _(
                            b"('hg rename --after' to record the rename)\n"
                        )
                    else:
                        hint = _(b"('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True  # report a failure

        if after:
            # --after: only record the operation; the target must already exist
            if not exists:
                if rename:
                    ui.warn(
                        _(b'%s: not recording move - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                else:
                    ui.warn(
                        _(b'%s: not recording copy - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                return True  # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or b'.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # two-step rename through a temp name handles the
                    # case-only rename on case-insensitive filesystems
                    tmp = target + b"~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(
                        _(b'%s: cannot copy - %s\n')
                        % (relsrc, encoding.strtolocal(inst.strerror))
                    )
                    return True  # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(
            ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
        )
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # Like targetpathfn, but for --after the source may no longer exist on
        # disk, so the target layout is guessed by scoring existing targets.
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets exist for this strip
                    # length; the layout with more hits wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(
                        dest, os.path.basename(util.localpath(p))
                    )
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_(b'no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_(b'no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(
                _(
                    b'with multiple sources, destination must be an '
                    b'existing directory'
                )
            )
        if util.endswithsep(dest):
            raise error.Abort(_(b'destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_(b'no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0
1674
1675
1675
1676
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = []  # run before commit
extrapostimport = []  # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1696
1697
1697
1698
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)
    :returns: a ``(summary message, committed node or None, rejects flag)``
              tuple, or ``(None, None, False)`` when there is no patch file.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    tmpname = patchdata.get(b'filename')
    message = patchdata.get(b'message')
    # command-line values take precedence over values parsed from the patch
    user = opts.get(b'user') or patchdata.get(b'user')
    date = opts.get(b'date') or patchdata.get(b'date')
    branch = patchdata.get(b'branch')
    nodeid = patchdata.get(b'nodeid')
    p1 = patchdata.get(b'p1')
    p2 = patchdata.get(b'p2')

    nocommit = opts.get(b'no_commit')
    importbranch = opts.get(b'import_branch')
    update = not opts.get(b'bypass')
    strip = opts[b"strip"]
    prefix = opts[b"prefix"]
    sim = float(opts.get(b'similarity') or 0)

    if not tmpname:
        return None, None, False

    rejects = False

    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug(b'message:\n%s\n' % (message or b''))

    if len(parents) == 1:
        parents.append(repo[nullid])
    if opts.get(b'exact'):
        if not nodeid or not p1:
            raise error.Abort(_(b'not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullid]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullid]
        except error.RepoError:
            p1, p2 = parents
        if p2.node() == nullid:
            ui.warn(
                _(
                    b"warning: import the patch as a normal revision\n"
                    b"(use --exact to import the patch as a merge)\n"
                )
            )
    else:
        p1, p2 = parents

    n = None
    if update:
        # apply the patch to the working directory, then commit
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get(b'exact') or importbranch:
            repo.dirstate.setbranch(branch or b'default')

        partial = opts.get(b'partial', False)
        files = set()
        try:
            patch.patch(
                ui,
                repo,
                tmpname,
                strip=strip,
                prefix=prefix,
                files=files,
                eolmode=None,
                similarity=sim / 100.0,
            )
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            if partial:
                # with --partial, hunks that failed are left as .rej files
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get(b'exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], b'import.normal')
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[(b'ui', b'allowemptycommit')] = True
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = repo.commit(
                    message, user, date, match=m, editor=editor, extra=extra
                )
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
    else:
        # --bypass: commit directly from the patch via an in-memory context,
        # without touching the working directory
        if opts.get(b'exact') or importbranch:
            branch = branch or b'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(
                    ui,
                    repo,
                    p1,
                    store,
                    tmpname,
                    strip,
                    prefix,
                    files,
                    eolmode=None,
                )
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=b'import.bypass')
            memctx = context.memctx(
                repo,
                (p1.node(), p2.node()),
                message,
                files=files,
                filectxfn=store,
                user=user,
                date=date,
                branch=branch,
                editor=editor,
            )
            n = memctx.commit()
        finally:
            store.close()
    if opts.get(b'exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_(b'patch is damaged or loses information'))
    msg = _(b'applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _(b'created %s') % short(n)
    return msg, n, rejects
1894
1895
1895
1896
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1903
1904
1904
1905
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Emit one changeset as an "HG changeset patch" through formatter ``fm``.

    Writes the standard patch header (user, date, branch, node, parents),
    any extension-provided extra headers from ``extraexport``, the commit
    description, and finally the diff against the selected parent.

    ``switch_parent`` reverses the parent list so the diff is taken against
    p2 instead of p1 (when a second parent exists).  ``seqno`` is only
    passed through to the ``extraexportmap`` callbacks.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    # diff base: first (possibly switched) parent, or null for a root commit
    if parents:
        prev = parents[0]
    else:
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    # the default branch is implicit and therefore omitted from the header
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    if fm.isplain():
        # plain output: stream labeled chunks so color/labels are preserved
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        # structured output: attach the whole diff as a single data field
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
1948
1949
1949
1950
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Write the patches for ``revs`` to a single destination.

    ``dest`` is echoed to the ui for each revision unless it is a
    placeholder name such as ``<unnamed>`` (i.e. it starts with ``<``).
    """
    placeholder = dest.startswith(b'<')
    for seqno, rev in enumerate(revs, start=1):
        if not placeholder:
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(
            repo, repo[rev], fm, match, switch_parent, seqno, diffopts
        )
1958
1959
1959
1960
def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets to possibly multiple files.

    Each revision's file name is produced by expanding ``fntemplate``;
    revisions mapping to the same name are appended to that file in order.
    """
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    # group (seqno, rev) pairs by target file name, preserving the order in
    # which each name is first produced
    perfile = util.sortdict()
    for seqno, rev in enumerate(revs, start=1):
        name = makefilename(
            repo[rev], fntemplate, total=total, seqno=seqno, revwidth=revwidth
        )
        perfile.setdefault(name, []).append((seqno, rev))

    for name, pairs in perfile.items():
        with formatter.maybereopen(basefm, name) as fm:
            repo.ui.note(b"%s\n" % name)
            for seqno, rev in pairs:
                fm.startitem()
                _exportsingle(
                    repo, repo[rev], fm, match, switch_parent, seqno, diffopts
                )
1984
1985
1985
1986
def _prefetchchangedfiles(repo, revs, match):
    """Prefetch every file touched by ``revs`` that passes ``match``.

    A false-y ``match`` means "all files".
    """
    touched = {
        f
        for rev in revs
        for f in repo[rev].files()
        if not match or match(f)
    }
    scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, touched))
1993
1994
1994
1995
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      basefm: A formatter to which patches should be written.
      fntemplate: An optional string to use for generating patch file names.
      switch_parent: If True, show diffs against second parent when not nullid.
        Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fntemplate specified: Each rev is written to a unique file named using
          the given template.
        Otherwise: All revs will be written to basefm.
    '''
    _prefetchchangedfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
    else:
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
2036
2037
2037
2038
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Write patches for ``revs`` to the already-open file object ``fp``."""
    _prefetchchangedfiles(repo, revs, match)

    # use the stream's file name for ui feedback when it has one
    destname = getattr(fp, 'name', b'<unnamed>')
    with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
2045
2046
2046
2047
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    Emits, in order: optional index, predecessor node, successor nodes
    (omitted when empty), flags, optional parent nodes, date, and the
    remaining metadata as a dict.
    """
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # an empty successor list (e.g. a prune marker) writes nothing
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # the date was rendered above from marker.date(); drop the metadata
    # copy's entry so it is not shown twice
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2077
2078
2078
2079
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    datematch = dateutil.matchdate(date)
    matchall = scmutil.matchall(repo)
    # commit dates that satisfied the spec, keyed by revision number
    matched = {}

    def record(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            matched[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, matchall, {b'rev': None}, record):
        rev = ctx.rev()
        if rev in matched:
            ui.status(
                _(b"found revision %d from %s\n")
                % (rev, dateutil.datestr(matched[rev]))
            )
            return b'%d' % rev

    raise error.Abort(_(b"revision matching date not found"))
2101
2102
2102
2103
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until ``sizelimit`` is reached.

    Once the size is at least ``sizelimit`` it stops growing and is
    repeated indefinitely.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
2108
2109
2109
2110
def _walkrevs(repo, opts):
    """Resolve the revisions a log-style walk should visit.

    An explicit ``--rev`` wins; otherwise ``--follow`` walks ancestors of
    the working directory parent, and the default is every revision,
    newest first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    if opts.get(b'rev'):
        return scmutil.revrange(repo, opts[b'rev'])
    if follow:
        if repo.dirstate.p1() == nullid:
            # nothing to follow from an unborn working directory parent
            return smartset.baseset()
        return repo.revs(b'reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2124
2125
2125
2126
class FileWalkError(Exception):
    """Raised when a file history cannot be walked using filelogs alone."""

    pass
2128
2129
2129
2130
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: ``fncache`` is filled with rev -> [filenames] entries for
    every wanted revision.
    '''
    wanted = set()
    # (filename, filenode) pairs discovered via rename/copy tracking,
    # appended while walking and consumed by iterfiles()
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevs(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            # rename info is only needed (and computed) when following
            revs.append(
                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
            )

        return reversed(revs)

    def iterfiles():
        # yields (filename, filenode-or-None); a None node means "walk the
        # whole filelog" rather than starting from a specific version
        pctx = repo[b'.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % filename
                    )
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        # copies grows while the outer loop below runs, so rename sources
        # queued during the walk are also visited
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % file_
                    )
                raise FileWalkError(b"Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevs(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2235
2236
2236
2237
class _followfilter(object):
    """Incrementally decide whether revisions belong to a follow chain.

    ``match()`` must be fed revisions monotonically moving away from the
    first revision it is called with (``startrev``): strictly increasing
    revs are treated as descendants, others as ancestors.  ``roots`` is
    the frontier of revisions already accepted.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # when set, only first parents are considered (--follow-first)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                # drop null parents of root changesets
                return filter(
                    lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
                )

        # first call establishes the anchor revision and always matches
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace the matched root by its parents to keep walking up
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2275
2276
2276
2277
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    allfiles = opts.get(b'all_files')
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path: we must inspect every changeset's file list rather than
    # walking filelogs (patterns, or --removed with explicit files)
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    fncache = {}
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always() or allfiles:
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(
                _(b'can only follow copies/renames for explicit filenames')
            )

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # evaluate the revision once, then cache the verdict
                    self.revs.discard(value)
                    ctx = change(value)
                    if allfiles:
                        matches = list(ctx.manifest().walk(match))
                    else:
                        matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get(b'prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in pycompat.xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): subtracting a plain list relies on the
                # smartset subtraction protocol; when wanted is the slowpath
                # lazywantedset this looks unsupported — TODO confirm
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))

            def want(rev):
                return ff.match(rev) and rev in wanted

        else:

            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather pass: pull up to windowsize revs, keeping wanted ones
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare pass runs in forward (ascending) order
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:

                    # NOTE(review): this closure captures the loop variable
                    # ctx late — presumably prepare() consumes fns before the
                    # next iteration rebinds ctx; verify against callers
                    def fns_generator():
                        if allfiles:
                            fiter = iter(ctx)
                        else:
                            fiter = ctx.files()
                        for f in fiter:
                            if match(f):
                                yield f

                    fns = fns_generator()
                prepare(ctx, fns)
            # yield pass keeps the caller's requested (window) order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2427
2428
2428
2429
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Schedule files matched by *match* for tracking (``hg add``).

    Walks unknown files in the dirstate, records them via ``wctx.add()``
    (skipped under ``dry_run``), and recurses into subrepositories.
    Returns the list of "bad" files: names the matcher complained about
    plus files rejected by the add itself.
    """
    bad = []

    # Capture files the matcher flags as bad while still delegating to the
    # matcher's own bad-file handling.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # Audit for case collisions (relevant on case-insensitive
        # filesystems).
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        # Exact names are always added; pattern matches only when not
        # restricted to explicit names, not already tracked, and present on
        # disk (lexists also covers dangling symlinks).
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    # Recurse into subrepositories.  Without the 'subrepos' option the
    # recursive calls are made with explicitonly=True.
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # Only report rejections the user explicitly asked for.
        bad.extend(f for f in rejected if f in match.files())
    return bad
2487
2488
2488
2489
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo* under *serverpath* in *webconf*, then recurse into
    any subrepositories referenced by the repository's history."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, root))

    # Every revision that touched .hgsub may introduce subrepos to publish.
    for rev in repo.revs(b'filelog("path:.hgsub")'):
        changectx = repo[rev]
        for subpath in changectx.substate:
            changectx.sub(subpath).addwebdirpath(serverpath, webconf)
2497
2498
2498
2499
def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Stop tracking files matched by *match* without deleting them.

    Recurses into subrepositories and, when *interactive*, prompts per
    file.  Returns a pair ``(bad, forgot)``: files that could not be
    forgotten and files actually forgotten.
    """
    if dryrun and interactive:
        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
    bad = []
    # Collect matcher complaints while still delegating to match.bad().
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # Candidates are every tracked file the matcher selected.
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    # Recurse into subrepositories; their results come back with the
    # subrepo path prepended.
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not explicitonly:
        # Warn about explicitly named files that are not tracked.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        # Iterate over a copy since entries may be removed mid-loop.
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4:  # ?
                # Show the help text, then re-prompt until a real answer.
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0:  # yes
                continue
            elif r == 1:  # no
                forget.remove(filename)
            elif r == 2:  # Skip
                # Drop this file and everything after it.
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3:  # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2596
2597
2597
2598
def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    """Write the names of files in *ctx* matched by *m* to formatter *fm*.

    *fmt* is the plain-output format string applied to each ui-relative
    path.  Returns 0 if at least one file matched (here or in a visited
    subrepo), 1 otherwise.
    """
    ret = 1

    # Only pay for a filectx lookup when size/flags will actually be shown.
    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    for f in ctx.matches(m):
        fm.startitem()
        fm.context(ctx=ctx)
        if needsfctx:
            fc = ctx[f]
            fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
        fm.data(path=f)
        fm.plain(fmt % uipathfn(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        # Visit a subrepo when asked for globally, named exactly, or when
        # the matcher has explicit files inside it.
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if (
                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
                    == 0
                ):
                    ret = 0
            except error.LookupError:
                ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    return ret
2631
2632
2632
2633
def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    """Remove files matched by *m* from tracking and disk (``hg remove``).

    ``after`` only records deletions already made in the working directory;
    ``force`` removes regardless of local modifications.  Warning messages
    accumulate in *warnings*; when the caller supplies the list it also
    owns printing them, otherwise they are printed here.  Returns 0 on
    success, 1 if anything was skipped or warned about.
    """
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    # A caller-provided warnings list means the caller prints them.
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    # Renamed from 'files' to avoid shadowing the module-level files().
    explicitfiles = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(explicitfiles), unit=_(b'files')
    )
    for f in explicitfiles:

        def insubrepo():
            # True when f lives inside one of the working copy's subrepos.
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    # Decide which tracked files actually get removed, warning about the
    # rest.  'removelist' renamed from 'list' to stop shadowing the builtin.
    if force:
        removelist = modified + deleted + clean + added
    elif after:
        removelist = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in explicitfiles):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
                ret = 1
        progress.complete()
    else:
        removelist = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    removelist = sorted(removelist)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(removelist), unit=_(b'files')
    )
    for f in removelist:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                # Loop-invariant config read hoisted out of the unlink loop.
                rmdir = repo.ui.configbool(
                    b'experimental', b'removeemptydirs'
                )
                for f in removelist:
                    if f in added:
                        continue  # we never unlink added files on remove
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(removelist)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2782
2783
2783
2784
2784 def _catfmtneedsdata(fm):
2785 def _catfmtneedsdata(fm):
2785 return not fm.datahint() or b'data' in fm.datahint()
2786 return not fm.datahint() or b'data' in fm.datahint()
2786
2787
2787
2788
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""

    # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
    # wasn't requested.
    if _catfmtneedsdata(fm):
        filedata = ctx[path].data()
        if decode:
            filedata = ctx.repo().wwritedata(path, filedata)
    else:
        filedata = b''
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', filedata)
    fm.data(path=path)
2805
2806
2806
2807
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write the contents of files matched by *matcher* in *ctx*.

    Output goes through *basefm*, optionally redirected to per-file
    destinations built from *fntemplate*.  Recurses into subrepositories.
    Returns 0 if anything was written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        # Emit one file, either to a templated filename or to basefm itself.
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
                write(file)
                return 0
        except KeyError:
            # Fall through to the generic walk below.
            pass

    if _catfmtneedsdata(basefm):
        # Batch-prefetch contents (e.g. for remote stores) before writing.
        scmutil.prefetchfiles(repo, [ctx.rev()], matcher)

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            # sub.cat() returns 0 on success, mirroring this function.
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **pycompat.strkwargs(opts)
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err
2867
2868
2868
2869
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get(b'date')
    if rawdate:
        opts[b'date'] = dateutil.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    dsguard = None
    if opts.get(b'addremove'):
        dsguard = dirstateguard.dirstateguard(repo, b'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard is not None:
            relative = scmutil.anypats(pats, opts)
            uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
            addremoved = scmutil.addremove(repo, matcher, b"", uipathfn, opts)
            if addremoved != 0:
                raise error.Abort(
                    _(b"failed to mark all new/missing files as added/removed")
                )

    return commitfunc(ui, repo, message, matcher, opts)
2892
2893
2893
2894
def samefile(f, ctx1, ctx2):
    """Report whether file *f* is identical (content and flags) in both
    changesets, counting a file absent from both as identical."""
    if f not in ctx1.manifest():
        # Identical only when absent from both sides.
        return f not in ctx2.manifest()
    if f not in ctx2.manifest():
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2904
2905
2905
2906
2906 def amend(ui, repo, old, extra, pats, opts):
2907 def amend(ui, repo, old, extra, pats, opts):
2907 # avoid cycle context -> subrepo -> cmdutil
2908 # avoid cycle context -> subrepo -> cmdutil
2908 from . import context
2909 from . import context
2909
2910
2910 # amend will reuse the existing user if not specified, but the obsolete
2911 # amend will reuse the existing user if not specified, but the obsolete
2911 # marker creation requires that the current user's name is specified.
2912 # marker creation requires that the current user's name is specified.
2912 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2913 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2913 ui.username() # raise exception if username not set
2914 ui.username() # raise exception if username not set
2914
2915
2915 ui.note(_(b'amending changeset %s\n') % old)
2916 ui.note(_(b'amending changeset %s\n') % old)
2916 base = old.p1()
2917 base = old.p1()
2917
2918
2918 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2919 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2919 # Participating changesets:
2920 # Participating changesets:
2920 #
2921 #
2921 # wctx o - workingctx that contains changes from working copy
2922 # wctx o - workingctx that contains changes from working copy
2922 # | to go into amending commit
2923 # | to go into amending commit
2923 # |
2924 # |
2924 # old o - changeset to amend
2925 # old o - changeset to amend
2925 # |
2926 # |
2926 # base o - first parent of the changeset to amend
2927 # base o - first parent of the changeset to amend
2927 wctx = repo[None]
2928 wctx = repo[None]
2928
2929
2929 # Copy to avoid mutating input
2930 # Copy to avoid mutating input
2930 extra = extra.copy()
2931 extra = extra.copy()
2931 # Update extra dict from amended commit (e.g. to preserve graft
2932 # Update extra dict from amended commit (e.g. to preserve graft
2932 # source)
2933 # source)
2933 extra.update(old.extra())
2934 extra.update(old.extra())
2934
2935
2935 # Also update it from the from the wctx
2936 # Also update it from the from the wctx
2936 extra.update(wctx.extra())
2937 extra.update(wctx.extra())
2937
2938
2938 # date-only change should be ignored?
2939 # date-only change should be ignored?
2939 datemaydiffer = resolvecommitoptions(ui, opts)
2940 datemaydiffer = resolvecommitoptions(ui, opts)
2940
2941
2941 date = old.date()
2942 date = old.date()
2942 if opts.get(b'date'):
2943 if opts.get(b'date'):
2943 date = dateutil.parsedate(opts.get(b'date'))
2944 date = dateutil.parsedate(opts.get(b'date'))
2944 user = opts.get(b'user') or old.user()
2945 user = opts.get(b'user') or old.user()
2945
2946
2946 if len(old.parents()) > 1:
2947 if len(old.parents()) > 1:
2947 # ctx.files() isn't reliable for merges, so fall back to the
2948 # ctx.files() isn't reliable for merges, so fall back to the
2948 # slower repo.status() method
2949 # slower repo.status() method
2949 st = base.status(old)
2950 st = base.status(old)
2950 files = set(st.modified) | set(st.added) | set(st.removed)
2951 files = set(st.modified) | set(st.added) | set(st.removed)
2951 else:
2952 else:
2952 files = set(old.files())
2953 files = set(old.files())
2953
2954
2954 # add/remove the files to the working copy if the "addremove" option
2955 # add/remove the files to the working copy if the "addremove" option
2955 # was specified.
2956 # was specified.
2956 matcher = scmutil.match(wctx, pats, opts)
2957 matcher = scmutil.match(wctx, pats, opts)
2957 relative = scmutil.anypats(pats, opts)
2958 relative = scmutil.anypats(pats, opts)
2958 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2959 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2959 if opts.get(b'addremove') and scmutil.addremove(
2960 if opts.get(b'addremove') and scmutil.addremove(
2960 repo, matcher, b"", uipathfn, opts
2961 repo, matcher, b"", uipathfn, opts
2961 ):
2962 ):
2962 raise error.Abort(
2963 raise error.Abort(
2963 _(b"failed to mark all new/missing files as added/removed")
2964 _(b"failed to mark all new/missing files as added/removed")
2964 )
2965 )
2965
2966
2966 # Check subrepos. This depends on in-place wctx._status update in
2967 # Check subrepos. This depends on in-place wctx._status update in
2967 # subrepo.precommit(). To minimize the risk of this hack, we do
2968 # subrepo.precommit(). To minimize the risk of this hack, we do
2968 # nothing if .hgsub does not exist.
2969 # nothing if .hgsub does not exist.
2969 if b'.hgsub' in wctx or b'.hgsub' in old:
2970 if b'.hgsub' in wctx or b'.hgsub' in old:
2970 subs, commitsubs, newsubstate = subrepoutil.precommit(
2971 subs, commitsubs, newsubstate = subrepoutil.precommit(
2971 ui, wctx, wctx._status, matcher
2972 ui, wctx, wctx._status, matcher
2972 )
2973 )
2973 # amend should abort if commitsubrepos is enabled
2974 # amend should abort if commitsubrepos is enabled
2974 assert not commitsubs
2975 assert not commitsubs
2975 if subs:
2976 if subs:
2976 subrepoutil.writestate(repo, newsubstate)
2977 subrepoutil.writestate(repo, newsubstate)
2977
2978
2978 ms = mergemod.mergestate.read(repo)
2979 ms = mergemod.mergestate.read(repo)
2979 mergeutil.checkunresolved(ms)
2980 mergeutil.checkunresolved(ms)
2980
2981
2981 filestoamend = set(f for f in wctx.files() if matcher(f))
2982 filestoamend = set(f for f in wctx.files() if matcher(f))
2982
2983
2983 changes = len(filestoamend) > 0
2984 changes = len(filestoamend) > 0
2984 if changes:
2985 if changes:
2985 # Recompute copies (avoid recording a -> b -> a)
2986 # Recompute copies (avoid recording a -> b -> a)
2986 copied = copies.pathcopies(base, wctx, matcher)
2987 copied = copies.pathcopies(base, wctx, matcher)
2987 if old.p2:
2988 if old.p2:
2988 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2989 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2989
2990
2990 # Prune files which were reverted by the updates: if old
2991 # Prune files which were reverted by the updates: if old
2991 # introduced file X and the file was renamed in the working
2992 # introduced file X and the file was renamed in the working
2992 # copy, then those two files are the same and
2993 # copy, then those two files are the same and
2993 # we can discard X from our list of files. Likewise if X
2994 # we can discard X from our list of files. Likewise if X
2994 # was removed, it's no longer relevant. If X is missing (aka
2995 # was removed, it's no longer relevant. If X is missing (aka
2995 # deleted), old X must be preserved.
2996 # deleted), old X must be preserved.
2996 files.update(filestoamend)
2997 files.update(filestoamend)
2997 files = [
2998 files = [
2998 f
2999 f
2999 for f in files
3000 for f in files
3000 if (f not in filestoamend or not samefile(f, wctx, base))
3001 if (f not in filestoamend or not samefile(f, wctx, base))
3001 ]
3002 ]
3002
3003
3003 def filectxfn(repo, ctx_, path):
3004 def filectxfn(repo, ctx_, path):
3004 try:
3005 try:
3005 # If the file being considered is not amongst the files
3006 # If the file being considered is not amongst the files
3006 # to be amended, we should return the file context from the
3007 # to be amended, we should return the file context from the
3007 # old changeset. This avoids issues when only some files in
3008 # old changeset. This avoids issues when only some files in
3008 # the working copy are being amended but there are also
3009 # the working copy are being amended but there are also
3009 # changes to other files from the old changeset.
3010 # changes to other files from the old changeset.
3010 if path not in filestoamend:
3011 if path not in filestoamend:
3011 return old.filectx(path)
3012 return old.filectx(path)
3012
3013
3013 # Return None for removed files.
3014 # Return None for removed files.
3014 if path in wctx.removed():
3015 if path in wctx.removed():
3015 return None
3016 return None
3016
3017
3017 fctx = wctx[path]
3018 fctx = wctx[path]
3018 flags = fctx.flags()
3019 flags = fctx.flags()
3019 mctx = context.memfilectx(
3020 mctx = context.memfilectx(
3020 repo,
3021 repo,
3021 ctx_,
3022 ctx_,
3022 fctx.path(),
3023 fctx.path(),
3023 fctx.data(),
3024 fctx.data(),
3024 islink=b'l' in flags,
3025 islink=b'l' in flags,
3025 isexec=b'x' in flags,
3026 isexec=b'x' in flags,
3026 copysource=copied.get(path),
3027 copysource=copied.get(path),
3027 )
3028 )
3028 return mctx
3029 return mctx
3029 except KeyError:
3030 except KeyError:
3030 return None
3031 return None
3031
3032
3032 else:
3033 else:
3033 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3034 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3034
3035
3035 # Use version of files as in the old cset
3036 # Use version of files as in the old cset
3036 def filectxfn(repo, ctx_, path):
3037 def filectxfn(repo, ctx_, path):
3037 try:
3038 try:
3038 return old.filectx(path)
3039 return old.filectx(path)
3039 except KeyError:
3040 except KeyError:
3040 return None
3041 return None
3041
3042
3042 # See if we got a message from -m or -l, if not, open the editor with
3043 # See if we got a message from -m or -l, if not, open the editor with
3043 # the message of the changeset to amend.
3044 # the message of the changeset to amend.
3044 message = logmessage(ui, opts)
3045 message = logmessage(ui, opts)
3045
3046
3046 editform = mergeeditform(old, b'commit.amend')
3047 editform = mergeeditform(old, b'commit.amend')
3047
3048
3048 if not message:
3049 if not message:
3049 message = old.description()
3050 message = old.description()
3050 # Default if message isn't provided and --edit is not passed is to
3051 # Default if message isn't provided and --edit is not passed is to
3051 # invoke editor, but allow --no-edit. If somehow we don't have any
3052 # invoke editor, but allow --no-edit. If somehow we don't have any
3052 # description, let's always start the editor.
3053 # description, let's always start the editor.
3053 doedit = not message or opts.get(b'edit') in [True, None]
3054 doedit = not message or opts.get(b'edit') in [True, None]
3054 else:
3055 else:
3055 # Default if message is provided is to not invoke editor, but allow
3056 # Default if message is provided is to not invoke editor, but allow
3056 # --edit.
3057 # --edit.
3057 doedit = opts.get(b'edit') is True
3058 doedit = opts.get(b'edit') is True
3058 editor = getcommiteditor(edit=doedit, editform=editform)
3059 editor = getcommiteditor(edit=doedit, editform=editform)
3059
3060
3060 pureextra = extra.copy()
3061 pureextra = extra.copy()
3061 extra[b'amend_source'] = old.hex()
3062 extra[b'amend_source'] = old.hex()
3062
3063
3063 new = context.memctx(
3064 new = context.memctx(
3064 repo,
3065 repo,
3065 parents=[base.node(), old.p2().node()],
3066 parents=[base.node(), old.p2().node()],
3066 text=message,
3067 text=message,
3067 files=files,
3068 files=files,
3068 filectxfn=filectxfn,
3069 filectxfn=filectxfn,
3069 user=user,
3070 user=user,
3070 date=date,
3071 date=date,
3071 extra=extra,
3072 extra=extra,
3072 editor=editor,
3073 editor=editor,
3073 )
3074 )
3074
3075
3075 newdesc = changelog.stripdesc(new.description())
3076 newdesc = changelog.stripdesc(new.description())
3076 if (
3077 if (
3077 (not changes)
3078 (not changes)
3078 and newdesc == old.description()
3079 and newdesc == old.description()
3079 and user == old.user()
3080 and user == old.user()
3080 and (date == old.date() or datemaydiffer)
3081 and (date == old.date() or datemaydiffer)
3081 and pureextra == old.extra()
3082 and pureextra == old.extra()
3082 ):
3083 ):
3083 # nothing changed. continuing here would create a new node
3084 # nothing changed. continuing here would create a new node
3084 # anyway because of the amend_source noise.
3085 # anyway because of the amend_source noise.
3085 #
3086 #
3086 # This not what we expect from amend.
3087 # This not what we expect from amend.
3087 return old.node()
3088 return old.node()
3088
3089
3089 commitphase = None
3090 commitphase = None
3090 if opts.get(b'secret'):
3091 if opts.get(b'secret'):
3091 commitphase = phases.secret
3092 commitphase = phases.secret
3092 newid = repo.commitctx(new)
3093 newid = repo.commitctx(new)
3093
3094
3094 # Reroute the working copy parent to the new changeset
3095 # Reroute the working copy parent to the new changeset
3095 repo.setparents(newid, nullid)
3096 repo.setparents(newid, nullid)
3096 mapping = {old.node(): (newid,)}
3097 mapping = {old.node(): (newid,)}
3097 obsmetadata = None
3098 obsmetadata = None
3098 if opts.get(b'note'):
3099 if opts.get(b'note'):
3099 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3100 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3100 backup = ui.configbool(b'rewrite', b'backup-bundle')
3101 backup = ui.configbool(b'rewrite', b'backup-bundle')
3101 scmutil.cleanupnodes(
3102 scmutil.cleanupnodes(
3102 repo,
3103 repo,
3103 mapping,
3104 mapping,
3104 b'amend',
3105 b'amend',
3105 metadata=obsmetadata,
3106 metadata=obsmetadata,
3106 fixphase=True,
3107 fixphase=True,
3107 targetphase=commitphase,
3108 targetphase=commitphase,
3108 backup=backup,
3109 backup=backup,
3109 )
3110 )
3110
3111
3111 # Fixing the dirstate because localrepo.commitctx does not update
3112 # Fixing the dirstate because localrepo.commitctx does not update
3112 # it. This is rather convenient because we did not need to update
3113 # it. This is rather convenient because we did not need to update
3113 # the dirstate for all the files in the new commit which commitctx
3114 # the dirstate for all the files in the new commit which commitctx
3114 # could have done if it updated the dirstate. Now, we can
3115 # could have done if it updated the dirstate. Now, we can
3115 # selectively update the dirstate only for the amended files.
3116 # selectively update the dirstate only for the amended files.
3116 dirstate = repo.dirstate
3117 dirstate = repo.dirstate
3117
3118
3118 # Update the state of the files which were added and modified in the
3119 # Update the state of the files which were added and modified in the
3119 # amend to "normal" in the dirstate. We need to use "normallookup" since
3120 # amend to "normal" in the dirstate. We need to use "normallookup" since
3120 # the files may have changed since the command started; using "normal"
3121 # the files may have changed since the command started; using "normal"
3121 # would mark them as clean but with uncommitted contents.
3122 # would mark them as clean but with uncommitted contents.
3122 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3123 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3123 for f in normalfiles:
3124 for f in normalfiles:
3124 dirstate.normallookup(f)
3125 dirstate.normallookup(f)
3125
3126
3126 # Update the state of files which were removed in the amend
3127 # Update the state of files which were removed in the amend
3127 # to "removed" in the dirstate.
3128 # to "removed" in the dirstate.
3128 removedfiles = set(wctx.removed()) & filestoamend
3129 removedfiles = set(wctx.removed()) & filestoamend
3129 for f in removedfiles:
3130 for f in removedfiles:
3130 dirstate.drop(f)
3131 dirstate.drop(f)
3131
3132
3132 return newid
3133 return newid
3133
3134
3134
3135
3135 def commiteditor(repo, ctx, subs, editform=b''):
3136 def commiteditor(repo, ctx, subs, editform=b''):
3136 if ctx.description():
3137 if ctx.description():
3137 return ctx.description()
3138 return ctx.description()
3138 return commitforceeditor(
3139 return commitforceeditor(
3139 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3140 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3140 )
3141 )
3141
3142
3142
3143
3143 def commitforceeditor(
3144 def commitforceeditor(
3144 repo,
3145 repo,
3145 ctx,
3146 ctx,
3146 subs,
3147 subs,
3147 finishdesc=None,
3148 finishdesc=None,
3148 extramsg=None,
3149 extramsg=None,
3149 editform=b'',
3150 editform=b'',
3150 unchangedmessagedetection=False,
3151 unchangedmessagedetection=False,
3151 ):
3152 ):
3152 if not extramsg:
3153 if not extramsg:
3153 extramsg = _(b"Leave message empty to abort commit.")
3154 extramsg = _(b"Leave message empty to abort commit.")
3154
3155
3155 forms = [e for e in editform.split(b'.') if e]
3156 forms = [e for e in editform.split(b'.') if e]
3156 forms.insert(0, b'changeset')
3157 forms.insert(0, b'changeset')
3157 templatetext = None
3158 templatetext = None
3158 while forms:
3159 while forms:
3159 ref = b'.'.join(forms)
3160 ref = b'.'.join(forms)
3160 if repo.ui.config(b'committemplate', ref):
3161 if repo.ui.config(b'committemplate', ref):
3161 templatetext = committext = buildcommittemplate(
3162 templatetext = committext = buildcommittemplate(
3162 repo, ctx, subs, extramsg, ref
3163 repo, ctx, subs, extramsg, ref
3163 )
3164 )
3164 break
3165 break
3165 forms.pop()
3166 forms.pop()
3166 else:
3167 else:
3167 committext = buildcommittext(repo, ctx, subs, extramsg)
3168 committext = buildcommittext(repo, ctx, subs, extramsg)
3168
3169
3169 # run editor in the repository root
3170 # run editor in the repository root
3170 olddir = encoding.getcwd()
3171 olddir = encoding.getcwd()
3171 os.chdir(repo.root)
3172 os.chdir(repo.root)
3172
3173
3173 # make in-memory changes visible to external process
3174 # make in-memory changes visible to external process
3174 tr = repo.currenttransaction()
3175 tr = repo.currenttransaction()
3175 repo.dirstate.write(tr)
3176 repo.dirstate.write(tr)
3176 pending = tr and tr.writepending() and repo.root
3177 pending = tr and tr.writepending() and repo.root
3177
3178
3178 editortext = repo.ui.edit(
3179 editortext = repo.ui.edit(
3179 committext,
3180 committext,
3180 ctx.user(),
3181 ctx.user(),
3181 ctx.extra(),
3182 ctx.extra(),
3182 editform=editform,
3183 editform=editform,
3183 pending=pending,
3184 pending=pending,
3184 repopath=repo.path,
3185 repopath=repo.path,
3185 action=b'commit',
3186 action=b'commit',
3186 )
3187 )
3187 text = editortext
3188 text = editortext
3188
3189
3189 # strip away anything below this special string (used for editors that want
3190 # strip away anything below this special string (used for editors that want
3190 # to display the diff)
3191 # to display the diff)
3191 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3192 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3192 if stripbelow:
3193 if stripbelow:
3193 text = text[: stripbelow.start()]
3194 text = text[: stripbelow.start()]
3194
3195
3195 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3196 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3196 os.chdir(olddir)
3197 os.chdir(olddir)
3197
3198
3198 if finishdesc:
3199 if finishdesc:
3199 text = finishdesc(text)
3200 text = finishdesc(text)
3200 if not text.strip():
3201 if not text.strip():
3201 raise error.Abort(_(b"empty commit message"))
3202 raise error.Abort(_(b"empty commit message"))
3202 if unchangedmessagedetection and editortext == templatetext:
3203 if unchangedmessagedetection and editortext == templatetext:
3203 raise error.Abort(_(b"commit message unchanged"))
3204 raise error.Abort(_(b"commit message unchanged"))
3204
3205
3205 return text
3206 return text
3206
3207
3207
3208
3208 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3209 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3209 ui = repo.ui
3210 ui = repo.ui
3210 spec = formatter.templatespec(ref, None, None)
3211 spec = formatter.templatespec(ref, None, None)
3211 t = logcmdutil.changesettemplater(ui, repo, spec)
3212 t = logcmdutil.changesettemplater(ui, repo, spec)
3212 t.t.cache.update(
3213 t.t.cache.update(
3213 (k, templater.unquotestring(v))
3214 (k, templater.unquotestring(v))
3214 for k, v in repo.ui.configitems(b'committemplate')
3215 for k, v in repo.ui.configitems(b'committemplate')
3215 )
3216 )
3216
3217
3217 if not extramsg:
3218 if not extramsg:
3218 extramsg = b'' # ensure that extramsg is string
3219 extramsg = b'' # ensure that extramsg is string
3219
3220
3220 ui.pushbuffer()
3221 ui.pushbuffer()
3221 t.show(ctx, extramsg=extramsg)
3222 t.show(ctx, extramsg=extramsg)
3222 return ui.popbuffer()
3223 return ui.popbuffer()
3223
3224
3224
3225
3225 def hgprefix(msg):
3226 def hgprefix(msg):
3226 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3227 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3227
3228
3228
3229
3229 def buildcommittext(repo, ctx, subs, extramsg):
3230 def buildcommittext(repo, ctx, subs, extramsg):
3230 edittext = []
3231 edittext = []
3231 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3232 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3232 if ctx.description():
3233 if ctx.description():
3233 edittext.append(ctx.description())
3234 edittext.append(ctx.description())
3234 edittext.append(b"")
3235 edittext.append(b"")
3235 edittext.append(b"") # Empty line between message and comments.
3236 edittext.append(b"") # Empty line between message and comments.
3236 edittext.append(
3237 edittext.append(
3237 hgprefix(
3238 hgprefix(
3238 _(
3239 _(
3239 b"Enter commit message."
3240 b"Enter commit message."
3240 b" Lines beginning with 'HG:' are removed."
3241 b" Lines beginning with 'HG:' are removed."
3241 )
3242 )
3242 )
3243 )
3243 )
3244 )
3244 edittext.append(hgprefix(extramsg))
3245 edittext.append(hgprefix(extramsg))
3245 edittext.append(b"HG: --")
3246 edittext.append(b"HG: --")
3246 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3247 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3247 if ctx.p2():
3248 if ctx.p2():
3248 edittext.append(hgprefix(_(b"branch merge")))
3249 edittext.append(hgprefix(_(b"branch merge")))
3249 if ctx.branch():
3250 if ctx.branch():
3250 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3251 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3251 if bookmarks.isactivewdirparent(repo):
3252 if bookmarks.isactivewdirparent(repo):
3252 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3253 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3253 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3254 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3254 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3255 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3255 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3256 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3256 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3257 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3257 if not added and not modified and not removed:
3258 if not added and not modified and not removed:
3258 edittext.append(hgprefix(_(b"no files changed")))
3259 edittext.append(hgprefix(_(b"no files changed")))
3259 edittext.append(b"")
3260 edittext.append(b"")
3260
3261
3261 return b"\n".join(edittext)
3262 return b"\n".join(edittext)
3262
3263
3263
3264
3264 def commitstatus(repo, node, branch, bheads=None, opts=None):
3265 def commitstatus(repo, node, branch, bheads=None, opts=None):
3265 if opts is None:
3266 if opts is None:
3266 opts = {}
3267 opts = {}
3267 ctx = repo[node]
3268 ctx = repo[node]
3268 parents = ctx.parents()
3269 parents = ctx.parents()
3269
3270
3270 if (
3271 if (
3271 not opts.get(b'amend')
3272 not opts.get(b'amend')
3272 and bheads
3273 and bheads
3273 and node not in bheads
3274 and node not in bheads
3274 and not [
3275 and not [
3275 x for x in parents if x.node() in bheads and x.branch() == branch
3276 x for x in parents if x.node() in bheads and x.branch() == branch
3276 ]
3277 ]
3277 ):
3278 ):
3278 repo.ui.status(_(b'created new head\n'))
3279 repo.ui.status(_(b'created new head\n'))
3279 # The message is not printed for initial roots. For the other
3280 # The message is not printed for initial roots. For the other
3280 # changesets, it is printed in the following situations:
3281 # changesets, it is printed in the following situations:
3281 #
3282 #
3282 # Par column: for the 2 parents with ...
3283 # Par column: for the 2 parents with ...
3283 # N: null or no parent
3284 # N: null or no parent
3284 # B: parent is on another named branch
3285 # B: parent is on another named branch
3285 # C: parent is a regular non head changeset
3286 # C: parent is a regular non head changeset
3286 # H: parent was a branch head of the current branch
3287 # H: parent was a branch head of the current branch
3287 # Msg column: whether we print "created new head" message
3288 # Msg column: whether we print "created new head" message
3288 # In the following, it is assumed that there already exists some
3289 # In the following, it is assumed that there already exists some
3289 # initial branch heads of the current branch, otherwise nothing is
3290 # initial branch heads of the current branch, otherwise nothing is
3290 # printed anyway.
3291 # printed anyway.
3291 #
3292 #
3292 # Par Msg Comment
3293 # Par Msg Comment
3293 # N N y additional topo root
3294 # N N y additional topo root
3294 #
3295 #
3295 # B N y additional branch root
3296 # B N y additional branch root
3296 # C N y additional topo head
3297 # C N y additional topo head
3297 # H N n usual case
3298 # H N n usual case
3298 #
3299 #
3299 # B B y weird additional branch root
3300 # B B y weird additional branch root
3300 # C B y branch merge
3301 # C B y branch merge
3301 # H B n merge with named branch
3302 # H B n merge with named branch
3302 #
3303 #
3303 # C C y additional head from merge
3304 # C C y additional head from merge
3304 # C H n merge with a head
3305 # C H n merge with a head
3305 #
3306 #
3306 # H H n head merge: head count decreases
3307 # H H n head merge: head count decreases
3307
3308
3308 if not opts.get(b'close_branch'):
3309 if not opts.get(b'close_branch'):
3309 for r in parents:
3310 for r in parents:
3310 if r.closesbranch() and r.branch() == branch:
3311 if r.closesbranch() and r.branch() == branch:
3311 repo.ui.status(
3312 repo.ui.status(
3312 _(b'reopening closed branch head %d\n') % r.rev()
3313 _(b'reopening closed branch head %d\n') % r.rev()
3313 )
3314 )
3314
3315
3315 if repo.ui.debugflag:
3316 if repo.ui.debugflag:
3316 repo.ui.write(
3317 repo.ui.write(
3317 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3318 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3318 )
3319 )
3319 elif repo.ui.verbose:
3320 elif repo.ui.verbose:
3320 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3321 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3321
3322
3322
3323
3323 def postcommitstatus(repo, pats, opts):
3324 def postcommitstatus(repo, pats, opts):
3324 return repo.status(match=scmutil.match(repo[None], pats, opts))
3325 return repo.status(match=scmutil.match(repo[None], pats, opts))
3325
3326
3326
3327
3327 def revert(ui, repo, ctx, parents, *pats, **opts):
3328 def revert(ui, repo, ctx, parents, *pats, **opts):
3328 opts = pycompat.byteskwargs(opts)
3329 opts = pycompat.byteskwargs(opts)
3329 parent, p2 = parents
3330 parent, p2 = parents
3330 node = ctx.node()
3331 node = ctx.node()
3331
3332
3332 mf = ctx.manifest()
3333 mf = ctx.manifest()
3333 if node == p2:
3334 if node == p2:
3334 parent = p2
3335 parent = p2
3335
3336
3336 # need all matching names in dirstate and manifest of target rev,
3337 # need all matching names in dirstate and manifest of target rev,
3337 # so have to walk both. do not print errors if files exist in one
3338 # so have to walk both. do not print errors if files exist in one
3338 # but not other. in both cases, filesets should be evaluated against
3339 # but not other. in both cases, filesets should be evaluated against
3339 # workingctx to get consistent result (issue4497). this means 'set:**'
3340 # workingctx to get consistent result (issue4497). this means 'set:**'
3340 # cannot be used to select missing files from target rev.
3341 # cannot be used to select missing files from target rev.
3341
3342
3342 # `names` is a mapping for all elements in working copy and target revision
3343 # `names` is a mapping for all elements in working copy and target revision
3343 # The mapping is in the form:
3344 # The mapping is in the form:
3344 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3345 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3345 names = {}
3346 names = {}
3346 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3347
3348
3348 with repo.wlock():
3349 with repo.wlock():
3349 ## filling of the `names` mapping
3350 ## filling of the `names` mapping
3350 # walk dirstate to fill `names`
3351 # walk dirstate to fill `names`
3351
3352
3352 interactive = opts.get(b'interactive', False)
3353 interactive = opts.get(b'interactive', False)
3353 wctx = repo[None]
3354 wctx = repo[None]
3354 m = scmutil.match(wctx, pats, opts)
3355 m = scmutil.match(wctx, pats, opts)
3355
3356
3356 # we'll need this later
3357 # we'll need this later
3357 targetsubs = sorted(s for s in wctx.substate if m(s))
3358 targetsubs = sorted(s for s in wctx.substate if m(s))
3358
3359
3359 if not m.always():
3360 if not m.always():
3360 matcher = matchmod.badmatch(m, lambda x, y: False)
3361 matcher = matchmod.badmatch(m, lambda x, y: False)
3361 for abs in wctx.walk(matcher):
3362 for abs in wctx.walk(matcher):
3362 names[abs] = m.exact(abs)
3363 names[abs] = m.exact(abs)
3363
3364
3364 # walk target manifest to fill `names`
3365 # walk target manifest to fill `names`
3365
3366
3366 def badfn(path, msg):
3367 def badfn(path, msg):
3367 if path in names:
3368 if path in names:
3368 return
3369 return
3369 if path in ctx.substate:
3370 if path in ctx.substate:
3370 return
3371 return
3371 path_ = path + b'/'
3372 path_ = path + b'/'
3372 for f in names:
3373 for f in names:
3373 if f.startswith(path_):
3374 if f.startswith(path_):
3374 return
3375 return
3375 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3376 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3376
3377
3377 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3378 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3378 if abs not in names:
3379 if abs not in names:
3379 names[abs] = m.exact(abs)
3380 names[abs] = m.exact(abs)
3380
3381
3381 # Find status of all file in `names`.
3382 # Find status of all file in `names`.
3382 m = scmutil.matchfiles(repo, names)
3383 m = scmutil.matchfiles(repo, names)
3383
3384
3384 changes = repo.status(
3385 changes = repo.status(
3385 node1=node, match=m, unknown=True, ignored=True, clean=True
3386 node1=node, match=m, unknown=True, ignored=True, clean=True
3386 )
3387 )
3387 else:
3388 else:
3388 changes = repo.status(node1=node, match=m)
3389 changes = repo.status(node1=node, match=m)
3389 for kind in changes:
3390 for kind in changes:
3390 for abs in kind:
3391 for abs in kind:
3391 names[abs] = m.exact(abs)
3392 names[abs] = m.exact(abs)
3392
3393
3393 m = scmutil.matchfiles(repo, names)
3394 m = scmutil.matchfiles(repo, names)
3394
3395
3395 modified = set(changes.modified)
3396 modified = set(changes.modified)
3396 added = set(changes.added)
3397 added = set(changes.added)
3397 removed = set(changes.removed)
3398 removed = set(changes.removed)
3398 _deleted = set(changes.deleted)
3399 _deleted = set(changes.deleted)
3399 unknown = set(changes.unknown)
3400 unknown = set(changes.unknown)
3400 unknown.update(changes.ignored)
3401 unknown.update(changes.ignored)
3401 clean = set(changes.clean)
3402 clean = set(changes.clean)
3402 modadded = set()
3403 modadded = set()
3403
3404
3404 # We need to account for the state of the file in the dirstate,
3405 # We need to account for the state of the file in the dirstate,
3405 # even when we revert against something else than parent. This will
3406 # even when we revert against something else than parent. This will
3406 # slightly alter the behavior of revert (doing back up or not, delete
3407 # slightly alter the behavior of revert (doing back up or not, delete
3407 # or just forget etc).
3408 # or just forget etc).
3408 if parent == node:
3409 if parent == node:
3409 dsmodified = modified
3410 dsmodified = modified
3410 dsadded = added
3411 dsadded = added
3411 dsremoved = removed
3412 dsremoved = removed
3412 # store all local modifications, useful later for rename detection
3413 # store all local modifications, useful later for rename detection
3413 localchanges = dsmodified | dsadded
3414 localchanges = dsmodified | dsadded
3414 modified, added, removed = set(), set(), set()
3415 modified, added, removed = set(), set(), set()
3415 else:
3416 else:
3416 changes = repo.status(node1=parent, match=m)
3417 changes = repo.status(node1=parent, match=m)
3417 dsmodified = set(changes.modified)
3418 dsmodified = set(changes.modified)
3418 dsadded = set(changes.added)
3419 dsadded = set(changes.added)
3419 dsremoved = set(changes.removed)
3420 dsremoved = set(changes.removed)
3420 # store all local modifications, useful later for rename detection
3421 # store all local modifications, useful later for rename detection
3421 localchanges = dsmodified | dsadded
3422 localchanges = dsmodified | dsadded
3422
3423
3423 # only take into account for removes between wc and target
3424 # only take into account for removes between wc and target
3424 clean |= dsremoved - removed
3425 clean |= dsremoved - removed
3425 dsremoved &= removed
3426 dsremoved &= removed
3426 # distinct between dirstate remove and other
3427 # distinct between dirstate remove and other
3427 removed -= dsremoved
3428 removed -= dsremoved
3428
3429
3429 modadded = added & dsmodified
3430 modadded = added & dsmodified
3430 added -= modadded
3431 added -= modadded
3431
3432
3432 # tell newly modified apart.
3433 # tell newly modified apart.
3433 dsmodified &= modified
3434 dsmodified &= modified
3434 dsmodified |= modified & dsadded # dirstate added may need backup
3435 dsmodified |= modified & dsadded # dirstate added may need backup
3435 modified -= dsmodified
3436 modified -= dsmodified
3436
3437
3437 # We need to wait for some post-processing to update this set
3438 # We need to wait for some post-processing to update this set
3438 # before making the distinction. The dirstate will be used for
3439 # before making the distinction. The dirstate will be used for
3439 # that purpose.
3440 # that purpose.
3440 dsadded = added
3441 dsadded = added
3441
3442
3442 # in case of merge, files that are actually added can be reported as
3443 # in case of merge, files that are actually added can be reported as
3443 # modified, we need to post process the result
3444 # modified, we need to post process the result
3444 if p2 != nullid:
3445 if p2 != nullid:
3445 mergeadd = set(dsmodified)
3446 mergeadd = set(dsmodified)
3446 for path in dsmodified:
3447 for path in dsmodified:
3447 if path in mf:
3448 if path in mf:
3448 mergeadd.remove(path)
3449 mergeadd.remove(path)
3449 dsadded |= mergeadd
3450 dsadded |= mergeadd
3450 dsmodified -= mergeadd
3451 dsmodified -= mergeadd
3451
3452
3452 # if f is a rename, update `names` to also revert the source
3453 # if f is a rename, update `names` to also revert the source
3453 for f in localchanges:
3454 for f in localchanges:
3454 src = repo.dirstate.copied(f)
3455 src = repo.dirstate.copied(f)
3455 # XXX should we check for rename down to target node?
3456 # XXX should we check for rename down to target node?
3456 if src and src not in names and repo.dirstate[src] == b'r':
3457 if src and src not in names and repo.dirstate[src] == b'r':
3457 dsremoved.add(src)
3458 dsremoved.add(src)
3458 names[src] = True
3459 names[src] = True
3459
3460
3460 # determine the exact nature of the deleted changesets
3461 # determine the exact nature of the deleted changesets
3461 deladded = set(_deleted)
3462 deladded = set(_deleted)
3462 for path in _deleted:
3463 for path in _deleted:
3463 if path in mf:
3464 if path in mf:
3464 deladded.remove(path)
3465 deladded.remove(path)
3465 deleted = _deleted - deladded
3466 deleted = _deleted - deladded
3466
3467
3467 # distinguish between file to forget and the other
3468 # distinguish between file to forget and the other
3468 added = set()
3469 added = set()
3469 for abs in dsadded:
3470 for abs in dsadded:
3470 if repo.dirstate[abs] != b'a':
3471 if repo.dirstate[abs] != b'a':
3471 added.add(abs)
3472 added.add(abs)
3472 dsadded -= added
3473 dsadded -= added
3473
3474
3474 for abs in deladded:
3475 for abs in deladded:
3475 if repo.dirstate[abs] == b'a':
3476 if repo.dirstate[abs] == b'a':
3476 dsadded.add(abs)
3477 dsadded.add(abs)
3477 deladded -= dsadded
3478 deladded -= dsadded
3478
3479
3479 # For files marked as removed, we check if an unknown file is present at
3480 # For files marked as removed, we check if an unknown file is present at
3480 # the same path. If a such file exists it may need to be backed up.
3481 # the same path. If a such file exists it may need to be backed up.
3481 # Making the distinction at this stage helps have simpler backup
3482 # Making the distinction at this stage helps have simpler backup
3482 # logic.
3483 # logic.
3483 removunk = set()
3484 removunk = set()
3484 for abs in removed:
3485 for abs in removed:
3485 target = repo.wjoin(abs)
3486 target = repo.wjoin(abs)
3486 if os.path.lexists(target):
3487 if os.path.lexists(target):
3487 removunk.add(abs)
3488 removunk.add(abs)
3488 removed -= removunk
3489 removed -= removunk
3489
3490
3490 dsremovunk = set()
3491 dsremovunk = set()
3491 for abs in dsremoved:
3492 for abs in dsremoved:
3492 target = repo.wjoin(abs)
3493 target = repo.wjoin(abs)
3493 if os.path.lexists(target):
3494 if os.path.lexists(target):
3494 dsremovunk.add(abs)
3495 dsremovunk.add(abs)
3495 dsremoved -= dsremovunk
3496 dsremoved -= dsremovunk
3496
3497
3497 # action to be actually performed by revert
3498 # action to be actually performed by revert
3498 # (<list of file>, message>) tuple
3499 # (<list of file>, message>) tuple
3499 actions = {
3500 actions = {
3500 b'revert': ([], _(b'reverting %s\n')),
3501 b'revert': ([], _(b'reverting %s\n')),
3501 b'add': ([], _(b'adding %s\n')),
3502 b'add': ([], _(b'adding %s\n')),
3502 b'remove': ([], _(b'removing %s\n')),
3503 b'remove': ([], _(b'removing %s\n')),
3503 b'drop': ([], _(b'removing %s\n')),
3504 b'drop': ([], _(b'removing %s\n')),
3504 b'forget': ([], _(b'forgetting %s\n')),
3505 b'forget': ([], _(b'forgetting %s\n')),
3505 b'undelete': ([], _(b'undeleting %s\n')),
3506 b'undelete': ([], _(b'undeleting %s\n')),
3506 b'noop': (None, _(b'no changes needed to %s\n')),
3507 b'noop': (None, _(b'no changes needed to %s\n')),
3507 b'unknown': (None, _(b'file not managed: %s\n')),
3508 b'unknown': (None, _(b'file not managed: %s\n')),
3508 }
3509 }
3509
3510
3510 # "constant" that convey the backup strategy.
3511 # "constant" that convey the backup strategy.
3511 # All set to `discard` if `no-backup` is set do avoid checking
3512 # All set to `discard` if `no-backup` is set do avoid checking
3512 # no_backup lower in the code.
3513 # no_backup lower in the code.
3513 # These values are ordered for comparison purposes
3514 # These values are ordered for comparison purposes
3514 backupinteractive = 3 # do backup if interactively modified
3515 backupinteractive = 3 # do backup if interactively modified
3515 backup = 2 # unconditionally do backup
3516 backup = 2 # unconditionally do backup
3516 check = 1 # check if the existing file differs from target
3517 check = 1 # check if the existing file differs from target
3517 discard = 0 # never do backup
3518 discard = 0 # never do backup
3518 if opts.get(b'no_backup'):
3519 if opts.get(b'no_backup'):
3519 backupinteractive = backup = check = discard
3520 backupinteractive = backup = check = discard
3520 if interactive:
3521 if interactive:
3521 dsmodifiedbackup = backupinteractive
3522 dsmodifiedbackup = backupinteractive
3522 else:
3523 else:
3523 dsmodifiedbackup = backup
3524 dsmodifiedbackup = backup
3524 tobackup = set()
3525 tobackup = set()
3525
3526
3526 backupanddel = actions[b'remove']
3527 backupanddel = actions[b'remove']
3527 if not opts.get(b'no_backup'):
3528 if not opts.get(b'no_backup'):
3528 backupanddel = actions[b'drop']
3529 backupanddel = actions[b'drop']
3529
3530
3530 disptable = (
3531 disptable = (
3531 # dispatch table:
3532 # dispatch table:
3532 # file state
3533 # file state
3533 # action
3534 # action
3534 # make backup
3535 # make backup
3535 ## Sets that results that will change file on disk
3536 ## Sets that results that will change file on disk
3536 # Modified compared to target, no local change
3537 # Modified compared to target, no local change
3537 (modified, actions[b'revert'], discard),
3538 (modified, actions[b'revert'], discard),
3538 # Modified compared to target, but local file is deleted
3539 # Modified compared to target, but local file is deleted
3539 (deleted, actions[b'revert'], discard),
3540 (deleted, actions[b'revert'], discard),
3540 # Modified compared to target, local change
3541 # Modified compared to target, local change
3541 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3542 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3542 # Added since target
3543 # Added since target
3543 (added, actions[b'remove'], discard),
3544 (added, actions[b'remove'], discard),
3544 # Added in working directory
3545 # Added in working directory
3545 (dsadded, actions[b'forget'], discard),
3546 (dsadded, actions[b'forget'], discard),
3546 # Added since target, have local modification
3547 # Added since target, have local modification
3547 (modadded, backupanddel, backup),
3548 (modadded, backupanddel, backup),
3548 # Added since target but file is missing in working directory
3549 # Added since target but file is missing in working directory
3549 (deladded, actions[b'drop'], discard),
3550 (deladded, actions[b'drop'], discard),
3550 # Removed since target, before working copy parent
3551 # Removed since target, before working copy parent
3551 (removed, actions[b'add'], discard),
3552 (removed, actions[b'add'], discard),
3552 # Same as `removed` but an unknown file exists at the same path
3553 # Same as `removed` but an unknown file exists at the same path
3553 (removunk, actions[b'add'], check),
3554 (removunk, actions[b'add'], check),
3554 # Removed since targe, marked as such in working copy parent
3555 # Removed since targe, marked as such in working copy parent
3555 (dsremoved, actions[b'undelete'], discard),
3556 (dsremoved, actions[b'undelete'], discard),
3556 # Same as `dsremoved` but an unknown file exists at the same path
3557 # Same as `dsremoved` but an unknown file exists at the same path
3557 (dsremovunk, actions[b'undelete'], check),
3558 (dsremovunk, actions[b'undelete'], check),
3558 ## the following sets does not result in any file changes
3559 ## the following sets does not result in any file changes
3559 # File with no modification
3560 # File with no modification
3560 (clean, actions[b'noop'], discard),
3561 (clean, actions[b'noop'], discard),
3561 # Existing file, not tracked anywhere
3562 # Existing file, not tracked anywhere
3562 (unknown, actions[b'unknown'], discard),
3563 (unknown, actions[b'unknown'], discard),
3563 )
3564 )
3564
3565
3565 for abs, exact in sorted(names.items()):
3566 for abs, exact in sorted(names.items()):
3566 # target file to be touch on disk (relative to cwd)
3567 # target file to be touch on disk (relative to cwd)
3567 target = repo.wjoin(abs)
3568 target = repo.wjoin(abs)
3568 # search the entry in the dispatch table.
3569 # search the entry in the dispatch table.
3569 # if the file is in any of these sets, it was touched in the working
3570 # if the file is in any of these sets, it was touched in the working
3570 # directory parent and we are sure it needs to be reverted.
3571 # directory parent and we are sure it needs to be reverted.
3571 for table, (xlist, msg), dobackup in disptable:
3572 for table, (xlist, msg), dobackup in disptable:
3572 if abs not in table:
3573 if abs not in table:
3573 continue
3574 continue
3574 if xlist is not None:
3575 if xlist is not None:
3575 xlist.append(abs)
3576 xlist.append(abs)
3576 if dobackup:
3577 if dobackup:
3577 # If in interactive mode, don't automatically create
3578 # If in interactive mode, don't automatically create
3578 # .orig files (issue4793)
3579 # .orig files (issue4793)
3579 if dobackup == backupinteractive:
3580 if dobackup == backupinteractive:
3580 tobackup.add(abs)
3581 tobackup.add(abs)
3581 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3582 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3582 absbakname = scmutil.backuppath(ui, repo, abs)
3583 absbakname = scmutil.backuppath(ui, repo, abs)
3583 bakname = os.path.relpath(
3584 bakname = os.path.relpath(
3584 absbakname, start=repo.root
3585 absbakname, start=repo.root
3585 )
3586 )
3586 ui.note(
3587 ui.note(
3587 _(b'saving current version of %s as %s\n')
3588 _(b'saving current version of %s as %s\n')
3588 % (uipathfn(abs), uipathfn(bakname))
3589 % (uipathfn(abs), uipathfn(bakname))
3589 )
3590 )
3590 if not opts.get(b'dry_run'):
3591 if not opts.get(b'dry_run'):
3591 if interactive:
3592 if interactive:
3592 util.copyfile(target, absbakname)
3593 util.copyfile(target, absbakname)
3593 else:
3594 else:
3594 util.rename(target, absbakname)
3595 util.rename(target, absbakname)
3595 if opts.get(b'dry_run'):
3596 if opts.get(b'dry_run'):
3596 if ui.verbose or not exact:
3597 if ui.verbose or not exact:
3597 ui.status(msg % uipathfn(abs))
3598 ui.status(msg % uipathfn(abs))
3598 elif exact:
3599 elif exact:
3599 ui.warn(msg % uipathfn(abs))
3600 ui.warn(msg % uipathfn(abs))
3600 break
3601 break
3601
3602
3602 if not opts.get(b'dry_run'):
3603 if not opts.get(b'dry_run'):
3603 needdata = (b'revert', b'add', b'undelete')
3604 needdata = (b'revert', b'add', b'undelete')
3604 oplist = [actions[name][0] for name in needdata]
3605 oplist = [actions[name][0] for name in needdata]
3605 prefetch = scmutil.prefetchfiles
3606 prefetch = scmutil.prefetchfiles
3606 matchfiles = scmutil.matchfiles
3607 matchfiles = scmutil.matchfiles
3607 prefetch(
3608 prefetch(
3608 repo,
3609 repo,
3609 [ctx.rev()],
3610 [ctx.rev()],
3610 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3611 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3611 )
3612 )
3612 match = scmutil.match(repo[None], pats)
3613 match = scmutil.match(repo[None], pats)
3613 _performrevert(
3614 _performrevert(
3614 repo,
3615 repo,
3615 parents,
3616 parents,
3616 ctx,
3617 ctx,
3617 names,
3618 names,
3618 uipathfn,
3619 uipathfn,
3619 actions,
3620 actions,
3620 match,
3621 match,
3621 interactive,
3622 interactive,
3622 tobackup,
3623 tobackup,
3623 )
3624 )
3624
3625
3625 if targetsubs:
3626 if targetsubs:
3626 # Revert the subrepos on the revert list
3627 # Revert the subrepos on the revert list
3627 for sub in targetsubs:
3628 for sub in targetsubs:
3628 try:
3629 try:
3629 wctx.sub(sub).revert(
3630 wctx.sub(sub).revert(
3630 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3631 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3631 )
3632 )
3632 except KeyError:
3633 except KeyError:
3633 raise error.Abort(
3634 raise error.Abort(
3634 b"subrepository '%s' does not exist in %s!"
3635 b"subrepository '%s' does not exist in %s!"
3635 % (sub, short(ctx.node()))
3636 % (sub, short(ctx.node()))
3636 )
3637 )
3637
3638
3638
3639
def _performrevert(
    repo,
    parents,
    ctx,
    names,
    uipathfn,
    actions,
    match,
    interactive=False,
    tobackup=None,
):
    """Actually perform all the actions computed for revert.

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this
    function.

    Arguments (as wired up by the ``revert`` code above):
      repo        - the local repository
      parents     - (p1, p2) nodes of the working directory
      ctx         - the changectx being reverted to
      names       - dict mapping file path -> whether it was matched exactly
      uipathfn    - formats a repo-relative path for UI output
      actions     - dict of action name -> ([files], message template)
      match       - matcher for the files the user named (used interactively)
      interactive - prompt per-file/per-hunk instead of reverting everything
      tobackup    - set of files needing a .orig backup in interactive mode
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user declined to touch at an interactive prompt
    excluded_files = []

    def checkout(f):
        # write f's content (and flags) from the target ctx into the wdir
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # remove the file from disk (best-effort) and mark it removed in
        # the dirstate
        try:
            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # file already gone on disk; the dirstate update below still
            # needs to happen
            pass
        repo.dirstate.remove(f)

    def prntstatusmsg(action, f):
        # report what we're doing, unless the file was named exactly and
        # we're not verbose (matches the dry-run reporting above)
        exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % uipathfn(f))

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions[b'forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'forget', f)
                repo.dirstate.drop(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'forget', f)
            repo.dirstate.drop(f)
    for f in actions[b'remove'][0]:
        # refuse to operate through symlinks / outside the repo
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'remove', f)
            doremove(f)
    for f in actions[b'drop'][0]:
        audit_path(f)
        prntstatusmsg(b'drop', f)
        # drop: the file is already missing on disk, only fix the dirstate
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(
            repo.ui,
            whitespace=True,
            section=b'commands',
            configprefix=b'revert.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        # 'apply' selects hunks to apply (moving away from the target);
        # 'discard'/'keep' select hunks relative to the working copy when
        # reverting to the parent.
        operation = b'apply'
        if node == parent:
            if repo.ui.configbool(
                b'experimental', b'revert.interactive.select-to-keep'
            ):
                operation = b'keep'
            else:
                operation = b'discard'

        if operation == b'apply':
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        else:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(
                repo.ui, originalchunks, match, operation=operation
            )
            if operation == b'discard':
                # the diff was wdir->target; reverse the selected hunks so
                # applying them discards the local change
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)

        # FIXME: when doing an interactive revert of a copy, there's no way of
        # performing a partial revert of the added file, the only option is
        # "remove added file <name> (Yn)?", so we don't need to worry about the
        # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
            chunks, originalchunks
        )
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    if operation == b'keep':
                        # 'keep' mode starts from the target content and
                        # re-applies the hunks the user chose to keep
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        # non-interactive: simply write the target version of every file
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg(b'add', f)
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    # restore copy/rename metadata for everything we (re)created
    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3838
3839
3839
3840
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return a tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3857
3858
3858
3859
def checkunfinished(repo, commit=False, skipmerge=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    # Non-clearable states are checked first, so things like rebase take
    # precedence over update.
    for st in statemod._unfinishedstates:
        skippable = (
            st._clearable
            or (commit and st._allowcommit)
            or st._reportonly
        )
        if skippable:
            continue
        if st.isunfinished(repo):
            raise error.Abort(st.msg(), hint=st.hint())

    # Then look at the clearable states.
    for st in statemod._unfinishedstates:
        relevant = (
            st._clearable
            and not (commit and st._allowcommit)
            and not (st._opname == b'merge' and skipmerge)
            and not st._reportonly
        )
        if relevant and st.isunfinished(repo):
            raise error.Abort(st.msg(), hint=st.hint())
3886
3887
3887
3888
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.

    Aborts if a non-clearable, non-report-only operation is unfinished;
    otherwise deletes the state file of each clearable unfinished
    operation (except merges, which are handled elsewhere).
    '''
    for state in statemod._unfinishedstates:
        if state._reportonly:
            continue
        if not state._clearable and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    for s in statemod._unfinishedstates:
        # BUG FIX: this condition previously read ``state._reportonly``,
        # accidentally referring to the variable leaked from the loop
        # above (always the *last* state, and a NameError if the list
        # were empty) instead of the state being iterated here.
        if s._opname == b'merge' or s._reportonly:
            continue
        if s._clearable and s.isunfinished(repo):
            util.unlink(repo.vfs.join(s._fname))
3903
3904
3904
3905
def getunfinishedstate(repo):
    '''Check for unfinished operations and return the statecheck object
    for the first one found, or None if there is none.'''
    return next(
        (st for st in statemod._unfinishedstates if st.isunfinished(repo)),
        None,
    )
3912
3913
3913
3914
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    statemod._unfinishedstates is scanned for an unfinished operation,
    and the corresponding "continue" message is generated if the
    operation supports continuing.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    template = _(b"continue: %s")
    for st in statemod._unfinishedstates:
        if st._continueflag and st.isunfinished(repo):
            return template % st.continuemsg(), True
    # no unfinished operation: suggest committing if the wdir is dirty
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return template % _(b"hg commit"), False
    return None, None
3934
3935
3935
3936
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there is an unfinished operation that supports the continue flag,
    howtocontinue will yield repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter(b"%s\n" % msg)
3950
3951
3951
3952
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only surface the continue message as a hint when it is a warning
    hint = msg if warning else None
    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
3966
3967
3967
3968
def abortgraft(ui, repo, graftstate):
    """Abort the interrupted graft and roll back to the state before the
    graft was started.

    Requires a graft state file recorded with the 'newnodes' key; updates
    the working directory back to the starting changeset and strips the
    nodes created by the graft when it is safe to do so. Returns 0 on
    success, raises error.Abort otherwise.
    """
    if not graftstate.exists():
        raise error.Abort(_(b"no interrupted graft to abort"))
    statedata = readgraftstate(repo, graftstate)
    newnodes = statedata.get(b'newnodes')
    if newnodes is None:
        # an old graft state which does not have all the data required to
        # abort the graft
        raise error.Abort(_(b"cannot abort using an old graftstate"))

    # changeset from which graft operation was started
    if len(newnodes) > 0:
        startctx = repo[newnodes[0]].p1()
    else:
        startctx = repo[b'.']
    # whether to strip or not
    cleanup = False
    # imported locally to avoid an import cycle at module load time
    from . import hg

    if newnodes:
        newnodes = [repo[r].rev() for r in newnodes]
        cleanup = True
        # checking that none of the newnodes turned public or is public
        immutable = [c for c in newnodes if not repo[c].mutable()]
        if immutable:
            repo.ui.warn(
                _(b"cannot clean up public changesets %s\n")
                % b', '.join(bytes(repo[r]) for r in immutable),
                hint=_(b"see 'hg help phases' for details"),
            )
            cleanup = False

        # checking that no new nodes are created on top of grafted revs
        desc = set(repo.changelog.descendants(newnodes))
        if desc - set(newnodes):
            repo.ui.warn(
                _(
                    b"new changesets detected on destination "
                    b"branch, can't strip\n"
                )
            )
            cleanup = False

    if cleanup:
        with repo.wlock(), repo.lock():
            hg.updaterepo(repo, startctx.node(), overwrite=True)
            # stripping the new nodes created
            strippoints = [
                c.node() for c in repo.set(b"roots(%ld)", newnodes)
            ]
            repair.strip(repo.ui, repo, strippoints, backup=False)

    if not cleanup:
        # we don't update to the startnode if we can't strip
        startctx = repo[b'.']
        hg.updaterepo(repo, startctx.node(), overwrite=True)

    ui.status(_(b"graft aborted\n"))
    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
    graftstate.delete()
    return 0
4031
4032
4032
4033
def readgraftstate(repo, graftstate):
    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
    """Read the graft state file and return a dict of the data stored in it.

    Falls back to parsing the raw ``graftstate`` file directly (one node
    hash per line) when the structured state is corrupted — e.g. a state
    file written by an older Mercurial in the plain-list format.
    """
    try:
        data = graftstate.read()
    except error.CorruptedState:
        # Old-format graftstate: a bare list of node hashes, one per line.
        raw = repo.vfs.read(b'graftstate')
        data = {b'nodes': raw.splitlines()}
    return data
4042
4043
def hgabortgraft(ui, repo):
    """Abort logic for aborting graft using 'hg abort'."""
    # Take the working-copy lock for the whole abort so concurrent
    # commands cannot observe a half-cleaned graft state.
    with repo.wlock():
        state = statemod.cmdstate(repo, b'graftstate')
        return abortgraft(ui, repo, state)
General Comments 0
You need to be logged in to leave comments. Login now