##// END OF EJS Templates
merge: use merge.clean_update() when applicable...
Martin von Zweigbergk -
r46133:03726f5b default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,2282 +1,2282
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 nullrev,
24 nullrev,
25 short,
25 short,
26 )
26 )
27 from mercurial.pycompat import open
27 from mercurial.pycompat import open
28 from mercurial import (
28 from mercurial import (
29 bookmarks,
29 bookmarks,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 destutil,
33 destutil,
34 dirstateguard,
34 dirstateguard,
35 error,
35 error,
36 extensions,
36 extensions,
37 hg,
37 hg,
38 merge as mergemod,
38 merge as mergemod,
39 mergestate as mergestatemod,
39 mergestate as mergestatemod,
40 mergeutil,
40 mergeutil,
41 node as nodemod,
41 node as nodemod,
42 obsolete,
42 obsolete,
43 obsutil,
43 obsutil,
44 patch,
44 patch,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 registrar,
47 registrar,
48 repair,
48 repair,
49 revset,
49 revset,
50 revsetlang,
50 revsetlang,
51 rewriteutil,
51 rewriteutil,
52 scmutil,
52 scmutil,
53 smartset,
53 smartset,
54 state as statemod,
54 state as statemod,
55 util,
55 util,
56 )
56 )
57
57
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased (sentinel stored in the
# state map; real revision numbers are always >= 0)
revtodo = -1
# same sentinel in the on-disk (bytes) representation of the state file
revtodostr = b'-1'

# legacy revstates no longer needed in current code
# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
legacystates = {b'-2', b'-3', b'-4', b'-5'}

# command table populated by the @command decorator below
cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
76
76
77
77
78 def _nothingtorebase():
78 def _nothingtorebase():
79 return 1
79 return 1
80
80
81
81
82 def _savegraft(ctx, extra):
82 def _savegraft(ctx, extra):
83 s = ctx.extra().get(b'source', None)
83 s = ctx.extra().get(b'source', None)
84 if s is not None:
84 if s is not None:
85 extra[b'source'] = s
85 extra[b'source'] = s
86 s = ctx.extra().get(b'intermediate-source', None)
86 s = ctx.extra().get(b'intermediate-source', None)
87 if s is not None:
87 if s is not None:
88 extra[b'intermediate-source'] = s
88 extra[b'intermediate-source'] = s
89
89
90
90
91 def _savebranch(ctx, extra):
91 def _savebranch(ctx, extra):
92 extra[b'branch'] = ctx.branch()
92 extra[b'branch'] = ctx.branch()
93
93
94
94
def _destrebase(repo, sourceset, destspace=None):
    """small wrapper around destmerge to pass the right extra args

    Please wrap destutil.destmerge instead."""
    kwargs = {
        'action': b'rebase',
        'sourceset': sourceset,
        'onheadcheck': False,
        'destspace': destspace,
    }
    return destutil.destmerge(repo, **kwargs)
106
106
107
107
108 revsetpredicate = registrar.revsetpredicate()
108 revsetpredicate = registrar.revsetpredicate()
109
109
110
110
@revsetpredicate(b'_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    destination = _destrebase(repo, sourceset)
    return subset & smartset.baseset([destination])
123
123
124
124
@revsetpredicate(b'_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
    # ``_destautoorphanrebase()``

    # automatic rebase destination for a single orphan revision.
    unfi = repo.unfiltered()
    obsoleted = unfi.revs(b'obsolete()')

    src = revset.getset(repo, subset, x).first()

    # No source, or a source that is itself obsoleted: nothing to report.
    if not src or src in obsoleted:
        return smartset.baseset()

    candidates = destutil.orphanpossibledestination(repo, src)
    if len(candidates) > 1:
        raise error.Abort(
            _(b"ambiguous automatic rebase: %r could end up on any of %r")
            % (src, candidates)
        )
    # Zero or one candidate left; hand it straight back.
    return smartset.baseset(candidates)
146
146
147
147
def _ctxdesc(ctx):
    """short description for a context"""
    repo = ctx.repo()
    summary = ctx.description().split(b'\n', 1)[0]
    desc = b'%d:%s "%s"' % (ctx.rev(), ctx, summary)
    labels = []
    for nsname, ns in pycompat.iteritems(repo.names):
        # branch names are rendered elsewhere; skip them here
        if nsname == b'branches':
            continue
        labels.extend(ns.names(repo, ctx.node()))
    if labels:
        desc += b' (%s)' % b' '.join(labels)
    return desc
164
164
165
165
166 class rebaseruntime(object):
166 class rebaseruntime(object):
167 """This class is a container for rebase runtime state"""
167 """This class is a container for rebase runtime state"""
168
168
    def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
        """Initialize rebase runtime state.

        repo: repository to rebase in
        ui: ui object for config access and output
        inmemory: rebase via in-memory merge instead of the working copy
        dryrun: simulation only; no permanent changes intended
        opts: command options dict (``collapse``, ``keep``, ``date``, ...)
        """
        if opts is None:
            opts = {}

        # prepared: whether we have rebasestate prepared or not. Currently it
        # decides whether "self.repo" is unfiltered or not.
        # The rebasestate has explicit hash to hash instructions not depending
        # on visibility. If rebasestate exists (in-memory or on-disk), use
        # unfiltered repo to avoid visibility issues.
        # Before knowing rebasestate (i.e. when starting a new rebase (not
        # --continue or --abort)), the original repo should be used so
        # visibility-dependent revsets are correct.
        self.prepared = False
        self.resume = False
        self._repo = repo

        self.ui = ui
        self.opts = opts
        self.originalwd = None
        # external parent revision used when collapsing; nullrev until set
        self.external = nullrev
        # Mapping between the old revision id and either what is the new rebased
        # revision or what needs to be done with the old revision. The state
        # dict will be what contains most of the rebase progress state.
        self.state = {}
        self.activebookmark = None
        # {srcrev: destrev} for every revision being rebased
        self.destmap = {}
        # revisions that ended up not producing a new commit
        self.skipped = set()

        self.collapsef = opts.get(b'collapse', False)
        self.collapsemsg = cmdutil.logmessage(ui, opts)
        self.date = opts.get(b'date', None)

        e = opts.get(b'extrafn')  # internal, used by e.g. hgsubversion
        self.extrafns = [_savegraft]
        if e:
            # a caller-supplied extrafn replaces the default graft saver
            self.extrafns = [e]

        # whether to save a backup bundle of the rewritten revisions
        self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
        self.keepf = opts.get(b'keep', False)
        self.keepbranchesf = opts.get(b'keepbranches', False)
        # whether successors that would be empty commits should be skipped
        self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
            repo.ui, b'rebase'
        )
        self.obsoletenotrebased = {}
        self.obsoletewithoutsuccessorindestination = set()
        self.inmemory = inmemory
        self.dryrun = dryrun
        self.stateobj = statemod.cmdstate(repo, b'rebasestate')
217
217
218 @property
218 @property
219 def repo(self):
219 def repo(self):
220 if self.prepared:
220 if self.prepared:
221 return self._repo.unfiltered()
221 return self._repo.unfiltered()
222 else:
222 else:
223 return self._repo
223 return self._repo
224
224
225 def storestatus(self, tr=None):
225 def storestatus(self, tr=None):
226 """Store the current status to allow recovery"""
226 """Store the current status to allow recovery"""
227 if tr:
227 if tr:
228 tr.addfilegenerator(
228 tr.addfilegenerator(
229 b'rebasestate',
229 b'rebasestate',
230 (b'rebasestate',),
230 (b'rebasestate',),
231 self._writestatus,
231 self._writestatus,
232 location=b'plain',
232 location=b'plain',
233 )
233 )
234 else:
234 else:
235 with self.repo.vfs(b"rebasestate", b"w") as f:
235 with self.repo.vfs(b"rebasestate", b"w") as f:
236 self._writestatus(f)
236 self._writestatus(f)
237
237
    def _writestatus(self, f):
        """Serialize the rebase state to file object ``f``.

        On-disk format, one item per line: originalwd hex, legacy dest
        (now always empty), external hex, collapse flag, keep flag,
        keepbranches flag, active bookmark name, then one
        ``oldrev:newrev:destnode`` line per rebased revision.  The line
        order must match what ``_read`` expects.
        """
        repo = self.repo
        assert repo.filtername is None
        f.write(repo[self.originalwd].hex() + b'\n')
        # was "dest". we now write dest per src root below.
        f.write(b'\n')
        f.write(repo[self.external].hex() + b'\n')
        f.write(b'%d\n' % int(self.collapsef))
        f.write(b'%d\n' % int(self.keepf))
        f.write(b'%d\n' % int(self.keepbranchesf))
        f.write(b'%s\n' % (self.activebookmark or b''))
        destmap = self.destmap
        for d, v in pycompat.iteritems(self.state):
            oldrev = repo[d].hex()
            if v >= 0:
                newrev = repo[v].hex()
            else:
                # negative values encode pending states (e.g. revtodo),
                # not real revisions
                newrev = b"%d" % v
            destnode = repo[destmap[d]].hex()
            f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
        repo.ui.debug(b'rebase status stored\n')
259
259
260 def restorestatus(self):
260 def restorestatus(self):
261 """Restore a previously stored status"""
261 """Restore a previously stored status"""
262 if not self.stateobj.exists():
262 if not self.stateobj.exists():
263 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
263 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
264
264
265 data = self._read()
265 data = self._read()
266 self.repo.ui.debug(b'rebase status resumed\n')
266 self.repo.ui.debug(b'rebase status resumed\n')
267
267
268 self.originalwd = data[b'originalwd']
268 self.originalwd = data[b'originalwd']
269 self.destmap = data[b'destmap']
269 self.destmap = data[b'destmap']
270 self.state = data[b'state']
270 self.state = data[b'state']
271 self.skipped = data[b'skipped']
271 self.skipped = data[b'skipped']
272 self.collapsef = data[b'collapse']
272 self.collapsef = data[b'collapse']
273 self.keepf = data[b'keep']
273 self.keepf = data[b'keep']
274 self.keepbranchesf = data[b'keepbranches']
274 self.keepbranchesf = data[b'keepbranches']
275 self.external = data[b'external']
275 self.external = data[b'external']
276 self.activebookmark = data[b'activebookmark']
276 self.activebookmark = data[b'activebookmark']
277
277
278 def _read(self):
278 def _read(self):
279 self.prepared = True
279 self.prepared = True
280 repo = self.repo
280 repo = self.repo
281 assert repo.filtername is None
281 assert repo.filtername is None
282 data = {
282 data = {
283 b'keepbranches': None,
283 b'keepbranches': None,
284 b'collapse': None,
284 b'collapse': None,
285 b'activebookmark': None,
285 b'activebookmark': None,
286 b'external': nullrev,
286 b'external': nullrev,
287 b'keep': None,
287 b'keep': None,
288 b'originalwd': None,
288 b'originalwd': None,
289 }
289 }
290 legacydest = None
290 legacydest = None
291 state = {}
291 state = {}
292 destmap = {}
292 destmap = {}
293
293
294 if True:
294 if True:
295 f = repo.vfs(b"rebasestate")
295 f = repo.vfs(b"rebasestate")
296 for i, l in enumerate(f.read().splitlines()):
296 for i, l in enumerate(f.read().splitlines()):
297 if i == 0:
297 if i == 0:
298 data[b'originalwd'] = repo[l].rev()
298 data[b'originalwd'] = repo[l].rev()
299 elif i == 1:
299 elif i == 1:
300 # this line should be empty in newer version. but legacy
300 # this line should be empty in newer version. but legacy
301 # clients may still use it
301 # clients may still use it
302 if l:
302 if l:
303 legacydest = repo[l].rev()
303 legacydest = repo[l].rev()
304 elif i == 2:
304 elif i == 2:
305 data[b'external'] = repo[l].rev()
305 data[b'external'] = repo[l].rev()
306 elif i == 3:
306 elif i == 3:
307 data[b'collapse'] = bool(int(l))
307 data[b'collapse'] = bool(int(l))
308 elif i == 4:
308 elif i == 4:
309 data[b'keep'] = bool(int(l))
309 data[b'keep'] = bool(int(l))
310 elif i == 5:
310 elif i == 5:
311 data[b'keepbranches'] = bool(int(l))
311 data[b'keepbranches'] = bool(int(l))
312 elif i == 6 and not (len(l) == 81 and b':' in l):
312 elif i == 6 and not (len(l) == 81 and b':' in l):
313 # line 6 is a recent addition, so for backwards
313 # line 6 is a recent addition, so for backwards
314 # compatibility check that the line doesn't look like the
314 # compatibility check that the line doesn't look like the
315 # oldrev:newrev lines
315 # oldrev:newrev lines
316 data[b'activebookmark'] = l
316 data[b'activebookmark'] = l
317 else:
317 else:
318 args = l.split(b':')
318 args = l.split(b':')
319 oldrev = repo[args[0]].rev()
319 oldrev = repo[args[0]].rev()
320 newrev = args[1]
320 newrev = args[1]
321 if newrev in legacystates:
321 if newrev in legacystates:
322 continue
322 continue
323 if len(args) > 2:
323 if len(args) > 2:
324 destrev = repo[args[2]].rev()
324 destrev = repo[args[2]].rev()
325 else:
325 else:
326 destrev = legacydest
326 destrev = legacydest
327 destmap[oldrev] = destrev
327 destmap[oldrev] = destrev
328 if newrev == revtodostr:
328 if newrev == revtodostr:
329 state[oldrev] = revtodo
329 state[oldrev] = revtodo
330 # Legacy compat special case
330 # Legacy compat special case
331 else:
331 else:
332 state[oldrev] = repo[newrev].rev()
332 state[oldrev] = repo[newrev].rev()
333
333
334 if data[b'keepbranches'] is None:
334 if data[b'keepbranches'] is None:
335 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
335 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
336
336
337 data[b'destmap'] = destmap
337 data[b'destmap'] = destmap
338 data[b'state'] = state
338 data[b'state'] = state
339 skipped = set()
339 skipped = set()
340 # recompute the set of skipped revs
340 # recompute the set of skipped revs
341 if not data[b'collapse']:
341 if not data[b'collapse']:
342 seen = set(destmap.values())
342 seen = set(destmap.values())
343 for old, new in sorted(state.items()):
343 for old, new in sorted(state.items()):
344 if new != revtodo and new in seen:
344 if new != revtodo and new in seen:
345 skipped.add(old)
345 skipped.add(old)
346 seen.add(new)
346 seen.add(new)
347 data[b'skipped'] = skipped
347 data[b'skipped'] = skipped
348 repo.ui.debug(
348 repo.ui.debug(
349 b'computed skipped revs: %s\n'
349 b'computed skipped revs: %s\n'
350 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
350 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
351 )
351 )
352
352
353 return data
353 return data
354
354
    def _handleskippingobsolete(self, obsoleterevs, destmap):
        """Compute structures necessary for skipping obsolete revisions

        obsoleterevs: iterable of all obsolete revisions in rebaseset
        destmap: {srcrev: destrev} destination revisions
        """
        self.obsoletenotrebased = {}
        # obsolete-skipping is gated behind an experimental config knob
        if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
            return
        obsoleteset = set(obsoleterevs)
        (
            self.obsoletenotrebased,
            self.obsoletewithoutsuccessorindestination,
            obsoleteextinctsuccessors,
        ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
        # union of every category of revision we intend to skip
        skippedset = set(self.obsoletenotrebased)
        skippedset.update(self.obsoletewithoutsuccessorindestination)
        skippedset.update(obsoleteextinctsuccessors)
        # may abort if the planned skips are not safe
        _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
374
374
    def _prepareabortorcontinue(
        self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
    ):
        """Restore saved state for ``--continue``/``--abort``.

        When aborting, returns an exit code (0 after clearing broken state
        or delegating to ``_abort``).  When continuing, returns None after
        restoring state, or raises ``error.Abort`` if the stored state is
        inconsistent.
        """
        self.resume = True
        try:
            self.restorestatus()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            # a revision recorded in the state file no longer exists
            if isabort:
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(
                    _(
                        b'rebase aborted (no revision is removed,'
                        b' only broken state is cleared)\n'
                    )
                )
                return 0
            else:
                msg = _(b'cannot continue inconsistent rebase')
                hint = _(b'use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)

        if isabort:
            # only honor the backup request if backups are enabled at all
            backup = backup and self.backupf
            return self._abort(
                backup=backup,
                suppwarns=suppwarns,
                dryrun=dryrun,
                confirm=confirm,
            )
406
406
    def _preparenewrebase(self, destmap):
        """Validate ``destmap`` and build the initial rebase state.

        Returns ``_nothingtorebase()``'s exit code when there is nothing
        to do; otherwise fills in ``originalwd``/``destmap``/``state``
        (and ``external`` when collapsing) and marks the runtime as
        prepared.
        """
        if not destmap:
            return _nothingtorebase()

        rebaseset = destmap.keys()
        if not self.keepf:
            try:
                rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
            except error.Abort as e:
                # make the failure actionable for the user
                if e.hint is None:
                    e.hint = _(b'use --keep to keep original changesets')
                raise e

        result = buildstate(self.repo, destmap, self.collapsef)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_(b'nothing to rebase\n'))
            return _nothingtorebase()

        (self.originalwd, self.destmap, self.state) = result
        if self.collapsef:
            # collapsing folds everything onto a single destination
            dests = set(self.destmap.values())
            if len(dests) != 1:
                raise error.Abort(
                    _(b'--collapse does not work with multiple destinations')
                )
            destrev = next(iter(dests))
            destancestors = self.repo.changelog.ancestors(
                [destrev], inclusive=True
            )
            self.external = externalparent(self.repo, self.state, destancestors)

        for destrev in sorted(set(destmap.values())):
            dest = self.repo[destrev]
            if dest.closesbranch() and not self.keepbranchesf:
                self.ui.status(_(b'reopening closed branch head %s\n') % dest)

        self.prepared = True
446
446
447 def _assignworkingcopy(self):
447 def _assignworkingcopy(self):
448 if self.inmemory:
448 if self.inmemory:
449 from mercurial.context import overlayworkingctx
449 from mercurial.context import overlayworkingctx
450
450
451 self.wctx = overlayworkingctx(self.repo)
451 self.wctx = overlayworkingctx(self.repo)
452 self.repo.ui.debug(b"rebasing in memory\n")
452 self.repo.ui.debug(b"rebasing in memory\n")
453 else:
453 else:
454 self.wctx = self.repo[None]
454 self.wctx = self.repo[None]
455 self.repo.ui.debug(b"rebasing on disk\n")
455 self.repo.ui.debug(b"rebasing on disk\n")
456 self.repo.ui.log(
456 self.repo.ui.log(
457 b"rebase",
457 b"rebase",
458 b"using in-memory rebase: %r\n",
458 b"using in-memory rebase: %r\n",
459 self.inmemory,
459 self.inmemory,
460 rebase_imm_used=self.inmemory,
460 rebase_imm_used=self.inmemory,
461 )
461 )
462
462
    def _performrebase(self, tr):
        """Rebase every revision in ``self.state`` still marked as todo.

        ``tr`` is an optional single transaction covering the whole rebase;
        when given, state is additionally stored through the transaction so
        it is persisted when the transaction commits.
        """
        self._assignworkingcopy()
        repo, ui = self.repo, self.ui
        if self.keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            self.extrafns.insert(0, _savebranch)
            if self.collapsef:
                branches = set()
                for rev in self.state:
                    branches.add(repo[rev].branch())
                if len(branches) > 1:
                    raise error.Abort(
                        _(b'cannot collapse multiple named branches')
                    )

        # Calculate self.obsoletenotrebased
        obsrevs = _filterobsoleterevs(self.repo, self.state)
        self._handleskippingobsolete(obsrevs, self.destmap)

        # Keep track of the active bookmarks in order to reset them later
        self.activebookmark = self.activebookmark or repo._activebookmark
        if self.activebookmark:
            bookmarks.deactivate(repo)

        # Store the state before we begin so users can run 'hg rebase --abort'
        # if we fail before the transaction closes.
        self.storestatus()
        if tr:
            # When using single transaction, store state when transaction
            # commits.
            self.storestatus(tr)

        cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
        p = repo.ui.makeprogress(
            _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
        )

        def progress(ctx):
            p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))

        allowdivergence = self.ui.configbool(
            b'experimental', b'evolution.allowdivergence'
        )
        for subset in sortsource(self.destmap):
            sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
            if not allowdivergence:
                # drop descendants of obsolete revisions that have no
                # successor in the destination; rebasing those would
                # create divergence
                sortedrevs -= self.repo.revs(
                    b'descendants(%ld) and not %ld',
                    self.obsoletewithoutsuccessorindestination,
                    self.obsoletewithoutsuccessorindestination,
                )
            for rev in sortedrevs:
                self._rebasenode(tr, rev, allowdivergence, progress)
        p.complete()
        ui.note(_(b'rebase merging completed\n'))
520
520
521 def _concludenode(self, rev, editor, commitmsg=None):
521 def _concludenode(self, rev, editor, commitmsg=None):
522 '''Commit the wd changes with parents p1 and p2.
522 '''Commit the wd changes with parents p1 and p2.
523
523
524 Reuse commit info from rev but also store useful information in extra.
524 Reuse commit info from rev but also store useful information in extra.
525 Return node of committed revision.'''
525 Return node of committed revision.'''
526 repo = self.repo
526 repo = self.repo
527 ctx = repo[rev]
527 ctx = repo[rev]
528 if commitmsg is None:
528 if commitmsg is None:
529 commitmsg = ctx.description()
529 commitmsg = ctx.description()
530 date = self.date
530 date = self.date
531 if date is None:
531 if date is None:
532 date = ctx.date()
532 date = ctx.date()
533 extra = {b'rebase_source': ctx.hex()}
533 extra = {b'rebase_source': ctx.hex()}
534 for c in self.extrafns:
534 for c in self.extrafns:
535 c(ctx, extra)
535 c(ctx, extra)
536 destphase = max(ctx.phase(), phases.draft)
536 destphase = max(ctx.phase(), phases.draft)
537 overrides = {
537 overrides = {
538 (b'phases', b'new-commit'): destphase,
538 (b'phases', b'new-commit'): destphase,
539 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
539 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
540 }
540 }
541 with repo.ui.configoverride(overrides, b'rebase'):
541 with repo.ui.configoverride(overrides, b'rebase'):
542 if self.inmemory:
542 if self.inmemory:
543 newnode = commitmemorynode(
543 newnode = commitmemorynode(
544 repo,
544 repo,
545 wctx=self.wctx,
545 wctx=self.wctx,
546 extra=extra,
546 extra=extra,
547 commitmsg=commitmsg,
547 commitmsg=commitmsg,
548 editor=editor,
548 editor=editor,
549 user=ctx.user(),
549 user=ctx.user(),
550 date=date,
550 date=date,
551 )
551 )
552 else:
552 else:
553 newnode = commitnode(
553 newnode = commitnode(
554 repo,
554 repo,
555 extra=extra,
555 extra=extra,
556 commitmsg=commitmsg,
556 commitmsg=commitmsg,
557 editor=editor,
557 editor=editor,
558 user=ctx.user(),
558 user=ctx.user(),
559 date=date,
559 date=date,
560 )
560 )
561
561
562 return newnode
562 return newnode
563
563
    def _rebasenode(self, tr, rev, allowdivergence, progressfn):
        """Rebase the single revision ``rev`` and record the result in
        ``self.state``.

        ``tr`` is the active transaction or a falsy value; when falsy, the
        rebase state file is persisted after this revision completes.
        ``progressfn`` is invoked with the changectx just before merging.
        Depending on ``self.state[rev]`` and obsolescence bookkeeping the
        revision is merged and committed, skipped, or reported as already
        rebased.
        """
        repo, ui, opts = self.repo, self.ui, self.opts
        ctx = repo[rev]
        desc = _ctxdesc(ctx)
        if self.state[rev] == rev:
            # state maps the rev to itself: nothing to do.
            ui.status(_(b'already rebased %s\n') % desc)
        elif (
            not allowdivergence
            and rev in self.obsoletewithoutsuccessorindestination
        ):
            # Rebasing this obsolete rev would create divergent successors.
            msg = (
                _(
                    b'note: not rebasing %s and its descendants as '
                    b'this would cause divergence\n'
                )
                % desc
            )
            repo.ui.status(msg)
            self.skipped.add(rev)
        elif rev in self.obsoletenotrebased:
            succ = self.obsoletenotrebased[rev]
            if succ is None:
                msg = _(b'note: not rebasing %s, it has no successor\n') % desc
            else:
                succdesc = _ctxdesc(repo[succ])
                msg = _(
                    b'note: not rebasing %s, already in destination as %s\n'
                ) % (desc, succdesc)
            repo.ui.status(msg)
            # Make clearrebased aware state[rev] is not a true successor
            self.skipped.add(rev)
            # Record rev as moved to its desired destination in self.state.
            # This helps bookmark and working parent movement.
            dest = max(
                adjustdest(repo, rev, self.destmap, self.state, self.skipped)
            )
            self.state[rev] = dest
        elif self.state[rev] == revtodo:
            # The normal case: this revision still needs to be rebased.
            ui.status(_(b'rebasing %s\n') % desc)
            progressfn(ctx)
            p1, p2, base = defineparents(
                repo,
                rev,
                self.destmap,
                self.state,
                self.skipped,
                self.obsoletenotrebased,
            )
            if self.resume and self.wctx.p1().rev() == p1:
                # Working context already sits on the right parent; the
                # merge from the interrupted run can be reused as-is.
                repo.ui.debug(b'resuming interrupted rebase\n')
                self.resume = False
            else:
                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                with ui.configoverride(overrides, b'rebase'):
                    try:
                        rebasenode(
                            repo,
                            rev,
                            p1,
                            p2,
                            base,
                            self.collapsef,
                            wctx=self.wctx,
                        )
                    except error.InMemoryMergeConflictsError:
                        if self.dryrun:
                            raise error.ConflictResolutionRequired(b'rebase')
                        if self.collapsef:
                            # TODO: Make the overlayworkingctx reflected
                            # in the working copy here instead of re-raising
                            # so the entire rebase operation is retried.
                            raise
                        # In-memory merge hit conflicts: fall back to an
                        # on-disk merge of this one commit so the user can
                        # resolve them in the working copy.
                        ui.status(
                            _(
                                b"hit merge conflicts; rebasing that "
                                b"commit again in the working copy\n"
                            )
                        )
                        cmdutil.bailifchanged(repo)
                        self.inmemory = False
                        self._assignworkingcopy()
                        mergemod.update(
                            repo,
                            p1,
                            branchmerge=False,
                            force=False,
                            wc=self.wctx,
                        )
                        rebasenode(
                            repo,
                            rev,
                            p1,
                            p2,
                            base,
                            self.collapsef,
                            wctx=self.wctx,
                        )
            if not self.collapsef:
                merging = p2 != nullrev
                editform = cmdutil.mergeeditform(merging, b'rebase')
                editor = cmdutil.getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
                # We need to set parents again here just in case we're continuing
                # a rebase started with an old hg version (before 9c9cfecd4600),
                # because those old versions would have left us with two dirstate
                # parents, and we don't want to create a merge commit here (unless
                # we're rebasing a merge commit).
                self.wctx.setparents(repo[p1].node(), repo[p2].node())
                newnode = self._concludenode(rev, editor)
            else:
                # Skip commit if we are collapsing
                newnode = None
            # Update the state
            if newnode is not None:
                self.state[rev] = repo[newnode].rev()
                ui.debug(b'rebased as %s\n' % short(newnode))
                if repo[newnode].isempty():
                    ui.warn(
                        _(
                            b'note: created empty successor for %s, its '
                            b'destination already has all its changes\n'
                        )
                        % desc
                    )
            else:
                if not self.collapsef:
                    # No commit was produced: the destination already has
                    # everything this revision would contribute.
                    ui.warn(
                        _(
                            b'note: not rebasing %s, its destination already '
                            b'has all its changes\n'
                        )
                        % desc
                    )
                    self.skipped.add(rev)
                self.state[rev] = p1
                ui.debug(b'next revision set to %d\n' % p1)
        else:
            ui.status(
                _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
            )
        if not tr:
            # When not using single transaction, store state after each
            # commit is completely done. On InterventionRequired, we thus
            # won't store the status. Instead, we'll hit the "len(parents) == 2"
            # case and realize that the commit was in progress.
            self.storestatus()
711
711
    def _finishrebase(self):
        """Finalize a completed rebase.

        Handles the collapse commit (when ``--collapse`` was requested),
        updates mq state if applicable, restores the original working
        directory parent, strips or obsoletes the original changesets via
        ``clearrebased``, clears the on-disk rebase state, and re-activates
        the previously active bookmark when still appropriate.
        """
        repo, ui, opts = self.repo, self.ui, self.opts
        fm = ui.formatter(b'rebase', opts)
        fm.startitem()
        if self.collapsef:
            # All rebased revisions are folded into a single commit whose
            # parents are computed from the first revision in the set.
            p1, p2, _base = defineparents(
                repo,
                min(self.state),
                self.destmap,
                self.state,
                self.skipped,
                self.obsoletenotrebased,
            )
            editopt = opts.get(b'edit')
            editform = b'rebase.collapse'
            if self.collapsemsg:
                commitmsg = self.collapsemsg
            else:
                # No message supplied: synthesize one listing every folded
                # changeset and force the editor open so it can be reviewed.
                commitmsg = b'Collapsed revision'
                for rebased in sorted(self.state):
                    if rebased not in self.skipped:
                        commitmsg += b'\n* %s' % repo[rebased].description()
                editopt = True
            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
            revtoreuse = max(self.state)

            self.wctx.setparents(repo[p1].node(), repo[self.external].node())
            newnode = self._concludenode(
                revtoreuse, editor, commitmsg=commitmsg
            )

            if newnode is not None:
                # Every original revision now maps to the collapse commit.
                newrev = repo[newnode].rev()
                for oldrev in self.state:
                    self.state[oldrev] = newrev

        if b'qtip' in repo.tags():
            updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))

        # restore original working directory
        # (we do this before stripping)
        newwd = self.state.get(self.originalwd, self.originalwd)
        if newwd < 0:
            # original directory is a parent of rebase set root or ignored
            newwd = self.originalwd
        if newwd not in [c.rev() for c in repo[None].parents()]:
            ui.note(_(b"update back to initial working directory parent\n"))
            hg.updaterepo(repo, newwd, overwrite=False)

        collapsedas = None
        if self.collapsef and not self.keepf:
            collapsedas = newnode
        clearrebased(
            ui,
            repo,
            self.destmap,
            self.state,
            self.skipped,
            collapsedas,
            self.keepf,
            fm=fm,
            backup=self.backupf,
        )

        clearstatus(repo)
        clearcollapsemsg(repo)

        ui.note(_(b"rebase completed\n"))
        # The strip above invalidates any 'undo' backup; remove it.
        util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
        if self.skipped:
            skippedlen = len(self.skipped)
            ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
        fm.end()

        if (
            self.activebookmark
            and self.activebookmark in repo._bookmarks
            and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
        ):
            # Only re-activate the bookmark if we ended up on it.
            bookmarks.activate(repo, self.activebookmark)
792
792
    def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
        '''Restore the repository to its original state.

        Strips the partially-rebased changesets when safe (all mutable and
        no foreign descendants), moves the working directory back to its
        original parent when it sits on a rebase-related node, re-activates
        the previously active bookmark, and always clears the on-disk
        rebase state. Returns 0.
        '''

        repo = self.repo
        try:
            # If the first commits in the rebased set get skipped during the
            # rebase, their values within the state mapping will be the dest
            # rev id. The rebased list must not contain the dest rev
            # (issue4896)
            rebased = [
                s
                for r, s in self.state.items()
                if s >= 0 and s != r and s != self.destmap[r]
            ]
            immutable = [d for d in rebased if not repo[d].mutable()]
            cleanup = True
            if immutable:
                # Public changesets cannot be stripped.
                repo.ui.warn(
                    _(b"warning: can't clean up public changesets %s\n")
                    % b', '.join(bytes(repo[r]) for r in immutable),
                    hint=_(b"see 'hg help phases' for details"),
                )
                cleanup = False

            descendants = set()
            if rebased:
                descendants = set(repo.changelog.descendants(rebased))
            if descendants - set(rebased):
                # Someone committed on top of the rebased changesets;
                # stripping would destroy their work.
                repo.ui.warn(
                    _(
                        b"warning: new changesets detected on "
                        b"destination branch, can't strip\n"
                    )
                )
                cleanup = False

            if cleanup:
                if rebased:
                    strippoints = [
                        c.node() for c in repo.set(b'roots(%ld)', rebased)
                    ]

                updateifonnodes = set(rebased)
                updateifonnodes.update(self.destmap.values())

                if not dryrun and not confirm:
                    # For a real abort (not dry-run/confirm preview) also
                    # move away if we are on the original working dir rev.
                    updateifonnodes.add(self.originalwd)

                shouldupdate = repo[b'.'].rev() in updateifonnodes

                # Update away from the rebase if necessary
                if shouldupdate:
                    mergemod.clean_update(repo[self.originalwd])

                # Strip from the first rebased revision
                if rebased:
                    repair.strip(repo.ui, repo, strippoints, backup=backup)

            if self.activebookmark and self.activebookmark in repo._bookmarks:
                bookmarks.activate(repo, self.activebookmark)

        finally:
            # Always clear persisted rebase state, even when cleanup failed.
            clearstatus(repo)
            clearcollapsemsg(repo)
            if not suppwarns:
                repo.ui.warn(_(b'rebase aborted\n'))
        return 0
860
860
861
861
862 @command(
862 @command(
863 b'rebase',
863 b'rebase',
864 [
864 [
865 (
865 (
866 b's',
866 b's',
867 b'source',
867 b'source',
868 [],
868 [],
869 _(b'rebase the specified changesets and their descendants'),
869 _(b'rebase the specified changesets and their descendants'),
870 _(b'REV'),
870 _(b'REV'),
871 ),
871 ),
872 (
872 (
873 b'b',
873 b'b',
874 b'base',
874 b'base',
875 [],
875 [],
876 _(b'rebase everything from branching point of specified changeset'),
876 _(b'rebase everything from branching point of specified changeset'),
877 _(b'REV'),
877 _(b'REV'),
878 ),
878 ),
879 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
879 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
880 (
880 (
881 b'd',
881 b'd',
882 b'dest',
882 b'dest',
883 b'',
883 b'',
884 _(b'rebase onto the specified changeset'),
884 _(b'rebase onto the specified changeset'),
885 _(b'REV'),
885 _(b'REV'),
886 ),
886 ),
887 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
887 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
888 (
888 (
889 b'm',
889 b'm',
890 b'message',
890 b'message',
891 b'',
891 b'',
892 _(b'use text as collapse commit message'),
892 _(b'use text as collapse commit message'),
893 _(b'TEXT'),
893 _(b'TEXT'),
894 ),
894 ),
895 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
895 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
896 (
896 (
897 b'l',
897 b'l',
898 b'logfile',
898 b'logfile',
899 b'',
899 b'',
900 _(b'read collapse commit message from file'),
900 _(b'read collapse commit message from file'),
901 _(b'FILE'),
901 _(b'FILE'),
902 ),
902 ),
903 (b'k', b'keep', False, _(b'keep original changesets')),
903 (b'k', b'keep', False, _(b'keep original changesets')),
904 (b'', b'keepbranches', False, _(b'keep original branch names')),
904 (b'', b'keepbranches', False, _(b'keep original branch names')),
905 (b'D', b'detach', False, _(b'(DEPRECATED)')),
905 (b'D', b'detach', False, _(b'(DEPRECATED)')),
906 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
906 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
907 (b't', b'tool', b'', _(b'specify merge tool')),
907 (b't', b'tool', b'', _(b'specify merge tool')),
908 (b'', b'stop', False, _(b'stop interrupted rebase')),
908 (b'', b'stop', False, _(b'stop interrupted rebase')),
909 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
909 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
910 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
910 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
911 (
911 (
912 b'',
912 b'',
913 b'auto-orphans',
913 b'auto-orphans',
914 b'',
914 b'',
915 _(
915 _(
916 b'automatically rebase orphan revisions '
916 b'automatically rebase orphan revisions '
917 b'in the specified revset (EXPERIMENTAL)'
917 b'in the specified revset (EXPERIMENTAL)'
918 ),
918 ),
919 ),
919 ),
920 ]
920 ]
921 + cmdutil.dryrunopts
921 + cmdutil.dryrunopts
922 + cmdutil.formatteropts
922 + cmdutil.formatteropts
923 + cmdutil.confirmopts,
923 + cmdutil.confirmopts,
924 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
924 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
925 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
925 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
926 )
926 )
927 def rebase(ui, repo, **opts):
927 def rebase(ui, repo, **opts):
928 """move changeset (and descendants) to a different branch
928 """move changeset (and descendants) to a different branch
929
929
930 Rebase uses repeated merging to graft changesets from one part of
930 Rebase uses repeated merging to graft changesets from one part of
931 history (the source) onto another (the destination). This can be
931 history (the source) onto another (the destination). This can be
932 useful for linearizing *local* changes relative to a master
932 useful for linearizing *local* changes relative to a master
933 development tree.
933 development tree.
934
934
935 Published commits cannot be rebased (see :hg:`help phases`).
935 Published commits cannot be rebased (see :hg:`help phases`).
936 To copy commits, see :hg:`help graft`.
936 To copy commits, see :hg:`help graft`.
937
937
938 If you don't specify a destination changeset (``-d/--dest``), rebase
938 If you don't specify a destination changeset (``-d/--dest``), rebase
939 will use the same logic as :hg:`merge` to pick a destination. if
939 will use the same logic as :hg:`merge` to pick a destination. if
940 the current branch contains exactly one other head, the other head
940 the current branch contains exactly one other head, the other head
941 is merged with by default. Otherwise, an explicit revision with
941 is merged with by default. Otherwise, an explicit revision with
942 which to merge with must be provided. (destination changeset is not
942 which to merge with must be provided. (destination changeset is not
943 modified by rebasing, but new changesets are added as its
943 modified by rebasing, but new changesets are added as its
944 descendants.)
944 descendants.)
945
945
946 Here are the ways to select changesets:
946 Here are the ways to select changesets:
947
947
948 1. Explicitly select them using ``--rev``.
948 1. Explicitly select them using ``--rev``.
949
949
950 2. Use ``--source`` to select a root changeset and include all of its
950 2. Use ``--source`` to select a root changeset and include all of its
951 descendants.
951 descendants.
952
952
953 3. Use ``--base`` to select a changeset; rebase will find ancestors
953 3. Use ``--base`` to select a changeset; rebase will find ancestors
954 and their descendants which are not also ancestors of the destination.
954 and their descendants which are not also ancestors of the destination.
955
955
956 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
956 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
957 rebase will use ``--base .`` as above.
957 rebase will use ``--base .`` as above.
958
958
959 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
959 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
960 can be used in ``--dest``. Destination would be calculated per source
960 can be used in ``--dest``. Destination would be calculated per source
961 revision with ``SRC`` substituted by that single source revision and
961 revision with ``SRC`` substituted by that single source revision and
962 ``ALLSRC`` substituted by all source revisions.
962 ``ALLSRC`` substituted by all source revisions.
963
963
964 Rebase will destroy original changesets unless you use ``--keep``.
964 Rebase will destroy original changesets unless you use ``--keep``.
965 It will also move your bookmarks (even if you do).
965 It will also move your bookmarks (even if you do).
966
966
967 Some changesets may be dropped if they do not contribute changes
967 Some changesets may be dropped if they do not contribute changes
968 (e.g. merges from the destination branch).
968 (e.g. merges from the destination branch).
969
969
970 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
970 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
971 a named branch with two heads. You will need to explicitly specify source
971 a named branch with two heads. You will need to explicitly specify source
972 and/or destination.
972 and/or destination.
973
973
974 If you need to use a tool to automate merge/conflict decisions, you
974 If you need to use a tool to automate merge/conflict decisions, you
975 can specify one with ``--tool``, see :hg:`help merge-tools`.
975 can specify one with ``--tool``, see :hg:`help merge-tools`.
976 As a caveat: the tool will not be used to mediate when a file was
976 As a caveat: the tool will not be used to mediate when a file was
977 deleted, there is no hook presently available for this.
977 deleted, there is no hook presently available for this.
978
978
979 If a rebase is interrupted to manually resolve a conflict, it can be
979 If a rebase is interrupted to manually resolve a conflict, it can be
980 continued with --continue/-c, aborted with --abort/-a, or stopped with
980 continued with --continue/-c, aborted with --abort/-a, or stopped with
981 --stop.
981 --stop.
982
982
983 .. container:: verbose
983 .. container:: verbose
984
984
985 Examples:
985 Examples:
986
986
987 - move "local changes" (current commit back to branching point)
987 - move "local changes" (current commit back to branching point)
988 to the current branch tip after a pull::
988 to the current branch tip after a pull::
989
989
990 hg rebase
990 hg rebase
991
991
992 - move a single changeset to the stable branch::
992 - move a single changeset to the stable branch::
993
993
994 hg rebase -r 5f493448 -d stable
994 hg rebase -r 5f493448 -d stable
995
995
996 - splice a commit and all its descendants onto another part of history::
996 - splice a commit and all its descendants onto another part of history::
997
997
998 hg rebase --source c0c3 --dest 4cf9
998 hg rebase --source c0c3 --dest 4cf9
999
999
1000 - rebase everything on a branch marked by a bookmark onto the
1000 - rebase everything on a branch marked by a bookmark onto the
1001 default branch::
1001 default branch::
1002
1002
1003 hg rebase --base myfeature --dest default
1003 hg rebase --base myfeature --dest default
1004
1004
1005 - collapse a sequence of changes into a single commit::
1005 - collapse a sequence of changes into a single commit::
1006
1006
1007 hg rebase --collapse -r 1520:1525 -d .
1007 hg rebase --collapse -r 1520:1525 -d .
1008
1008
1009 - move a named branch while preserving its name::
1009 - move a named branch while preserving its name::
1010
1010
1011 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1011 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1012
1012
1013 - stabilize orphaned changesets so history looks linear::
1013 - stabilize orphaned changesets so history looks linear::
1014
1014
1015 hg rebase -r 'orphan()-obsolete()'\
1015 hg rebase -r 'orphan()-obsolete()'\
1016 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1016 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1017 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1017 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1018
1018
1019 Configuration Options:
1019 Configuration Options:
1020
1020
1021 You can make rebase require a destination if you set the following config
1021 You can make rebase require a destination if you set the following config
1022 option::
1022 option::
1023
1023
1024 [commands]
1024 [commands]
1025 rebase.requiredest = True
1025 rebase.requiredest = True
1026
1026
1027 By default, rebase will close the transaction after each commit. For
1027 By default, rebase will close the transaction after each commit. For
1028 performance purposes, you can configure rebase to use a single transaction
1028 performance purposes, you can configure rebase to use a single transaction
1029 across the entire rebase. WARNING: This setting introduces a significant
1029 across the entire rebase. WARNING: This setting introduces a significant
1030 risk of losing the work you've done in a rebase if the rebase aborts
1030 risk of losing the work you've done in a rebase if the rebase aborts
1031 unexpectedly::
1031 unexpectedly::
1032
1032
1033 [rebase]
1033 [rebase]
1034 singletransaction = True
1034 singletransaction = True
1035
1035
1036 By default, rebase writes to the working copy, but you can configure it to
1036 By default, rebase writes to the working copy, but you can configure it to
1037 run in-memory for better performance. When the rebase is not moving the
1037 run in-memory for better performance. When the rebase is not moving the
1038 parent(s) of the working copy (AKA the "currently checked out changesets"),
1038 parent(s) of the working copy (AKA the "currently checked out changesets"),
1039 this may also allow it to run even if the working copy is dirty::
1039 this may also allow it to run even if the working copy is dirty::
1040
1040
1041 [rebase]
1041 [rebase]
1042 experimental.inmemory = True
1042 experimental.inmemory = True
1043
1043
1044 Return Values:
1044 Return Values:
1045
1045
1046 Returns 0 on success, 1 if nothing to rebase or there are
1046 Returns 0 on success, 1 if nothing to rebase or there are
1047 unresolved conflicts.
1047 unresolved conflicts.
1048
1048
1049 """
1049 """
1050 opts = pycompat.byteskwargs(opts)
1050 opts = pycompat.byteskwargs(opts)
1051 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1051 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1052 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1052 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1053 if action:
1053 if action:
1054 cmdutil.check_incompatible_arguments(
1054 cmdutil.check_incompatible_arguments(
1055 opts, action, [b'confirm', b'dry_run']
1055 opts, action, [b'confirm', b'dry_run']
1056 )
1056 )
1057 cmdutil.check_incompatible_arguments(
1057 cmdutil.check_incompatible_arguments(
1058 opts, action, [b'rev', b'source', b'base', b'dest']
1058 opts, action, [b'rev', b'source', b'base', b'dest']
1059 )
1059 )
1060 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1060 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1061 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1061 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1062
1062
1063 if action or repo.currenttransaction() is not None:
1063 if action or repo.currenttransaction() is not None:
1064 # in-memory rebase is not compatible with resuming rebases.
1064 # in-memory rebase is not compatible with resuming rebases.
1065 # (Or if it is run within a transaction, since the restart logic can
1065 # (Or if it is run within a transaction, since the restart logic can
1066 # fail the entire transaction.)
1066 # fail the entire transaction.)
1067 inmemory = False
1067 inmemory = False
1068
1068
1069 if opts.get(b'auto_orphans'):
1069 if opts.get(b'auto_orphans'):
1070 disallowed_opts = set(opts) - {b'auto_orphans'}
1070 disallowed_opts = set(opts) - {b'auto_orphans'}
1071 cmdutil.check_incompatible_arguments(
1071 cmdutil.check_incompatible_arguments(
1072 opts, b'auto_orphans', disallowed_opts
1072 opts, b'auto_orphans', disallowed_opts
1073 )
1073 )
1074
1074
1075 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1075 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1076 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1076 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1077 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1077 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1078
1078
1079 if opts.get(b'dry_run') or opts.get(b'confirm'):
1079 if opts.get(b'dry_run') or opts.get(b'confirm'):
1080 return _dryrunrebase(ui, repo, action, opts)
1080 return _dryrunrebase(ui, repo, action, opts)
1081 elif action == b'stop':
1081 elif action == b'stop':
1082 rbsrt = rebaseruntime(repo, ui)
1082 rbsrt = rebaseruntime(repo, ui)
1083 with repo.wlock(), repo.lock():
1083 with repo.wlock(), repo.lock():
1084 rbsrt.restorestatus()
1084 rbsrt.restorestatus()
1085 if rbsrt.collapsef:
1085 if rbsrt.collapsef:
1086 raise error.Abort(_(b"cannot stop in --collapse session"))
1086 raise error.Abort(_(b"cannot stop in --collapse session"))
1087 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1087 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1088 if not (rbsrt.keepf or allowunstable):
1088 if not (rbsrt.keepf or allowunstable):
1089 raise error.Abort(
1089 raise error.Abort(
1090 _(
1090 _(
1091 b"cannot remove original changesets with"
1091 b"cannot remove original changesets with"
1092 b" unrebased descendants"
1092 b" unrebased descendants"
1093 ),
1093 ),
1094 hint=_(
1094 hint=_(
1095 b'either enable obsmarkers to allow unstable '
1095 b'either enable obsmarkers to allow unstable '
1096 b'revisions or use --keep to keep original '
1096 b'revisions or use --keep to keep original '
1097 b'changesets'
1097 b'changesets'
1098 ),
1098 ),
1099 )
1099 )
1100 # update to the current working revision
1100 # update to the current working revision
1101 # to clear interrupted merge
1101 # to clear interrupted merge
1102 hg.updaterepo(repo, rbsrt.originalwd, overwrite=True)
1102 mergemod.clean_update(repo[rbsrt.originalwd])
1103 rbsrt._finishrebase()
1103 rbsrt._finishrebase()
1104 return 0
1104 return 0
1105 elif inmemory:
1105 elif inmemory:
1106 try:
1106 try:
1107 # in-memory merge doesn't support conflicts, so if we hit any, abort
1107 # in-memory merge doesn't support conflicts, so if we hit any, abort
1108 # and re-run as an on-disk merge.
1108 # and re-run as an on-disk merge.
1109 overrides = {(b'rebase', b'singletransaction'): True}
1109 overrides = {(b'rebase', b'singletransaction'): True}
1110 with ui.configoverride(overrides, b'rebase'):
1110 with ui.configoverride(overrides, b'rebase'):
1111 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1111 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1112 except error.InMemoryMergeConflictsError:
1112 except error.InMemoryMergeConflictsError:
1113 ui.warn(
1113 ui.warn(
1114 _(
1114 _(
1115 b'hit merge conflicts; re-running rebase without in-memory'
1115 b'hit merge conflicts; re-running rebase without in-memory'
1116 b' merge\n'
1116 b' merge\n'
1117 )
1117 )
1118 )
1118 )
1119 clearstatus(repo)
1119 clearstatus(repo)
1120 clearcollapsemsg(repo)
1120 clearcollapsemsg(repo)
1121 return _dorebase(ui, repo, action, opts, inmemory=False)
1121 return _dorebase(ui, repo, action, opts, inmemory=False)
1122 else:
1122 else:
1123 return _dorebase(ui, repo, action, opts)
1123 return _dorebase(ui, repo, action, opts)
1124
1124
1125
1125
def _dryrunrebase(ui, repo, action, opts):
    """Run the rebase in-memory, for --dry-run or --confirm.

    The rebase runs with ``inmemory=True`` and ``dryrun=True``.  With
    ``--confirm`` the user is prompted afterwards whether to keep the
    result; with ``--dry-run`` the result is always rolled back.

    Returns 0 on success and 1 if a merge conflict was hit.
    """
    rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
    confirm = opts.get(b'confirm')
    if confirm:
        ui.status(_(b'starting in-memory rebase\n'))
    else:
        ui.status(
            _(b'starting dry-run rebase; repository will not be changed\n')
        )
    with repo.wlock(), repo.lock():
        # Roll back the in-memory rebase in the finally block unless a
        # branch below has already finished or aborted it.
        needsabort = True
        try:
            overrides = {(b'rebase', b'singletransaction'): True}
            with ui.configoverride(overrides, b'rebase'):
                _origrebase(
                    ui, repo, action, opts, rbsrt,
                )
        except error.ConflictResolutionRequired:
            ui.status(_(b'hit a merge conflict\n'))
            return 1
        except error.Abort:
            # NOTE: on Abort we re-raise without running the cleanup in
            # the finally block.
            needsabort = False
            raise
        else:
            if confirm:
                ui.status(_(b'rebase completed successfully\n'))
                if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
                    # finish unfinished rebase
                    rbsrt._finishrebase()
                else:
                    rbsrt._prepareabortorcontinue(
                        isabort=True,
                        backup=False,
                        suppwarns=True,
                        confirm=confirm,
                    )
                needsabort = False
            else:
                ui.status(
                    _(
                        b'dry-run rebase completed successfully; run without'
                        b' -n/--dry-run to perform this rebase\n'
                    )
                )
            return 0
        finally:
            if needsabort:
                # no need to store backup in case of dryrun
                rbsrt._prepareabortorcontinue(
                    isabort=True,
                    backup=False,
                    suppwarns=True,
                    dryrun=opts.get(b'dry_run'),
                )
1180
1180
1181
1181
def _dorebase(ui, repo, action, opts, inmemory=False):
    """Build a rebase runtime for ``opts`` and hand off to _origrebase()."""
    runtime = rebaseruntime(repo, ui, inmemory, opts=opts)
    return _origrebase(ui, repo, action, opts, runtime)
1185
1185
1186
1186
def _origrebase(ui, repo, action, opts, rbsrt):
    """Shared rebase implementation behind _dorebase() and _dryrunrebase().

    ``action`` is ``None`` for a new rebase, or b'continue'/b'abort' to
    resume an interrupted one (b'stop' is handled by the caller, hence the
    assert).  ``rbsrt`` is the rebaseruntime carrying parsed options.

    Returns an integer error code from the prepare steps, or None once the
    rebase has been performed.
    """
    assert action != b'stop'
    with repo.wlock(), repo.lock():
        if opts.get(b'interactive'):
            # Rebase has no interactive mode; point the user at histedit,
            # mentioning --config extensions.histedit= if it isn't enabled.
            try:
                if extensions.find(b'histedit'):
                    enablehistedit = b''
            except KeyError:
                enablehistedit = b" --config extensions.histedit="
            help = b"hg%s help -e histedit" % enablehistedit
            msg = (
                _(
                    b"interactive history editing is supported by the "
                    b"'histedit' extension (see \"%s\")"
                )
                % help
            )
            raise error.Abort(msg)

        if rbsrt.collapsemsg and not rbsrt.collapsef:
            raise error.Abort(_(b'message can only be specified with collapse'))

        if action:
            # Resuming (continue/abort) an existing rebase.
            if rbsrt.collapsef:
                raise error.Abort(
                    _(b'cannot use collapse with continue or abort')
                )
            if action == b'abort' and opts.get(b'tool', False):
                ui.warn(_(b'tool option will be ignored\n'))
            if action == b'continue':
                ms = mergestatemod.mergestate.read(repo)
                mergeutil.checkunresolved(ms)

            retcode = rbsrt._prepareabortorcontinue(
                isabort=(action == b'abort')
            )
            if retcode is not None:
                return retcode
        else:
            # search default destination in this space
            # used in the 'hg pull --rebase' case, see issue 5214.
            destspace = opts.get(b'_destspace')
            destmap = _definedestmap(
                ui,
                repo,
                rbsrt.inmemory,
                opts.get(b'dest', None),
                opts.get(b'source', []),
                opts.get(b'base', []),
                opts.get(b'rev', []),
                destspace=destspace,
            )
            retcode = rbsrt._preparenewrebase(destmap)
            if retcode is not None:
                return retcode
            storecollapsemsg(repo, rbsrt.collapsemsg)

        tr = None

        singletr = ui.configbool(b'rebase', b'singletransaction')
        if singletr:
            tr = repo.transaction(b'rebase')

        # If `rebase.singletransaction` is enabled, wrap the entire operation in
        # one transaction here. Otherwise, transactions are obtained when
        # committing each node, which is slower but allows partial success.
        with util.acceptintervention(tr):
            # Same logic for the dirstate guard, except we don't create one when
            # rebasing in-memory (it's not needed).
            dsguard = None
            if singletr and not rbsrt.inmemory:
                dsguard = dirstateguard.dirstateguard(repo, b'rebase')
            with util.acceptintervention(dsguard):
                rbsrt._performrebase(tr)
                if not rbsrt.dryrun:
                    rbsrt._finishrebase()
1263
1263
1264
1264
def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
    """use revisions argument to define destmap {srcrev: destrev}

    The revisions to rebase come from ``revf`` (--rev), ``srcf``
    (--source) or ``basef`` (--base, defaulting to '.'); ``destf`` is the
    destination revset, which may reference SRC/ALLSRC aliases for
    multi-destination rebases.

    Returns None (after printing a status message) when there is nothing
    to rebase; aborts on invalid input (e.g. rebasing the working copy).
    """
    if revf is None:
        revf = []

    # destspace is here to work around issues with `hg pull --rebase` see
    # issue5214 for details

    cmdutil.checkunfinished(repo)
    if not inmemory:
        cmdutil.bailifchanged(repo)

    if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
        raise error.Abort(
            _(b'you must specify a destination'),
            hint=_(b'use: hg rebase -d REV'),
        )

    dest = None

    if revf:
        rebaseset = scmutil.revrange(repo, revf)
        if not rebaseset:
            ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
            return None
    elif srcf:
        src = scmutil.revrange(repo, srcf)
        if not src:
            ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
            return None
        # `+ (%ld)` to work around `wdir()::` being empty
        rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
    else:
        base = scmutil.revrange(repo, basef or [b'.'])
        if not base:
            ui.status(
                _(b'empty "base" revision set - ' b"can't compute rebase set\n")
            )
            return None
        if destf:
            # --base does not support multiple destinations
            dest = scmutil.revsingle(repo, destf)
        else:
            dest = repo[_destrebase(repo, base, destspace=destspace)]
            destf = bytes(dest)

        roots = []  # selected children of branching points
        bpbase = {}  # {branchingpoint: [origbase]}
        for b in base:  # group bases by branching points
            bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
            bpbase[bp] = bpbase.get(bp, []) + [b]
        if None in bpbase:
            # emulate the old behavior, showing "nothing to rebase" (a better
            # behavior may be abort with "cannot find branching point" error)
            bpbase.clear()
        for bp, bs in pycompat.iteritems(bpbase):  # calculate roots
            roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))

        rebaseset = repo.revs(b'%ld::', roots)

        if not rebaseset:
            # Distinguish the possible "nothing to rebase" situations so the
            # message can tell the user why.
            # transform to list because smartsets are not comparable to
            # lists. This should be improved to honor laziness of
            # smartset.
            if list(base) == [dest.rev()]:
                if basef:
                    ui.status(
                        _(
                            b'nothing to rebase - %s is both "base"'
                            b' and destination\n'
                        )
                        % dest
                    )
                else:
                    ui.status(
                        _(
                            b'nothing to rebase - working directory '
                            b'parent is also destination\n'
                        )
                    )
            elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
                if basef:
                    ui.status(
                        _(
                            b'nothing to rebase - "base" %s is '
                            b'already an ancestor of destination '
                            b'%s\n'
                        )
                        % (b'+'.join(bytes(repo[r]) for r in base), dest)
                    )
                else:
                    ui.status(
                        _(
                            b'nothing to rebase - working '
                            b'directory parent is already an '
                            b'ancestor of destination %s\n'
                        )
                        % dest
                    )
            else:  # can it happen?
                ui.status(
                    _(b'nothing to rebase from %s to %s\n')
                    % (b'+'.join(bytes(repo[r]) for r in base), dest)
                )
            return None

    if nodemod.wdirrev in rebaseset:
        raise error.Abort(_(b'cannot rebase the working copy'))
    rebasingwcp = repo[b'.'].rev() in rebaseset
    ui.log(
        b"rebase",
        b"rebasing working copy parent: %r\n",
        rebasingwcp,
        rebase_rebasing_wcp=rebasingwcp,
    )
    if inmemory and rebasingwcp:
        # Check these since we did not before.
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    if not destf:
        dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
        destf = bytes(dest)

    allsrc = revsetlang.formatspec(b'%ld', rebaseset)
    alias = {b'ALLSRC': allsrc}

    if dest is None:
        try:
            # fast path: try to resolve dest without SRC alias
            dest = scmutil.revsingle(repo, destf, localalias=alias)
        except error.RepoLookupError:
            # multi-dest path: resolve dest for each SRC separately
            destmap = {}
            for r in rebaseset:
                alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
                # use repo.anyrevs instead of scmutil.revsingle because we
                # don't want to abort if destset is empty.
                destset = repo.anyrevs([destf], user=True, localalias=alias)
                size = len(destset)
                if size == 1:
                    destmap[r] = destset.first()
                elif size == 0:
                    ui.note(_(b'skipping %s - empty destination\n') % repo[r])
                else:
                    raise error.Abort(
                        _(b'rebase destination for %s is not unique') % repo[r]
                    )

    if dest is not None:
        # single-dest case: assign dest to each rev in rebaseset
        destrev = dest.rev()
        destmap = {r: destrev for r in rebaseset}  # {srcrev: destrev}

    if not destmap:
        ui.status(_(b'nothing to rebase - empty destination\n'))
        return None

    return destmap
1424
1424
1425
1425
def externalparent(repo, state, destancestors):
    """Return the revision that should be used as the second parent
    when the revisions in ``state`` are collapsed on top of
    ``destancestors``.  Abort if there is more than one such parent.
    """
    lowest = min(state)
    # Parents of rebased revisions that live outside both the rebased set
    # and the destination's ancestors.
    outside = {
        p.rev()
        for rev in state
        if rev != lowest
        for p in repo[rev].parents()
        if p.rev() not in state and p.rev() not in destancestors
    }
    if not outside:
        return nullrev
    if len(outside) == 1:
        return outside.pop()
    raise error.Abort(
        _(
            b'unable to collapse on top of %d, there is more '
            b'than one external parent: %s'
        )
        % (max(destancestors), b', '.join(b"%d" % p for p in sorted(outside)))
    )
1450
1450
1451
1451
def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
    '''Commit the memory changes with parents p1 and p2.
    Return node of committed revision.'''
    # By convention, ``extra['branch']`` (set by extrafn) clobbers
    # ``branch`` (used when passing ``--keepbranches``).
    branch = extra.get(b'branch')

    # FIXME: _compact() is required to correctly detect changed files.  This
    # was added to fix a regression shortly before the 5.5 release.  A proper
    # fix will be done in the default branch.
    wctx._compact()
    mctx = wctx.tomemctx(
        commitmsg,
        date=date,
        extra=extra,
        user=user,
        branch=branch,
        editor=editor,
    )
    allowempty = repo.ui.configbool(b'ui', b'allowemptycommit')
    if mctx.isempty() and not allowempty:
        return None
    node = repo.commitctx(mctx)
    wctx.clean()  # Might be reused
    return node
1478
1478
1479
1479
def commitnode(repo, editor, extra, user, date, commitmsg):
    '''Commit the working directory changes on disk.
    Return node of committed revision.'''
    # Under rebase.singletransaction the caller already holds a dirstate
    # guard, so only create one here otherwise.
    if repo.ui.configbool(b'rebase', b'singletransaction'):
        guard = util.nullcontextmanager()
    else:
        guard = dirstateguard.dirstateguard(repo, b'rebase')
    with guard:
        # Commit might fail if unresolved files exist
        newnode = repo.commit(
            text=commitmsg, user=user, date=date, extra=extra, editor=editor
        )

        repo.dirstate.setbranch(repo[newnode].branch())
    return newnode
1494
1494
1495
1495
def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
    """Rebase a single revision rev on top of p1 using base as merge ancestor

    ``wctx`` is either the on-disk working context or an in-memory
    (overlay) context; ``p2`` becomes the second parent of the result.
    Raises InMemoryMergeConflictsError (in-memory) or
    ConflictResolutionRequired (on-disk) when the merge leaves
    unresolved files.
    """
    # Merge phase
    # Update to destination and merge it with local
    p1ctx = repo[p1]
    if wctx.isinmemory():
        wctx.setbase(p1ctx)
    else:
        if repo[b'.'].rev() != p1:
            repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
            mergemod.clean_update(p1ctx)
        else:
            repo.ui.debug(b" already in destination\n")
        # This is, alas, necessary to invalidate workingctx's manifest cache,
        # as well as other data we litter on it in other places.
        wctx = repo[None]
        repo.dirstate.write(repo.currenttransaction())
    ctx = repo[rev]
    repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
    if base is not None:
        repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))

    # See explanation in merge.graft()
    mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
    stats = mergemod.update(
        repo,
        rev,
        branchmerge=True,
        force=True,
        ancestor=base,
        mergeancestor=mergeancestor,
        labels=[b'dest', b'source'],
        wc=wctx,
    )
    wctx.setparents(p1ctx.node(), repo[p2].node())
    if collapse:
        copies.graftcopies(wctx, ctx, p1ctx)
    else:
        # If we're not using --collapse, we need to
        # duplicate copies between the revision we're
        # rebasing and its first parent.
        copies.graftcopies(wctx, ctx, ctx.p1())

    if stats.unresolvedcount > 0:
        if wctx.isinmemory():
            raise error.InMemoryMergeConflictsError()
        else:
            raise error.ConflictResolutionRequired(b'rebase')
1544
1544
1545
1545
def adjustdest(repo, rev, destmap, state, skipped):
    r"""adjust rebase destination given the current rebase state

    rev is what is being rebased. Return a list of two revs, which are the
    adjusted destinations for rev's p1 and p2, respectively. If a parent is
    nullrev, return dest without adjustment for it.

    For example, when doing rebasing B+E to F, C to G, rebase will first move B
    to B1, and E's destination will be adjusted from F to B1.

        B1 <- written during rebasing B
        |
        F <- original destination of B, E
        |
        | E <- rev, which is being rebased
        | |
        | D <- prev, one parent of rev being checked
        | |
        | x <- skipped, ex. no successor or successor in (::dest)
        | |
        | C <- rebased as C', different destination
        | |
        | B <- rebased as B1     C'
        |/                       |
        A                        G <- destination of C, different

    Another example about merge changeset, rebase -r C+G+H -d K, rebase will
    first move C to C1, G to G1, and when it's checking H, the adjusted
    destinations will be [C1, G1].

     H       C1 G1
    /|       | /
   F G       |/
  K | |  ->  K
  | C D      |
  | |/       |
  | B        | ...
  |/         |/
  A          A

    Besides, adjust dest according to existing rebase information. For example,

      B C D    B needs to be rebased on top of C, C needs to be rebased on top
       \|/     of D. We will rebase C first.

      C'       After rebasing C, when considering B's destination, use C'
      |        instead of the original C.
      B D
       \ /
        A
    """
    # pick already rebased revs with same dest from state as interesting source
    dest = destmap[rev]
    source = [
        s
        for s, d in state.items()
        if d > 0 and destmap[s] == dest and s not in skipped
    ]

    result = []
    for prev in repo.changelog.parentrevs(rev):
        adjusted = dest
        if prev != nullrev:
            # latest already-rebased rev that is an ancestor of this parent;
            # its rebased result is a better destination than the raw dest
            candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
            if candidate is not None:
                adjusted = state[candidate]
        if adjusted == dest and dest in state:
            adjusted = state[dest]
            if adjusted == revtodo:
                # sortsource should produce an order that makes this impossible
                raise error.ProgrammingError(
                    b'rev %d should be rebased already at this time' % dest
                )
        result.append(adjusted)
    return result
1622
1622
1623
1623
def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
    """
    Abort if rebase will create divergence or rebase is noop because of markers

    `rebaseobsrevs`: set of obsolete revision in source
    `rebaseobsskipped`: set of revisions from source skipped because they have
    successors in destination or no non-obsolete successor.

    Raises error.Abort unless the user has explicitly allowed divergence via
    the experimental.evolution.allowdivergence config knob.
    """
    # Obsolete node with successors not in dest leads to divergence
    divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
    divergencebasecandidates = rebaseobsrevs - rebaseobsskipped

    if divergencebasecandidates and not divergenceok:
        divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
        msg = _(b"this rebase will cause divergences from: %s")
        h = _(
            b"to force the rebase please set "
            b"experimental.evolution.allowdivergence=True"
        )
        raise error.Abort(msg % (b",".join(divhashes),), hint=h)
1644
1644
1645
1645
def successorrevs(unfi, rev):
    """yield revision numbers for successors of rev

    `unfi` must be an unfiltered repo (asserted below), since successors may
    only exist as filtered (hidden) nodes. Successors whose nodes are not
    present in the local changelog are silently skipped.
    """
    assert unfi.filtername is None
    get_rev = unfi.changelog.index.get_rev
    for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
        r = get_rev(s)
        if r is not None:
            yield r
1654
1654
1655
1655
def defineparents(repo, rev, destmap, state, skipped, obsskipped):
    """Return new parents and optionally a merge base for rev being rebased

    Returns a 3-tuple (newp1, newp2, base) of revision numbers; a parent slot
    is nullrev when unused.

    The destination specified by "dest" cannot always be used directly because
    previously rebase result could affect destination. For example,

          D E    rebase -r C+D+E -d B
          |/     C will be rebased to C'
        B C      D's new destination will be C' instead of B
        |/       E's new destination will be C' instead of B
        A

    The new parents of a merge is slightly more complicated. See the comment
    block below.
    """
    # use unfiltered changelog since successorrevs may return filtered nodes
    assert repo.filtername is None
    cl = repo.changelog
    isancestor = cl.isancestorrev

    dest = destmap[rev]
    oldps = repo.changelog.parentrevs(rev)  # old parents
    newps = [nullrev, nullrev]  # new parents
    dests = adjustdest(repo, rev, destmap, state, skipped)
    bases = list(oldps)  # merge base candidates, initially just old parents

    if all(r == nullrev for r in oldps[1:]):
        # For non-merge changeset, just move p to adjusted dest as requested.
        newps[0] = dests[0]
    else:
        # For merge changeset, if we move p to dests[i] unconditionally, both
        # parents may change and the end result looks like "the merge loses a
        # parent", which is a surprise. This is a limit because "--dest" only
        # accepts one dest per src.
        #
        # Therefore, only move p with reasonable conditions (in this order):
        #   1. use dest, if dest is a descendent of (p or one of p's successors)
        #   2. use p's rebased result, if p is rebased (state[p] > 0)
        #
        # Comparing with adjustdest, the logic here does some additional work:
        #   1. decide which parents will not be moved towards dest
        #   2. if the above decision is "no", should a parent still be moved
        #      because it was rebased?
        #
        # For example:
        #
        #     C    # "rebase -r C -d D" is an error since none of the parents
        #    /|    # can be moved. "rebase -r B+C -d D" will move C's parent
        #   A B D  # B (using rule "2."), since B will be rebased.
        #
        # The loop tries to be not rely on the fact that a Mercurial node has
        # at most 2 parents.
        for i, p in enumerate(oldps):
            np = p  # new parent
            if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
                np = dests[i]
            elif p in state and state[p] > 0:
                np = state[p]

            # If one parent becomes an ancestor of the other, drop the ancestor
            for j, x in enumerate(newps[:i]):
                if x == nullrev:
                    continue
                if isancestor(np, x):  # CASE-1
                    np = nullrev
                elif isancestor(x, np):  # CASE-2
                    newps[j] = np
                    np = nullrev
                    # New parents forming an ancestor relationship does not
                    # mean the old parents have a similar relationship. Do not
                    # set bases[x] to nullrev.
                    bases[j], bases[i] = bases[i], bases[j]

            newps[i] = np

        # "rebasenode" updates to new p1, and the old p1 will be used as merge
        # base. If only p2 changes, merging using unchanged p1 as merge base is
        # suboptimal. Therefore swap parents to make the merge sane.
        if newps[1] != nullrev and oldps[0] == newps[0]:
            assert len(newps) == 2 and len(oldps) == 2
            newps.reverse()
            bases.reverse()

        # No parent change might be an error because we fail to make rev a
        # descendent of requested dest. This can happen, for example:
        #
        #     C    # rebase -r C -d D
        #    /|    # None of A and B will be changed to D and rebase fails.
        #   A B D
        if set(newps) == set(oldps) and dest not in newps:
            raise error.Abort(
                _(
                    b'cannot rebase %d:%s without '
                    b'moving at least one of its parents'
                )
                % (rev, repo[rev])
            )

    # Source should not be ancestor of dest. The check here guarantees it's
    # impossible. With multi-dest, the initial check does not cover complex
    # cases since we don't have abstractions to dry-run rebase cheaply.
    if any(p != nullrev and isancestor(rev, p) for p in newps):
        raise error.Abort(_(b'source is ancestor of destination'))

    # Check if the merge will contain unwanted changes. That may happen if
    # there are multiple special (non-changelog ancestor) merge bases, which
    # cannot be handled well by the 3-way merge algorithm. For example:
    #
    #     F
    #    /|
    #   D E  # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
    #   | |  # as merge base, the difference between D and F will include
    #   B C  # C, so the rebased F will contain C surprisingly. If "E" was
    #   |/   #  chosen, the rebased F will contain B.
    #   A Z
    #
    # But our merge base candidates (D and E in above case) could still be
    # better than the default (ancestor(F, Z) == null). Therefore still
    # pick one (so choose p1 above).
    if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
        unwanted = [None, None]  # unwanted[i]: unwanted revs if choose bases[i]
        for i, base in enumerate(bases):
            if base == nullrev or base in newps:
                continue
            # Revisions in the side (not chosen as merge base) branch that
            # might contain "surprising" contents
            other_bases = set(bases) - {base}
            siderevs = list(
                repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
            )

            # If those revisions are covered by rebaseset, the result is good.
            # A merge in rebaseset would be considered to cover its ancestors.
            if siderevs:
                rebaseset = [
                    r for r, d in state.items() if d > 0 and r not in obsskipped
                ]
                merges = [
                    r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
                ]
                unwanted[i] = list(
                    repo.revs(
                        b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
                    )
                )

        if any(revs is not None for revs in unwanted):
            # Choose a merge base that has a minimal number of unwanted revs.
            l, i = min(
                (len(revs), i)
                for i, revs in enumerate(unwanted)
                if revs is not None
            )

            # The merge will include unwanted revisions. Abort now. Revisit this if
            # we have a more advanced merge algorithm that handles multiple bases.
            if l > 0:
                unwanteddesc = _(b' or ').join(
                    (
                        b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
                        for revs in unwanted
                        if revs is not None
                    )
                )
                raise error.Abort(
                    _(b'rebasing %d:%s will include unwanted changes from %s')
                    % (rev, repo[rev], unwanteddesc)
                )

            # newps[0] should match merge base if possible. Currently, if newps[i]
            # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
            # the other's ancestor. In that case, it's fine to not swap newps here.
            # (see CASE-1 and CASE-2 above)
            if i != 0:
                if newps[i] != nullrev:
                    newps[0], newps[i] = newps[i], newps[0]
                bases[0], bases[i] = bases[i], bases[0]

    # "rebasenode" updates to new p1, use the corresponding merge base.
    base = bases[0]

    repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))

    return newps[0], newps[1], base
1840
1840
1841
1841
def isagitpatch(repo, patchname):
    """Return true if the given patch is in git format

    Scans the mq patch file under repo.mq.path for a `diff --git` header.
    """
    mqpatch = os.path.join(repo.mq.path, patchname)
    for line in patch.linereader(open(mqpatch, b'rb')):
        if line.startswith(b'diff --git'):
            return True
    return False
1849
1849
1850
1850
def updatemq(repo, state, skipped, **opts):
    """Update rebased mq patches - finalize and then import them

    `state` maps old revs to their rebased revs; `skipped` contains revs that
    were dropped during the rebase. Applied patches whose revs were rebased
    are finalized and re-imported at their new revisions; skipped ones are
    removed from the series (guards preserved for the survivors).
    """
    mqrebase = {}
    mq = repo.mq
    original_series = mq.fullseries[:]
    skippedpatches = set()

    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug(
                b'revision %d is an mq patch (%s), finalize it.\n'
                % (rev, p.name)
            )
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev not in skipped:
                name, isgit = mqrebase[rev]
                repo.ui.note(
                    _(b'updating mq patch %s to %d:%s\n')
                    % (name, state[rev], repo[state[rev]])
                )
                mq.qimport(
                    repo,
                    (),
                    patchname=name,
                    git=isgit,
                    rev=[b"%d" % state[rev]],
                )
            else:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [
            s
            for s in original_series
            if mq.guard_re.split(s, 1)[0] not in skippedpatches
        ]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
1903
1903
1904
1904
def storecollapsemsg(repo, collapsemsg):
    """Store the collapse message to allow recovery

    Writes the message (or an empty line when None/empty) to
    .hg/last-message.txt so an interrupted --collapse can be resumed.
    """
    collapsemsg = collapsemsg or b''
    f = repo.vfs(b"last-message.txt", b"w")
    f.write(b"%s\n" % collapsemsg)
    f.close()
1911
1911
1912
1912
def clearcollapsemsg(repo):
    """Remove collapse message file

    Missing file is not an error (ignoremissing=True).
    """
    repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1916
1916
1917
1917
def restorecollapsemsg(repo, isabort):
    """Restore previously stored collapse message

    Returns the first line of .hg/last-message.txt (stripped). When the file
    is missing: returns b'' if aborting, otherwise raises error.Abort since a
    resumed collapse needs its message. Other IOErrors propagate.
    """
    try:
        f = repo.vfs(b"last-message.txt")
        collapsemsg = f.readline().strip()
        f.close()
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        if isabort:
            # Oh well, just abort like normal
            collapsemsg = b''
        else:
            raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
    return collapsemsg
1933
1933
1934
1934
def clearstatus(repo):
    """Remove the status files"""
    # Make sure the active transaction won't write the state file
    tr = repo.currenttransaction()
    if tr:
        tr.removefilegenerator(b'rebasestate')
    repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1942
1942
1943
1943
def sortsource(destmap):
    """yield source revisions in an order that we only rebase things once

    If source and destination overlaps, we should filter out revisions
    depending on other revisions which hasn't been rebased yet.

    Yield a sorted list of revisions each time.

    For example, when rebasing A to B, B to C. This function yields [B], then
    [A], indicating B needs to be rebased first.

    Raise if there is a cycle so the rebase is impossible.
    """
    srcset = set(destmap)
    while srcset:
        srclist = sorted(srcset)
        result = []
        for r in srclist:
            # ready iff its destination is not itself pending rebase
            if destmap[r] not in srcset:
                result.append(r)
        if not result:
            # every remaining rev depends on another remaining rev: a cycle
            raise error.Abort(_(b'source and destination form a cycle'))
        srcset -= set(result)
        yield result
1968
1968
1969
1969
def buildstate(repo, destmap, collapse):
    '''Define which revisions are going to be rebased and where

    repo: repo
    destmap: {srcrev: destrev}
    collapse: bool, whether this is a --collapse rebase

    Returns (originalwd, destmap, state) where state maps each source rev to
    revtodo (pending) or to itself (already in place), or None when there is
    nothing to do. Raises error.Abort on invalid source/destination.
    '''
    rebaseset = destmap.keys()
    originalwd = repo[b'.'].rev()

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if b'qtip' in repo.tags():
        mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
        if set(destmap.values()) & mqapplied:
            raise error.Abort(_(b'cannot rebase onto an applied mq patch'))

    # Get "cycle" error early by exhausting the generator.
    sortedsrc = list(sortsource(destmap))  # a list of sorted revs
    if not sortedsrc:
        raise error.Abort(_(b'no matching revisions'))

    # Only check the first batch of revisions to rebase not depending on other
    # rebaseset. This means "source is ancestor of destination" for the second
    # (and following) batches of revisions are not checked here. We rely on
    # "defineparents" to do that check.
    roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
    if not roots:
        raise error.Abort(_(b'no matching revisions'))

    def revof(r):
        return r.rev()

    roots = sorted(roots, key=revof)
    state = dict.fromkeys(rebaseset, revtodo)
    emptyrebase = len(sortedsrc) == 1
    for root in roots:
        dest = repo[destmap[root.rev()]]
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise error.Abort(_(b'source is ancestor of destination'))
        if commonbase == dest:
            wctx = repo[None]
            if dest == wctx.p1():
                # when rebasing to '.', it will use the current wd branch name
                samebranch = root.branch() == wctx.branch()
            else:
                samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and dest in root.parents():
                # mark the revision as done by setting its new revision
                # equal to its old (current) revisions
                state[root.rev()] = root.rev()
                repo.ui.debug(b'source is a child of destination\n')
                continue

        emptyrebase = False
        repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
    if emptyrebase:
        return None
    for rev in sorted(state):
        parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
        # if all parents of this revision are done, then so is this revision
        if parents and all((state.get(p) == p for p in parents)):
            state[rev] = rev
    return originalwd, destmap, state
2035
2035
2036
2036
2037 def clearrebased(
2037 def clearrebased(
2038 ui,
2038 ui,
2039 repo,
2039 repo,
2040 destmap,
2040 destmap,
2041 state,
2041 state,
2042 skipped,
2042 skipped,
2043 collapsedas=None,
2043 collapsedas=None,
2044 keepf=False,
2044 keepf=False,
2045 fm=None,
2045 fm=None,
2046 backup=True,
2046 backup=True,
2047 ):
2047 ):
2048 """dispose of rebased revision at the end of the rebase
2048 """dispose of rebased revision at the end of the rebase
2049
2049
2050 If `collapsedas` is not None, the rebase was a collapse whose result if the
2050 If `collapsedas` is not None, the rebase was a collapse whose result if the
2051 `collapsedas` node.
2051 `collapsedas` node.
2052
2052
2053 If `keepf` is not True, the rebase has --keep set and no nodes should be
2053 If `keepf` is not True, the rebase has --keep set and no nodes should be
2054 removed (but bookmarks still need to be moved).
2054 removed (but bookmarks still need to be moved).
2055
2055
2056 If `backup` is False, no backup will be stored when stripping rebased
2056 If `backup` is False, no backup will be stored when stripping rebased
2057 revisions.
2057 revisions.
2058 """
2058 """
2059 tonode = repo.changelog.node
2059 tonode = repo.changelog.node
2060 replacements = {}
2060 replacements = {}
2061 moves = {}
2061 moves = {}
2062 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2062 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2063
2063
2064 collapsednodes = []
2064 collapsednodes = []
2065 for rev, newrev in sorted(state.items()):
2065 for rev, newrev in sorted(state.items()):
2066 if newrev >= 0 and newrev != rev:
2066 if newrev >= 0 and newrev != rev:
2067 oldnode = tonode(rev)
2067 oldnode = tonode(rev)
2068 newnode = collapsedas or tonode(newrev)
2068 newnode = collapsedas or tonode(newrev)
2069 moves[oldnode] = newnode
2069 moves[oldnode] = newnode
2070 succs = None
2070 succs = None
2071 if rev in skipped:
2071 if rev in skipped:
2072 if stripcleanup or not repo[rev].obsolete():
2072 if stripcleanup or not repo[rev].obsolete():
2073 succs = ()
2073 succs = ()
2074 elif collapsedas:
2074 elif collapsedas:
2075 collapsednodes.append(oldnode)
2075 collapsednodes.append(oldnode)
2076 else:
2076 else:
2077 succs = (newnode,)
2077 succs = (newnode,)
2078 if succs is not None:
2078 if succs is not None:
2079 replacements[(oldnode,)] = succs
2079 replacements[(oldnode,)] = succs
2080 if collapsednodes:
2080 if collapsednodes:
2081 replacements[tuple(collapsednodes)] = (collapsedas,)
2081 replacements[tuple(collapsednodes)] = (collapsedas,)
2082 if fm:
2082 if fm:
2083 hf = fm.hexfunc
2083 hf = fm.hexfunc
2084 fl = fm.formatlist
2084 fl = fm.formatlist
2085 fd = fm.formatdict
2085 fd = fm.formatdict
2086 changes = {}
2086 changes = {}
2087 for oldns, newn in pycompat.iteritems(replacements):
2087 for oldns, newn in pycompat.iteritems(replacements):
2088 for oldn in oldns:
2088 for oldn in oldns:
2089 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2089 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2090 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2090 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2091 fm.data(nodechanges=nodechanges)
2091 fm.data(nodechanges=nodechanges)
2092 if keepf:
2092 if keepf:
2093 replacements = {}
2093 replacements = {}
2094 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2094 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2095
2095
2096
2096
2097 def pullrebase(orig, ui, repo, *args, **opts):
2097 def pullrebase(orig, ui, repo, *args, **opts):
2098 """Call rebase after pull if the latter has been invoked with --rebase"""
2098 """Call rebase after pull if the latter has been invoked with --rebase"""
2099 if opts.get('rebase'):
2099 if opts.get('rebase'):
2100 if ui.configbool(b'commands', b'rebase.requiredest'):
2100 if ui.configbool(b'commands', b'rebase.requiredest'):
2101 msg = _(b'rebase destination required by configuration')
2101 msg = _(b'rebase destination required by configuration')
2102 hint = _(b'use hg pull followed by hg rebase -d DEST')
2102 hint = _(b'use hg pull followed by hg rebase -d DEST')
2103 raise error.Abort(msg, hint=hint)
2103 raise error.Abort(msg, hint=hint)
2104
2104
2105 with repo.wlock(), repo.lock():
2105 with repo.wlock(), repo.lock():
2106 if opts.get('update'):
2106 if opts.get('update'):
2107 del opts['update']
2107 del opts['update']
2108 ui.debug(
2108 ui.debug(
2109 b'--update and --rebase are not compatible, ignoring '
2109 b'--update and --rebase are not compatible, ignoring '
2110 b'the update flag\n'
2110 b'the update flag\n'
2111 )
2111 )
2112
2112
2113 cmdutil.checkunfinished(repo, skipmerge=True)
2113 cmdutil.checkunfinished(repo, skipmerge=True)
2114 cmdutil.bailifchanged(
2114 cmdutil.bailifchanged(
2115 repo,
2115 repo,
2116 hint=_(
2116 hint=_(
2117 b'cannot pull with rebase: '
2117 b'cannot pull with rebase: '
2118 b'please commit or shelve your changes first'
2118 b'please commit or shelve your changes first'
2119 ),
2119 ),
2120 )
2120 )
2121
2121
2122 revsprepull = len(repo)
2122 revsprepull = len(repo)
2123 origpostincoming = commands.postincoming
2123 origpostincoming = commands.postincoming
2124
2124
2125 def _dummy(*args, **kwargs):
2125 def _dummy(*args, **kwargs):
2126 pass
2126 pass
2127
2127
2128 commands.postincoming = _dummy
2128 commands.postincoming = _dummy
2129 try:
2129 try:
2130 ret = orig(ui, repo, *args, **opts)
2130 ret = orig(ui, repo, *args, **opts)
2131 finally:
2131 finally:
2132 commands.postincoming = origpostincoming
2132 commands.postincoming = origpostincoming
2133 revspostpull = len(repo)
2133 revspostpull = len(repo)
2134 if revspostpull > revsprepull:
2134 if revspostpull > revsprepull:
2135 # --rev option from pull conflict with rebase own --rev
2135 # --rev option from pull conflict with rebase own --rev
2136 # dropping it
2136 # dropping it
2137 if 'rev' in opts:
2137 if 'rev' in opts:
2138 del opts['rev']
2138 del opts['rev']
2139 # positional argument from pull conflicts with rebase's own
2139 # positional argument from pull conflicts with rebase's own
2140 # --source.
2140 # --source.
2141 if 'source' in opts:
2141 if 'source' in opts:
2142 del opts['source']
2142 del opts['source']
2143 # revsprepull is the len of the repo, not revnum of tip.
2143 # revsprepull is the len of the repo, not revnum of tip.
2144 destspace = list(repo.changelog.revs(start=revsprepull))
2144 destspace = list(repo.changelog.revs(start=revsprepull))
2145 opts['_destspace'] = destspace
2145 opts['_destspace'] = destspace
2146 try:
2146 try:
2147 rebase(ui, repo, **opts)
2147 rebase(ui, repo, **opts)
2148 except error.NoMergeDestAbort:
2148 except error.NoMergeDestAbort:
2149 # we can maybe update instead
2149 # we can maybe update instead
2150 rev, _a, _b = destutil.destupdate(repo)
2150 rev, _a, _b = destutil.destupdate(repo)
2151 if rev == repo[b'.'].rev():
2151 if rev == repo[b'.'].rev():
2152 ui.status(_(b'nothing to rebase\n'))
2152 ui.status(_(b'nothing to rebase\n'))
2153 else:
2153 else:
2154 ui.status(_(b'nothing to rebase - updating instead\n'))
2154 ui.status(_(b'nothing to rebase - updating instead\n'))
2155 # not passing argument to get the bare update behavior
2155 # not passing argument to get the bare update behavior
2156 # with warning and trumpets
2156 # with warning and trumpets
2157 commands.update(ui, repo)
2157 commands.update(ui, repo)
2158 else:
2158 else:
2159 if opts.get('tool'):
2159 if opts.get('tool'):
2160 raise error.Abort(_(b'--tool can only be used with --rebase'))
2160 raise error.Abort(_(b'--tool can only be used with --rebase'))
2161 ret = orig(ui, repo, *args, **opts)
2161 ret = orig(ui, repo, *args, **opts)
2162
2162
2163 return ret
2163 return ret
2164
2164
2165
2165
2166 def _filterobsoleterevs(repo, revs):
2166 def _filterobsoleterevs(repo, revs):
2167 """returns a set of the obsolete revisions in revs"""
2167 """returns a set of the obsolete revisions in revs"""
2168 return {r for r in revs if repo[r].obsolete()}
2168 return {r for r in revs if repo[r].obsolete()}
2169
2169
2170
2170
2171 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2171 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
2172 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2172 """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
2173
2173
2174 `obsoletenotrebased` is a mapping mapping obsolete => successor for all
2174 `obsoletenotrebased` is a mapping mapping obsolete => successor for all
2175 obsolete nodes to be rebased given in `rebaseobsrevs`.
2175 obsolete nodes to be rebased given in `rebaseobsrevs`.
2176
2176
2177 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2177 `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
2178 without a successor in destination.
2178 without a successor in destination.
2179
2179
2180 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2180 `obsoleteextinctsuccessors` is a set of obsolete revisions with only
2181 obsolete successors.
2181 obsolete successors.
2182 """
2182 """
2183 obsoletenotrebased = {}
2183 obsoletenotrebased = {}
2184 obsoletewithoutsuccessorindestination = set()
2184 obsoletewithoutsuccessorindestination = set()
2185 obsoleteextinctsuccessors = set()
2185 obsoleteextinctsuccessors = set()
2186
2186
2187 assert repo.filtername is None
2187 assert repo.filtername is None
2188 cl = repo.changelog
2188 cl = repo.changelog
2189 get_rev = cl.index.get_rev
2189 get_rev = cl.index.get_rev
2190 extinctrevs = set(repo.revs(b'extinct()'))
2190 extinctrevs = set(repo.revs(b'extinct()'))
2191 for srcrev in rebaseobsrevs:
2191 for srcrev in rebaseobsrevs:
2192 srcnode = cl.node(srcrev)
2192 srcnode = cl.node(srcrev)
2193 # XXX: more advanced APIs are required to handle split correctly
2193 # XXX: more advanced APIs are required to handle split correctly
2194 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2194 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2195 # obsutil.allsuccessors includes node itself
2195 # obsutil.allsuccessors includes node itself
2196 successors.remove(srcnode)
2196 successors.remove(srcnode)
2197 succrevs = {get_rev(s) for s in successors}
2197 succrevs = {get_rev(s) for s in successors}
2198 succrevs.discard(None)
2198 succrevs.discard(None)
2199 if succrevs.issubset(extinctrevs):
2199 if succrevs.issubset(extinctrevs):
2200 # all successors are extinct
2200 # all successors are extinct
2201 obsoleteextinctsuccessors.add(srcrev)
2201 obsoleteextinctsuccessors.add(srcrev)
2202 if not successors:
2202 if not successors:
2203 # no successor
2203 # no successor
2204 obsoletenotrebased[srcrev] = None
2204 obsoletenotrebased[srcrev] = None
2205 else:
2205 else:
2206 dstrev = destmap[srcrev]
2206 dstrev = destmap[srcrev]
2207 for succrev in succrevs:
2207 for succrev in succrevs:
2208 if cl.isancestorrev(succrev, dstrev):
2208 if cl.isancestorrev(succrev, dstrev):
2209 obsoletenotrebased[srcrev] = succrev
2209 obsoletenotrebased[srcrev] = succrev
2210 break
2210 break
2211 else:
2211 else:
2212 # If 'srcrev' has a successor in rebase set but none in
2212 # If 'srcrev' has a successor in rebase set but none in
2213 # destination (which would be catched above), we shall skip it
2213 # destination (which would be catched above), we shall skip it
2214 # and its descendants to avoid divergence.
2214 # and its descendants to avoid divergence.
2215 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2215 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2216 obsoletewithoutsuccessorindestination.add(srcrev)
2216 obsoletewithoutsuccessorindestination.add(srcrev)
2217
2217
2218 return (
2218 return (
2219 obsoletenotrebased,
2219 obsoletenotrebased,
2220 obsoletewithoutsuccessorindestination,
2220 obsoletewithoutsuccessorindestination,
2221 obsoleteextinctsuccessors,
2221 obsoleteextinctsuccessors,
2222 )
2222 )
2223
2223
2224
2224
2225 def abortrebase(ui, repo):
2225 def abortrebase(ui, repo):
2226 with repo.wlock(), repo.lock():
2226 with repo.wlock(), repo.lock():
2227 rbsrt = rebaseruntime(repo, ui)
2227 rbsrt = rebaseruntime(repo, ui)
2228 rbsrt._prepareabortorcontinue(isabort=True)
2228 rbsrt._prepareabortorcontinue(isabort=True)
2229
2229
2230
2230
2231 def continuerebase(ui, repo):
2231 def continuerebase(ui, repo):
2232 with repo.wlock(), repo.lock():
2232 with repo.wlock(), repo.lock():
2233 rbsrt = rebaseruntime(repo, ui)
2233 rbsrt = rebaseruntime(repo, ui)
2234 ms = mergestatemod.mergestate.read(repo)
2234 ms = mergestatemod.mergestate.read(repo)
2235 mergeutil.checkunresolved(ms)
2235 mergeutil.checkunresolved(ms)
2236 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2236 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2237 if retcode is not None:
2237 if retcode is not None:
2238 return retcode
2238 return retcode
2239 rbsrt._performrebase(None)
2239 rbsrt._performrebase(None)
2240 rbsrt._finishrebase()
2240 rbsrt._finishrebase()
2241
2241
2242
2242
2243 def summaryhook(ui, repo):
2243 def summaryhook(ui, repo):
2244 if not repo.vfs.exists(b'rebasestate'):
2244 if not repo.vfs.exists(b'rebasestate'):
2245 return
2245 return
2246 try:
2246 try:
2247 rbsrt = rebaseruntime(repo, ui, {})
2247 rbsrt = rebaseruntime(repo, ui, {})
2248 rbsrt.restorestatus()
2248 rbsrt.restorestatus()
2249 state = rbsrt.state
2249 state = rbsrt.state
2250 except error.RepoLookupError:
2250 except error.RepoLookupError:
2251 # i18n: column positioning for "hg summary"
2251 # i18n: column positioning for "hg summary"
2252 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2252 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2253 ui.write(msg)
2253 ui.write(msg)
2254 return
2254 return
2255 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2255 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2256 # i18n: column positioning for "hg summary"
2256 # i18n: column positioning for "hg summary"
2257 ui.write(
2257 ui.write(
2258 _(b'rebase: %s, %s (rebase --continue)\n')
2258 _(b'rebase: %s, %s (rebase --continue)\n')
2259 % (
2259 % (
2260 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2260 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2261 ui.label(_(b'%d remaining'), b'rebase.remaining')
2261 ui.label(_(b'%d remaining'), b'rebase.remaining')
2262 % (len(state) - numrebased),
2262 % (len(state) - numrebased),
2263 )
2263 )
2264 )
2264 )
2265
2265
2266
2266
2267 def uisetup(ui):
2267 def uisetup(ui):
2268 # Replace pull with a decorator to provide --rebase option
2268 # Replace pull with a decorator to provide --rebase option
2269 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2269 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2270 entry[1].append(
2270 entry[1].append(
2271 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2271 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2272 )
2272 )
2273 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2273 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2274 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2274 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2275 statemod.addunfinished(
2275 statemod.addunfinished(
2276 b'rebase',
2276 b'rebase',
2277 fname=b'rebasestate',
2277 fname=b'rebasestate',
2278 stopflag=True,
2278 stopflag=True,
2279 continueflag=True,
2279 continueflag=True,
2280 abortfunc=abortrebase,
2280 abortfunc=abortrebase,
2281 continuefunc=continuerebase,
2281 continuefunc=continuerebase,
2282 )
2282 )
@@ -1,929 +1,929
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant changes to another parent revision,
10 This extension allows you to transplant changes to another parent revision,
11 possibly in another repository. The transplant is done using 'diff' patches.
11 possibly in another repository. The transplant is done using 'diff' patches.
12
12
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 map from a changeset hash to its hash in the source repository.
14 map from a changeset hash to its hash in the source repository.
15 '''
15 '''
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import os
18 import os
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial.pycompat import open
21 from mercurial.pycompat import open
22 from mercurial import (
22 from mercurial import (
23 bundlerepo,
23 bundlerepo,
24 cmdutil,
24 cmdutil,
25 error,
25 error,
26 exchange,
26 exchange,
27 hg,
27 hg,
28 logcmdutil,
28 logcmdutil,
29 match,
29 match,
30 merge,
30 merge,
31 node as nodemod,
31 node as nodemod,
32 patch,
32 patch,
33 pycompat,
33 pycompat,
34 registrar,
34 registrar,
35 revlog,
35 revlog,
36 revset,
36 revset,
37 scmutil,
37 scmutil,
38 smartset,
38 smartset,
39 state as statemod,
39 state as statemod,
40 util,
40 util,
41 vfs as vfsmod,
41 vfs as vfsmod,
42 )
42 )
43 from mercurial.utils import (
43 from mercurial.utils import (
44 procutil,
44 procutil,
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48
48
49 class TransplantError(error.Abort):
49 class TransplantError(error.Abort):
50 pass
50 pass
51
51
52
52
53 cmdtable = {}
53 cmdtable = {}
54 command = registrar.command(cmdtable)
54 command = registrar.command(cmdtable)
55 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
55 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
56 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
56 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
57 # be specifying the version(s) of Mercurial they are tested with, or
57 # be specifying the version(s) of Mercurial they are tested with, or
58 # leave the attribute unspecified.
58 # leave the attribute unspecified.
59 testedwith = b'ships-with-hg-core'
59 testedwith = b'ships-with-hg-core'
60
60
61 configtable = {}
61 configtable = {}
62 configitem = registrar.configitem(configtable)
62 configitem = registrar.configitem(configtable)
63
63
64 configitem(
64 configitem(
65 b'transplant', b'filter', default=None,
65 b'transplant', b'filter', default=None,
66 )
66 )
67 configitem(
67 configitem(
68 b'transplant', b'log', default=None,
68 b'transplant', b'log', default=None,
69 )
69 )
70
70
71
71
72 class transplantentry(object):
72 class transplantentry(object):
73 def __init__(self, lnode, rnode):
73 def __init__(self, lnode, rnode):
74 self.lnode = lnode
74 self.lnode = lnode
75 self.rnode = rnode
75 self.rnode = rnode
76
76
77
77
78 class transplants(object):
78 class transplants(object):
79 def __init__(self, path=None, transplantfile=None, opener=None):
79 def __init__(self, path=None, transplantfile=None, opener=None):
80 self.path = path
80 self.path = path
81 self.transplantfile = transplantfile
81 self.transplantfile = transplantfile
82 self.opener = opener
82 self.opener = opener
83
83
84 if not opener:
84 if not opener:
85 self.opener = vfsmod.vfs(self.path)
85 self.opener = vfsmod.vfs(self.path)
86 self.transplants = {}
86 self.transplants = {}
87 self.dirty = False
87 self.dirty = False
88 self.read()
88 self.read()
89
89
90 def read(self):
90 def read(self):
91 abspath = os.path.join(self.path, self.transplantfile)
91 abspath = os.path.join(self.path, self.transplantfile)
92 if self.transplantfile and os.path.exists(abspath):
92 if self.transplantfile and os.path.exists(abspath):
93 for line in self.opener.read(self.transplantfile).splitlines():
93 for line in self.opener.read(self.transplantfile).splitlines():
94 lnode, rnode = map(revlog.bin, line.split(b':'))
94 lnode, rnode = map(revlog.bin, line.split(b':'))
95 list = self.transplants.setdefault(rnode, [])
95 list = self.transplants.setdefault(rnode, [])
96 list.append(transplantentry(lnode, rnode))
96 list.append(transplantentry(lnode, rnode))
97
97
98 def write(self):
98 def write(self):
99 if self.dirty and self.transplantfile:
99 if self.dirty and self.transplantfile:
100 if not os.path.isdir(self.path):
100 if not os.path.isdir(self.path):
101 os.mkdir(self.path)
101 os.mkdir(self.path)
102 fp = self.opener(self.transplantfile, b'w')
102 fp = self.opener(self.transplantfile, b'w')
103 for list in pycompat.itervalues(self.transplants):
103 for list in pycompat.itervalues(self.transplants):
104 for t in list:
104 for t in list:
105 l, r = map(nodemod.hex, (t.lnode, t.rnode))
105 l, r = map(nodemod.hex, (t.lnode, t.rnode))
106 fp.write(l + b':' + r + b'\n')
106 fp.write(l + b':' + r + b'\n')
107 fp.close()
107 fp.close()
108 self.dirty = False
108 self.dirty = False
109
109
110 def get(self, rnode):
110 def get(self, rnode):
111 return self.transplants.get(rnode) or []
111 return self.transplants.get(rnode) or []
112
112
113 def set(self, lnode, rnode):
113 def set(self, lnode, rnode):
114 list = self.transplants.setdefault(rnode, [])
114 list = self.transplants.setdefault(rnode, [])
115 list.append(transplantentry(lnode, rnode))
115 list.append(transplantentry(lnode, rnode))
116 self.dirty = True
116 self.dirty = True
117
117
118 def remove(self, transplant):
118 def remove(self, transplant):
119 list = self.transplants.get(transplant.rnode)
119 list = self.transplants.get(transplant.rnode)
120 if list:
120 if list:
121 del list[list.index(transplant)]
121 del list[list.index(transplant)]
122 self.dirty = True
122 self.dirty = True
123
123
124
124
125 class transplanter(object):
125 class transplanter(object):
126 def __init__(self, ui, repo, opts):
126 def __init__(self, ui, repo, opts):
127 self.ui = ui
127 self.ui = ui
128 self.path = repo.vfs.join(b'transplant')
128 self.path = repo.vfs.join(b'transplant')
129 self.opener = vfsmod.vfs(self.path)
129 self.opener = vfsmod.vfs(self.path)
130 self.transplants = transplants(
130 self.transplants = transplants(
131 self.path, b'transplants', opener=self.opener
131 self.path, b'transplants', opener=self.opener
132 )
132 )
133
133
134 def getcommiteditor():
134 def getcommiteditor():
135 editform = cmdutil.mergeeditform(repo[None], b'transplant')
135 editform = cmdutil.mergeeditform(repo[None], b'transplant')
136 return cmdutil.getcommiteditor(
136 return cmdutil.getcommiteditor(
137 editform=editform, **pycompat.strkwargs(opts)
137 editform=editform, **pycompat.strkwargs(opts)
138 )
138 )
139
139
140 self.getcommiteditor = getcommiteditor
140 self.getcommiteditor = getcommiteditor
141
141
142 def applied(self, repo, node, parent):
142 def applied(self, repo, node, parent):
143 '''returns True if a node is already an ancestor of parent
143 '''returns True if a node is already an ancestor of parent
144 or is parent or has already been transplanted'''
144 or is parent or has already been transplanted'''
145 if hasnode(repo, parent):
145 if hasnode(repo, parent):
146 parentrev = repo.changelog.rev(parent)
146 parentrev = repo.changelog.rev(parent)
147 if hasnode(repo, node):
147 if hasnode(repo, node):
148 rev = repo.changelog.rev(node)
148 rev = repo.changelog.rev(node)
149 reachable = repo.changelog.ancestors(
149 reachable = repo.changelog.ancestors(
150 [parentrev], rev, inclusive=True
150 [parentrev], rev, inclusive=True
151 )
151 )
152 if rev in reachable:
152 if rev in reachable:
153 return True
153 return True
154 for t in self.transplants.get(node):
154 for t in self.transplants.get(node):
155 # it might have been stripped
155 # it might have been stripped
156 if not hasnode(repo, t.lnode):
156 if not hasnode(repo, t.lnode):
157 self.transplants.remove(t)
157 self.transplants.remove(t)
158 return False
158 return False
159 lnoderev = repo.changelog.rev(t.lnode)
159 lnoderev = repo.changelog.rev(t.lnode)
160 if lnoderev in repo.changelog.ancestors(
160 if lnoderev in repo.changelog.ancestors(
161 [parentrev], lnoderev, inclusive=True
161 [parentrev], lnoderev, inclusive=True
162 ):
162 ):
163 return True
163 return True
164 return False
164 return False
165
165
166 def apply(self, repo, source, revmap, merges, opts=None):
166 def apply(self, repo, source, revmap, merges, opts=None):
167 '''apply the revisions in revmap one by one in revision order'''
167 '''apply the revisions in revmap one by one in revision order'''
168 if opts is None:
168 if opts is None:
169 opts = {}
169 opts = {}
170 revs = sorted(revmap)
170 revs = sorted(revmap)
171 p1 = repo.dirstate.p1()
171 p1 = repo.dirstate.p1()
172 pulls = []
172 pulls = []
173 diffopts = patch.difffeatureopts(self.ui, opts)
173 diffopts = patch.difffeatureopts(self.ui, opts)
174 diffopts.git = True
174 diffopts.git = True
175
175
176 lock = tr = None
176 lock = tr = None
177 try:
177 try:
178 lock = repo.lock()
178 lock = repo.lock()
179 tr = repo.transaction(b'transplant')
179 tr = repo.transaction(b'transplant')
180 for rev in revs:
180 for rev in revs:
181 node = revmap[rev]
181 node = revmap[rev]
182 revstr = b'%d:%s' % (rev, nodemod.short(node))
182 revstr = b'%d:%s' % (rev, nodemod.short(node))
183
183
184 if self.applied(repo, node, p1):
184 if self.applied(repo, node, p1):
185 self.ui.warn(
185 self.ui.warn(
186 _(b'skipping already applied revision %s\n') % revstr
186 _(b'skipping already applied revision %s\n') % revstr
187 )
187 )
188 continue
188 continue
189
189
190 parents = source.changelog.parents(node)
190 parents = source.changelog.parents(node)
191 if not (opts.get(b'filter') or opts.get(b'log')):
191 if not (opts.get(b'filter') or opts.get(b'log')):
192 # If the changeset parent is the same as the
192 # If the changeset parent is the same as the
193 # wdir's parent, just pull it.
193 # wdir's parent, just pull it.
194 if parents[0] == p1:
194 if parents[0] == p1:
195 pulls.append(node)
195 pulls.append(node)
196 p1 = node
196 p1 = node
197 continue
197 continue
198 if pulls:
198 if pulls:
199 if source != repo:
199 if source != repo:
200 exchange.pull(repo, source.peer(), heads=pulls)
200 exchange.pull(repo, source.peer(), heads=pulls)
201 merge.update(
201 merge.update(
202 repo, pulls[-1], branchmerge=False, force=False
202 repo, pulls[-1], branchmerge=False, force=False
203 )
203 )
204 p1 = repo.dirstate.p1()
204 p1 = repo.dirstate.p1()
205 pulls = []
205 pulls = []
206
206
207 domerge = False
207 domerge = False
208 if node in merges:
208 if node in merges:
209 # pulling all the merge revs at once would mean we
209 # pulling all the merge revs at once would mean we
210 # couldn't transplant after the latest even if
210 # couldn't transplant after the latest even if
211 # transplants before them fail.
211 # transplants before them fail.
212 domerge = True
212 domerge = True
213 if not hasnode(repo, node):
213 if not hasnode(repo, node):
214 exchange.pull(repo, source.peer(), heads=[node])
214 exchange.pull(repo, source.peer(), heads=[node])
215
215
216 skipmerge = False
216 skipmerge = False
217 if parents[1] != revlog.nullid:
217 if parents[1] != revlog.nullid:
218 if not opts.get(b'parent'):
218 if not opts.get(b'parent'):
219 self.ui.note(
219 self.ui.note(
220 _(b'skipping merge changeset %d:%s\n')
220 _(b'skipping merge changeset %d:%s\n')
221 % (rev, nodemod.short(node))
221 % (rev, nodemod.short(node))
222 )
222 )
223 skipmerge = True
223 skipmerge = True
224 else:
224 else:
225 parent = source.lookup(opts[b'parent'])
225 parent = source.lookup(opts[b'parent'])
226 if parent not in parents:
226 if parent not in parents:
227 raise error.Abort(
227 raise error.Abort(
228 _(b'%s is not a parent of %s')
228 _(b'%s is not a parent of %s')
229 % (nodemod.short(parent), nodemod.short(node))
229 % (nodemod.short(parent), nodemod.short(node))
230 )
230 )
231 else:
231 else:
232 parent = parents[0]
232 parent = parents[0]
233
233
234 if skipmerge:
234 if skipmerge:
235 patchfile = None
235 patchfile = None
236 else:
236 else:
237 fd, patchfile = pycompat.mkstemp(prefix=b'hg-transplant-')
237 fd, patchfile = pycompat.mkstemp(prefix=b'hg-transplant-')
238 fp = os.fdopen(fd, 'wb')
238 fp = os.fdopen(fd, 'wb')
239 gen = patch.diff(source, parent, node, opts=diffopts)
239 gen = patch.diff(source, parent, node, opts=diffopts)
240 for chunk in gen:
240 for chunk in gen:
241 fp.write(chunk)
241 fp.write(chunk)
242 fp.close()
242 fp.close()
243
243
244 del revmap[rev]
244 del revmap[rev]
245 if patchfile or domerge:
245 if patchfile or domerge:
246 try:
246 try:
247 try:
247 try:
248 n = self.applyone(
248 n = self.applyone(
249 repo,
249 repo,
250 node,
250 node,
251 source.changelog.read(node),
251 source.changelog.read(node),
252 patchfile,
252 patchfile,
253 merge=domerge,
253 merge=domerge,
254 log=opts.get(b'log'),
254 log=opts.get(b'log'),
255 filter=opts.get(b'filter'),
255 filter=opts.get(b'filter'),
256 )
256 )
257 except TransplantError:
257 except TransplantError:
258 # Do not rollback, it is up to the user to
258 # Do not rollback, it is up to the user to
259 # fix the merge or cancel everything
259 # fix the merge or cancel everything
260 tr.close()
260 tr.close()
261 raise
261 raise
262 if n and domerge:
262 if n and domerge:
263 self.ui.status(
263 self.ui.status(
264 _(b'%s merged at %s\n')
264 _(b'%s merged at %s\n')
265 % (revstr, nodemod.short(n))
265 % (revstr, nodemod.short(n))
266 )
266 )
267 elif n:
267 elif n:
268 self.ui.status(
268 self.ui.status(
269 _(b'%s transplanted to %s\n')
269 _(b'%s transplanted to %s\n')
270 % (nodemod.short(node), nodemod.short(n))
270 % (nodemod.short(node), nodemod.short(n))
271 )
271 )
272 finally:
272 finally:
273 if patchfile:
273 if patchfile:
274 os.unlink(patchfile)
274 os.unlink(patchfile)
275 tr.close()
275 tr.close()
276 if pulls:
276 if pulls:
277 exchange.pull(repo, source.peer(), heads=pulls)
277 exchange.pull(repo, source.peer(), heads=pulls)
278 merge.update(repo, pulls[-1], branchmerge=False, force=False)
278 merge.update(repo, pulls[-1], branchmerge=False, force=False)
279 finally:
279 finally:
280 self.saveseries(revmap, merges)
280 self.saveseries(revmap, merges)
281 self.transplants.write()
281 self.transplants.write()
282 if tr:
282 if tr:
283 tr.release()
283 tr.release()
284 if lock:
284 if lock:
285 lock.release()
285 lock.release()
286
286
287 def filter(self, filter, node, changelog, patchfile):
287 def filter(self, filter, node, changelog, patchfile):
288 '''arbitrarily rewrite changeset before applying it'''
288 '''arbitrarily rewrite changeset before applying it'''
289
289
290 self.ui.status(_(b'filtering %s\n') % patchfile)
290 self.ui.status(_(b'filtering %s\n') % patchfile)
291 user, date, msg = (changelog[1], changelog[2], changelog[4])
291 user, date, msg = (changelog[1], changelog[2], changelog[4])
292 fd, headerfile = pycompat.mkstemp(prefix=b'hg-transplant-')
292 fd, headerfile = pycompat.mkstemp(prefix=b'hg-transplant-')
293 fp = os.fdopen(fd, 'wb')
293 fp = os.fdopen(fd, 'wb')
294 fp.write(b"# HG changeset patch\n")
294 fp.write(b"# HG changeset patch\n")
295 fp.write(b"# User %s\n" % user)
295 fp.write(b"# User %s\n" % user)
296 fp.write(b"# Date %d %d\n" % date)
296 fp.write(b"# Date %d %d\n" % date)
297 fp.write(msg + b'\n')
297 fp.write(msg + b'\n')
298 fp.close()
298 fp.close()
299
299
300 try:
300 try:
301 self.ui.system(
301 self.ui.system(
302 b'%s %s %s'
302 b'%s %s %s'
303 % (
303 % (
304 filter,
304 filter,
305 procutil.shellquote(headerfile),
305 procutil.shellquote(headerfile),
306 procutil.shellquote(patchfile),
306 procutil.shellquote(patchfile),
307 ),
307 ),
308 environ={
308 environ={
309 b'HGUSER': changelog[1],
309 b'HGUSER': changelog[1],
310 b'HGREVISION': nodemod.hex(node),
310 b'HGREVISION': nodemod.hex(node),
311 },
311 },
312 onerr=error.Abort,
312 onerr=error.Abort,
313 errprefix=_(b'filter failed'),
313 errprefix=_(b'filter failed'),
314 blockedtag=b'transplant_filter',
314 blockedtag=b'transplant_filter',
315 )
315 )
316 user, date, msg = self.parselog(open(headerfile, b'rb'))[1:4]
316 user, date, msg = self.parselog(open(headerfile, b'rb'))[1:4]
317 finally:
317 finally:
318 os.unlink(headerfile)
318 os.unlink(headerfile)
319
319
320 return (user, date, msg)
320 return (user, date, msg)
321
321
322 def applyone(
322 def applyone(
323 self, repo, node, cl, patchfile, merge=False, log=False, filter=None
323 self, repo, node, cl, patchfile, merge=False, log=False, filter=None
324 ):
324 ):
325 '''apply the patch in patchfile to the repository as a transplant'''
325 '''apply the patch in patchfile to the repository as a transplant'''
326 (manifest, user, (time, timezone), files, message) = cl[:5]
326 (manifest, user, (time, timezone), files, message) = cl[:5]
327 date = b"%d %d" % (time, timezone)
327 date = b"%d %d" % (time, timezone)
328 extra = {b'transplant_source': node}
328 extra = {b'transplant_source': node}
329 if filter:
329 if filter:
330 (user, date, message) = self.filter(filter, node, cl, patchfile)
330 (user, date, message) = self.filter(filter, node, cl, patchfile)
331
331
332 if log:
332 if log:
333 # we don't translate messages inserted into commits
333 # we don't translate messages inserted into commits
334 message += b'\n(transplanted from %s)' % nodemod.hex(node)
334 message += b'\n(transplanted from %s)' % nodemod.hex(node)
335
335
336 self.ui.status(_(b'applying %s\n') % nodemod.short(node))
336 self.ui.status(_(b'applying %s\n') % nodemod.short(node))
337 self.ui.note(b'%s %s\n%s\n' % (user, date, message))
337 self.ui.note(b'%s %s\n%s\n' % (user, date, message))
338
338
339 if not patchfile and not merge:
339 if not patchfile and not merge:
340 raise error.Abort(_(b'can only omit patchfile if merging'))
340 raise error.Abort(_(b'can only omit patchfile if merging'))
341 if patchfile:
341 if patchfile:
342 try:
342 try:
343 files = set()
343 files = set()
344 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
344 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
345 files = list(files)
345 files = list(files)
346 except Exception as inst:
346 except Exception as inst:
347 seriespath = os.path.join(self.path, b'series')
347 seriespath = os.path.join(self.path, b'series')
348 if os.path.exists(seriespath):
348 if os.path.exists(seriespath):
349 os.unlink(seriespath)
349 os.unlink(seriespath)
350 p1 = repo.dirstate.p1()
350 p1 = repo.dirstate.p1()
351 p2 = node
351 p2 = node
352 self.log(user, date, message, p1, p2, merge=merge)
352 self.log(user, date, message, p1, p2, merge=merge)
353 self.ui.write(stringutil.forcebytestr(inst) + b'\n')
353 self.ui.write(stringutil.forcebytestr(inst) + b'\n')
354 raise TransplantError(
354 raise TransplantError(
355 _(
355 _(
356 b'fix up the working directory and run '
356 b'fix up the working directory and run '
357 b'hg transplant --continue'
357 b'hg transplant --continue'
358 )
358 )
359 )
359 )
360 else:
360 else:
361 files = None
361 files = None
362 if merge:
362 if merge:
363 p1 = repo.dirstate.p1()
363 p1 = repo.dirstate.p1()
364 repo.setparents(p1, node)
364 repo.setparents(p1, node)
365 m = match.always()
365 m = match.always()
366 else:
366 else:
367 m = match.exact(files)
367 m = match.exact(files)
368
368
369 n = repo.commit(
369 n = repo.commit(
370 message,
370 message,
371 user,
371 user,
372 date,
372 date,
373 extra=extra,
373 extra=extra,
374 match=m,
374 match=m,
375 editor=self.getcommiteditor(),
375 editor=self.getcommiteditor(),
376 )
376 )
377 if not n:
377 if not n:
378 self.ui.warn(
378 self.ui.warn(
379 _(b'skipping emptied changeset %s\n') % nodemod.short(node)
379 _(b'skipping emptied changeset %s\n') % nodemod.short(node)
380 )
380 )
381 return None
381 return None
382 if not merge:
382 if not merge:
383 self.transplants.set(n, node)
383 self.transplants.set(n, node)
384
384
385 return n
385 return n
386
386
387 def canresume(self):
387 def canresume(self):
388 return os.path.exists(os.path.join(self.path, b'journal'))
388 return os.path.exists(os.path.join(self.path, b'journal'))
389
389
390 def resume(self, repo, source, opts):
390 def resume(self, repo, source, opts):
391 '''recover last transaction and apply remaining changesets'''
391 '''recover last transaction and apply remaining changesets'''
392 if os.path.exists(os.path.join(self.path, b'journal')):
392 if os.path.exists(os.path.join(self.path, b'journal')):
393 n, node = self.recover(repo, source, opts)
393 n, node = self.recover(repo, source, opts)
394 if n:
394 if n:
395 self.ui.status(
395 self.ui.status(
396 _(b'%s transplanted as %s\n')
396 _(b'%s transplanted as %s\n')
397 % (nodemod.short(node), nodemod.short(n))
397 % (nodemod.short(node), nodemod.short(n))
398 )
398 )
399 else:
399 else:
400 self.ui.status(
400 self.ui.status(
401 _(b'%s skipped due to empty diff\n')
401 _(b'%s skipped due to empty diff\n')
402 % (nodemod.short(node),)
402 % (nodemod.short(node),)
403 )
403 )
404 seriespath = os.path.join(self.path, b'series')
404 seriespath = os.path.join(self.path, b'series')
405 if not os.path.exists(seriespath):
405 if not os.path.exists(seriespath):
406 self.transplants.write()
406 self.transplants.write()
407 return
407 return
408 nodes, merges = self.readseries()
408 nodes, merges = self.readseries()
409 revmap = {}
409 revmap = {}
410 for n in nodes:
410 for n in nodes:
411 revmap[source.changelog.rev(n)] = n
411 revmap[source.changelog.rev(n)] = n
412 os.unlink(seriespath)
412 os.unlink(seriespath)
413
413
414 self.apply(repo, source, revmap, merges, opts)
414 self.apply(repo, source, revmap, merges, opts)
415
415
416 def recover(self, repo, source, opts):
416 def recover(self, repo, source, opts):
417 '''commit working directory using journal metadata'''
417 '''commit working directory using journal metadata'''
418 node, user, date, message, parents = self.readlog()
418 node, user, date, message, parents = self.readlog()
419 merge = False
419 merge = False
420
420
421 if not user or not date or not message or not parents[0]:
421 if not user or not date or not message or not parents[0]:
422 raise error.Abort(_(b'transplant log file is corrupt'))
422 raise error.Abort(_(b'transplant log file is corrupt'))
423
423
424 parent = parents[0]
424 parent = parents[0]
425 if len(parents) > 1:
425 if len(parents) > 1:
426 if opts.get(b'parent'):
426 if opts.get(b'parent'):
427 parent = source.lookup(opts[b'parent'])
427 parent = source.lookup(opts[b'parent'])
428 if parent not in parents:
428 if parent not in parents:
429 raise error.Abort(
429 raise error.Abort(
430 _(b'%s is not a parent of %s')
430 _(b'%s is not a parent of %s')
431 % (nodemod.short(parent), nodemod.short(node))
431 % (nodemod.short(parent), nodemod.short(node))
432 )
432 )
433 else:
433 else:
434 merge = True
434 merge = True
435
435
436 extra = {b'transplant_source': node}
436 extra = {b'transplant_source': node}
437 try:
437 try:
438 p1 = repo.dirstate.p1()
438 p1 = repo.dirstate.p1()
439 if p1 != parent:
439 if p1 != parent:
440 raise error.Abort(
440 raise error.Abort(
441 _(b'working directory not at transplant parent %s')
441 _(b'working directory not at transplant parent %s')
442 % nodemod.hex(parent)
442 % nodemod.hex(parent)
443 )
443 )
444 if merge:
444 if merge:
445 repo.setparents(p1, parents[1])
445 repo.setparents(p1, parents[1])
446 st = repo.status()
446 st = repo.status()
447 modified, added, removed, deleted = (
447 modified, added, removed, deleted = (
448 st.modified,
448 st.modified,
449 st.added,
449 st.added,
450 st.removed,
450 st.removed,
451 st.deleted,
451 st.deleted,
452 )
452 )
453 if merge or modified or added or removed or deleted:
453 if merge or modified or added or removed or deleted:
454 n = repo.commit(
454 n = repo.commit(
455 message,
455 message,
456 user,
456 user,
457 date,
457 date,
458 extra=extra,
458 extra=extra,
459 editor=self.getcommiteditor(),
459 editor=self.getcommiteditor(),
460 )
460 )
461 if not n:
461 if not n:
462 raise error.Abort(_(b'commit failed'))
462 raise error.Abort(_(b'commit failed'))
463 if not merge:
463 if not merge:
464 self.transplants.set(n, node)
464 self.transplants.set(n, node)
465 else:
465 else:
466 n = None
466 n = None
467 self.unlog()
467 self.unlog()
468
468
469 return n, node
469 return n, node
470 finally:
470 finally:
471 # TODO: get rid of this meaningless try/finally enclosing.
471 # TODO: get rid of this meaningless try/finally enclosing.
472 # this is kept only to reduce changes in a patch.
472 # this is kept only to reduce changes in a patch.
473 pass
473 pass
474
474
475 def stop(self, ui, repo):
475 def stop(self, ui, repo):
476 """logic to stop an interrupted transplant"""
476 """logic to stop an interrupted transplant"""
477 if self.canresume():
477 if self.canresume():
478 startctx = repo[b'.']
478 startctx = repo[b'.']
479 hg.updaterepo(repo, startctx.node(), overwrite=True)
479 merge.clean_update(startctx)
480 ui.status(_(b"stopped the interrupted transplant\n"))
480 ui.status(_(b"stopped the interrupted transplant\n"))
481 ui.status(
481 ui.status(
482 _(b"working directory is now at %s\n") % startctx.hex()[:12]
482 _(b"working directory is now at %s\n") % startctx.hex()[:12]
483 )
483 )
484 self.unlog()
484 self.unlog()
485 return 0
485 return 0
486
486
487 def readseries(self):
487 def readseries(self):
488 nodes = []
488 nodes = []
489 merges = []
489 merges = []
490 cur = nodes
490 cur = nodes
491 for line in self.opener.read(b'series').splitlines():
491 for line in self.opener.read(b'series').splitlines():
492 if line.startswith(b'# Merges'):
492 if line.startswith(b'# Merges'):
493 cur = merges
493 cur = merges
494 continue
494 continue
495 cur.append(revlog.bin(line))
495 cur.append(revlog.bin(line))
496
496
497 return (nodes, merges)
497 return (nodes, merges)
498
498
499 def saveseries(self, revmap, merges):
499 def saveseries(self, revmap, merges):
500 if not revmap:
500 if not revmap:
501 return
501 return
502
502
503 if not os.path.isdir(self.path):
503 if not os.path.isdir(self.path):
504 os.mkdir(self.path)
504 os.mkdir(self.path)
505 series = self.opener(b'series', b'w')
505 series = self.opener(b'series', b'w')
506 for rev in sorted(revmap):
506 for rev in sorted(revmap):
507 series.write(nodemod.hex(revmap[rev]) + b'\n')
507 series.write(nodemod.hex(revmap[rev]) + b'\n')
508 if merges:
508 if merges:
509 series.write(b'# Merges\n')
509 series.write(b'# Merges\n')
510 for m in merges:
510 for m in merges:
511 series.write(nodemod.hex(m) + b'\n')
511 series.write(nodemod.hex(m) + b'\n')
512 series.close()
512 series.close()
513
513
514 def parselog(self, fp):
514 def parselog(self, fp):
515 parents = []
515 parents = []
516 message = []
516 message = []
517 node = revlog.nullid
517 node = revlog.nullid
518 inmsg = False
518 inmsg = False
519 user = None
519 user = None
520 date = None
520 date = None
521 for line in fp.read().splitlines():
521 for line in fp.read().splitlines():
522 if inmsg:
522 if inmsg:
523 message.append(line)
523 message.append(line)
524 elif line.startswith(b'# User '):
524 elif line.startswith(b'# User '):
525 user = line[7:]
525 user = line[7:]
526 elif line.startswith(b'# Date '):
526 elif line.startswith(b'# Date '):
527 date = line[7:]
527 date = line[7:]
528 elif line.startswith(b'# Node ID '):
528 elif line.startswith(b'# Node ID '):
529 node = revlog.bin(line[10:])
529 node = revlog.bin(line[10:])
530 elif line.startswith(b'# Parent '):
530 elif line.startswith(b'# Parent '):
531 parents.append(revlog.bin(line[9:]))
531 parents.append(revlog.bin(line[9:]))
532 elif not line.startswith(b'# '):
532 elif not line.startswith(b'# '):
533 inmsg = True
533 inmsg = True
534 message.append(line)
534 message.append(line)
535 if None in (user, date):
535 if None in (user, date):
536 raise error.Abort(
536 raise error.Abort(
537 _(b"filter corrupted changeset (no user or date)")
537 _(b"filter corrupted changeset (no user or date)")
538 )
538 )
539 return (node, user, date, b'\n'.join(message), parents)
539 return (node, user, date, b'\n'.join(message), parents)
540
540
541 def log(self, user, date, message, p1, p2, merge=False):
541 def log(self, user, date, message, p1, p2, merge=False):
542 '''journal changelog metadata for later recover'''
542 '''journal changelog metadata for later recover'''
543
543
544 if not os.path.isdir(self.path):
544 if not os.path.isdir(self.path):
545 os.mkdir(self.path)
545 os.mkdir(self.path)
546 fp = self.opener(b'journal', b'w')
546 fp = self.opener(b'journal', b'w')
547 fp.write(b'# User %s\n' % user)
547 fp.write(b'# User %s\n' % user)
548 fp.write(b'# Date %s\n' % date)
548 fp.write(b'# Date %s\n' % date)
549 fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
549 fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
550 fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
550 fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
551 if merge:
551 if merge:
552 fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
552 fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
553 fp.write(message.rstrip() + b'\n')
553 fp.write(message.rstrip() + b'\n')
554 fp.close()
554 fp.close()
555
555
556 def readlog(self):
556 def readlog(self):
557 return self.parselog(self.opener(b'journal'))
557 return self.parselog(self.opener(b'journal'))
558
558
559 def unlog(self):
559 def unlog(self):
560 '''remove changelog journal'''
560 '''remove changelog journal'''
561 absdst = os.path.join(self.path, b'journal')
561 absdst = os.path.join(self.path, b'journal')
562 if os.path.exists(absdst):
562 if os.path.exists(absdst):
563 os.unlink(absdst)
563 os.unlink(absdst)
564
564
565 def transplantfilter(self, repo, source, root):
565 def transplantfilter(self, repo, source, root):
566 def matchfn(node):
566 def matchfn(node):
567 if self.applied(repo, node, root):
567 if self.applied(repo, node, root):
568 return False
568 return False
569 if source.changelog.parents(node)[1] != revlog.nullid:
569 if source.changelog.parents(node)[1] != revlog.nullid:
570 return False
570 return False
571 extra = source.changelog.read(node)[5]
571 extra = source.changelog.read(node)[5]
572 cnode = extra.get(b'transplant_source')
572 cnode = extra.get(b'transplant_source')
573 if cnode and self.applied(repo, cnode, root):
573 if cnode and self.applied(repo, cnode, root):
574 return False
574 return False
575 return True
575 return True
576
576
577 return matchfn
577 return matchfn
578
578
579
579
def hasnode(repo, node):
    """Return True if *node* is known to repo's changelog."""
    try:
        rev = repo.changelog.rev(node)
    except error.StorageError:
        # Unknown or unreadable node.
        return False
    return rev is not None
585
585
586
586
def browserevs(ui, repo, nodes, opts):
    """Interactively pick changesets to transplant.

    Shows each candidate and prompts y/n/m/p/c/q/?.  Returns
    ``(transplants, merges)``; both are empty tuples if the user quits.
    """
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    transplants = []
    merges = []
    prompt = _(
        b'apply changeset? [ynmpcq?]:'
        b'$$ &yes, transplant this changeset'
        b'$$ &no, skip this changeset'
        b'$$ &merge at this changeset'
        b'$$ show &patch'
        b'$$ &commit selected changesets'
        b'$$ &quit and cancel transplant'
        b'$$ &? (show this help)'
    )
    for node in nodes:
        displayer.show(repo[node])
        action = None
        while not action:
            idx = ui.promptchoice(prompt)
            action = b'ynmpcq?'[idx : idx + 1]
            if action == b'?':
                # List the choices, then ask again.
                for key, desc in ui.extractchoices(prompt)[1]:
                    ui.write(b'%s: %s\n' % (key, desc))
                action = None
            elif action == b'p':
                # Show the patch against the first parent, then ask again.
                parent = repo.changelog.parents(node)[0]
                for chunk in patch.diff(repo, parent, node):
                    ui.write(chunk)
                action = None
        if action == b'y':
            transplants.append(node)
        elif action == b'm':
            merges.append(node)
        elif action == b'c':
            # Commit what has been selected so far.
            break
        elif action == b'q':
            # Cancel: drop every selection.
            transplants = ()
            merges = ()
            break
    displayer.close()
    return (transplants, merges)
629
629
630
630
@command(
    b'transplant',
    [
        (
            b's',
            b'source',
            b'',
            _(b'transplant changesets from REPO'),
            _(b'REPO'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'use this source changeset as head'),
            _(b'REV'),
        ),
        (
            b'a',
            b'all',
            None,
            _(b'pull all changesets up to the --branch revisions'),
        ),
        (b'p', b'prune', [], _(b'skip over REV'), _(b'REV')),
        (b'm', b'merge', [], _(b'merge at REV'), _(b'REV')),
        (
            b'',
            b'parent',
            b'',
            _(b'parent to choose when transplanting merge'),
            _(b'REV'),
        ),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (b'', b'log', None, _(b'append transplant info to log message')),
        (b'', b'stop', False, _(b'stop interrupted transplant')),
        (
            b'c',
            b'continue',
            None,
            _(b'continue last transplant session after fixing conflicts'),
        ),
        (
            b'',
            b'filter',
            b'',
            _(b'filter changesets through command'),
            _(b'CMD'),
        ),
    ],
    _(
        b'hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
        b'[-m REV] [REV]...'
    ),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. The changesets
    are copied and will thus appear twice in the history with different
    identities.

    Consider using the graft command if everything is inside the same
    repository - it will use merges and will usually give a better result.
    Use the rebase extension if the changesets are unpublished and you want
    to move them instead of copying them.

    If --log is specified, log messages will have a comment appended
    of the form::

      (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message as
    $1 and the patch as $2.

    --source/-s specifies another repository to use for selecting changesets,
    just as if it temporarily had been pulled.
    If --branch/-b is specified, these revisions will be used as
    heads when deciding which changesets to transplant, just as if only
    these revisions had been pulled.
    If --all/-a is specified, all the revisions up to the heads specified
    with --branch will be transplanted.

    Example:

    - transplant all changes up to REV on top of your current revision::

        hg transplant --branch REV --all

    You can optionally mark selected transplanted changesets as merge
    changesets. You will not be prompted to transplant any ancestors
    of a merged transplant, and you can merge descendants of them
    normally instead of transplanting them.

    Merge changesets may be transplanted directly by specifying the
    proper parent changeset by calling :hg:`transplant --parent`.

    If no merges or revisions are provided, :hg:`transplant` will
    start an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand
    and then resume where you left off by calling :hg:`transplant
    --continue/-c`.
    '''
    # All of the real work happens under the working-directory lock.
    with repo.wlock():
        return _dotransplant(ui, repo, *revs, **opts)
739
739
740
740
def _dotransplant(ui, repo, *revs, **opts):
    """Body of the transplant command (caller holds the wlock)."""

    def incwalk(repo, csets, match=util.always):
        # Walk incoming changesets, keeping only those accepted by match.
        return (node for node in csets if match(node))

    def transplantwalk(repo, dest, heads, match=util.always):
        '''Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used.'''
        if not heads:
            heads = repo.heads()
        ctx = repo[dest]
        ancestors = [ctx.ancestor(repo[head]).node() for head in heads]
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # --continue and --stop are exclusive modes; everything else needs
        # at least one source of changesets.
        if opts.get(b'continue'):
            cmdutil.check_incompatible_arguments(
                opts, b'continue', [b'branch', b'all', b'merge']
            )
            return
        if opts.get(b'stop'):
            cmdutil.check_incompatible_arguments(
                opts, b'stop', [b'branch', b'all', b'merge']
            )
            return
        if not (
            opts.get(b'source')
            or revs
            or opts.get(b'merge')
            or opts.get(b'branch')
        ):
            raise error.Abort(
                _(
                    b'no source URL, branch revision, or revision '
                    b'list provided'
                )
            )
        if opts.get(b'all'):
            if not opts.get(b'branch'):
                raise error.Abort(_(b'--all requires a branch revision'))
            if revs:
                raise error.Abort(
                    _(b'--all is incompatible with a revision list')
                )

    opts = pycompat.byteskwargs(opts)
    checkopts(opts, revs)

    if not opts.get(b'log'):
        # deprecated config: transplant.log
        opts[b'log'] = ui.config(b'transplant', b'log')
    if not opts.get(b'filter'):
        # deprecated config: transplant.filter
        opts[b'filter'] = ui.config(b'transplant', b'filter')

    tp = transplanter(ui, repo, opts)

    p1 = repo.dirstate.p1()
    if len(repo) > 0 and p1 == revlog.nullid:
        raise error.Abort(_(b'no revision checked out'))
    if opts.get(b'continue'):
        if not tp.canresume():
            raise error.Abort(_(b'no transplant to continue'))
    elif opts.get(b'stop'):
        if not tp.canresume():
            raise error.Abort(_(b'no interrupted transplant found'))
        return tp.stop(ui, repo)
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    sourcerepo = opts.get(b'source')
    if sourcerepo:
        # Pull candidate changesets from a remote into a bundle repo.
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
        target = set(heads)
        for r in revs:
            try:
                target.add(peer.lookup(r))
            except error.RepoError:
                pass
        source, csets, cleanupfn = bundlerepo.getremotechanges(
            ui, repo, peer, onlyheads=sorted(target), force=True
        )
    else:
        source = repo
        heads = pycompat.maplist(source.lookup, opts.get(b'branch', ()))
        cleanupfn = None

    try:
        if opts.get(b'continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get(b'prune'):
            prune = {
                source[r].node()
                for r in scmutil.revrange(source, opts.get(b'prune'))
            }
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = pycompat.maplist(source.lookup, opts.get(b'merge', ()))
        revmap = {}
        if revs:
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source[r].node()
        elif opts.get(b'all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(
                    source, p1, heads, match=matchfn
                )
            if opts.get(b'all'):
                revs = alltransplants
            else:
                # Interactive selection may add more merge nodes.
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
        for r in revs:
            revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if cleanupfn:
            cleanupfn()
876
876
877
877
878 def continuecmd(ui, repo):
878 def continuecmd(ui, repo):
879 """logic to resume an interrupted transplant using
879 """logic to resume an interrupted transplant using
880 'hg continue'"""
880 'hg continue'"""
881 with repo.wlock():
881 with repo.wlock():
882 tp = transplanter(ui, repo, {})
882 tp = transplanter(ui, repo, {})
883 return tp.resume(repo, repo, {})
883 return tp.resume(repo, repo, {})
884
884
885
885
886 revsetpredicate = registrar.revsetpredicate()
886 revsetpredicate = registrar.revsetpredicate()
887
887
888
888
889 @revsetpredicate(b'transplanted([set])')
889 @revsetpredicate(b'transplanted([set])')
890 def revsettransplanted(repo, subset, x):
890 def revsettransplanted(repo, subset, x):
891 """Transplanted changesets in set, or all transplanted changesets.
891 """Transplanted changesets in set, or all transplanted changesets.
892 """
892 """
893 if x:
893 if x:
894 s = revset.getset(repo, subset, x)
894 s = revset.getset(repo, subset, x)
895 else:
895 else:
896 s = subset
896 s = subset
897 return smartset.baseset(
897 return smartset.baseset(
898 [r for r in s if repo[r].extra().get(b'transplant_source')]
898 [r for r in s if repo[r].extra().get(b'transplant_source')]
899 )
899 )
900
900
901
901
902 templatekeyword = registrar.templatekeyword()
902 templatekeyword = registrar.templatekeyword()
903
903
904
904
905 @templatekeyword(b'transplanted', requires={b'ctx'})
905 @templatekeyword(b'transplanted', requires={b'ctx'})
906 def kwtransplanted(context, mapping):
906 def kwtransplanted(context, mapping):
907 """String. The node identifier of the transplanted
907 """String. The node identifier of the transplanted
908 changeset if any."""
908 changeset if any."""
909 ctx = context.resource(mapping, b'ctx')
909 ctx = context.resource(mapping, b'ctx')
910 n = ctx.extra().get(b'transplant_source')
910 n = ctx.extra().get(b'transplant_source')
911 return n and nodemod.hex(n) or b''
911 return n and nodemod.hex(n) or b''
912
912
913
913
914 def extsetup(ui):
914 def extsetup(ui):
915 statemod.addunfinished(
915 statemod.addunfinished(
916 b'transplant',
916 b'transplant',
917 fname=b'transplant/journal',
917 fname=b'transplant/journal',
918 clearable=True,
918 clearable=True,
919 continuefunc=continuecmd,
919 continuefunc=continuecmd,
920 statushint=_(
920 statushint=_(
921 b'To continue: hg transplant --continue\n'
921 b'To continue: hg transplant --continue\n'
922 b'To stop: hg transplant --stop'
922 b'To stop: hg transplant --stop'
923 ),
923 ),
924 cmdhint=_(b"use 'hg transplant --continue' or 'hg transplant --stop'"),
924 cmdhint=_(b"use 'hg transplant --continue' or 'hg transplant --stop'"),
925 )
925 )
926
926
927
927
928 # tell hggettext to extract docstrings from these functions:
928 # tell hggettext to extract docstrings from these functions:
929 i18nfunctions = [revsettransplanted, kwtransplanted]
929 i18nfunctions = [revsettransplanted, kwtransplanted]
@@ -1,4217 +1,4216
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from .pycompat import (
22 from .pycompat import (
23 getattr,
23 getattr,
24 open,
24 open,
25 setattr,
25 setattr,
26 )
26 )
27 from .thirdparty import attr
27 from .thirdparty import attr
28
28
29 from . import (
29 from . import (
30 bookmarks,
30 bookmarks,
31 changelog,
31 changelog,
32 copies,
32 copies,
33 crecord as crecordmod,
33 crecord as crecordmod,
34 dirstateguard,
34 dirstateguard,
35 encoding,
35 encoding,
36 error,
36 error,
37 formatter,
37 formatter,
38 logcmdutil,
38 logcmdutil,
39 match as matchmod,
39 match as matchmod,
40 merge as mergemod,
40 merge as mergemod,
41 mergestate as mergestatemod,
41 mergestate as mergestatemod,
42 mergeutil,
42 mergeutil,
43 obsolete,
43 obsolete,
44 patch,
44 patch,
45 pathutil,
45 pathutil,
46 phases,
46 phases,
47 pycompat,
47 pycompat,
48 repair,
48 repair,
49 revlog,
49 revlog,
50 rewriteutil,
50 rewriteutil,
51 scmutil,
51 scmutil,
52 smartset,
52 smartset,
53 state as statemod,
53 state as statemod,
54 subrepoutil,
54 subrepoutil,
55 templatekw,
55 templatekw,
56 templater,
56 templater,
57 util,
57 util,
58 vfs as vfsmod,
58 vfs as vfsmod,
59 )
59 )
60
60
61 from .utils import (
61 from .utils import (
62 dateutil,
62 dateutil,
63 stringutil,
63 stringutil,
64 )
64 )
65
65
66 if pycompat.TYPE_CHECKING:
66 if pycompat.TYPE_CHECKING:
67 from typing import (
67 from typing import (
68 Any,
68 Any,
69 Dict,
69 Dict,
70 )
70 )
71
71
72 for t in (Any, Dict):
72 for t in (Any, Dict):
73 assert t
73 assert t
74
74
75 stringio = util.stringio
75 stringio = util.stringio
76
76
77 # templates of common command options
77 # templates of common command options
78
78
79 dryrunopts = [
79 dryrunopts = [
80 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
80 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
81 ]
81 ]
82
82
83 confirmopts = [
83 confirmopts = [
84 (b'', b'confirm', None, _(b'ask before applying actions')),
84 (b'', b'confirm', None, _(b'ask before applying actions')),
85 ]
85 ]
86
86
87 remoteopts = [
87 remoteopts = [
88 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
88 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
89 (
89 (
90 b'',
90 b'',
91 b'remotecmd',
91 b'remotecmd',
92 b'',
92 b'',
93 _(b'specify hg command to run on the remote side'),
93 _(b'specify hg command to run on the remote side'),
94 _(b'CMD'),
94 _(b'CMD'),
95 ),
95 ),
96 (
96 (
97 b'',
97 b'',
98 b'insecure',
98 b'insecure',
99 None,
99 None,
100 _(b'do not verify server certificate (ignoring web.cacerts config)'),
100 _(b'do not verify server certificate (ignoring web.cacerts config)'),
101 ),
101 ),
102 ]
102 ]
103
103
104 walkopts = [
104 walkopts = [
105 (
105 (
106 b'I',
106 b'I',
107 b'include',
107 b'include',
108 [],
108 [],
109 _(b'include names matching the given patterns'),
109 _(b'include names matching the given patterns'),
110 _(b'PATTERN'),
110 _(b'PATTERN'),
111 ),
111 ),
112 (
112 (
113 b'X',
113 b'X',
114 b'exclude',
114 b'exclude',
115 [],
115 [],
116 _(b'exclude names matching the given patterns'),
116 _(b'exclude names matching the given patterns'),
117 _(b'PATTERN'),
117 _(b'PATTERN'),
118 ),
118 ),
119 ]
119 ]
120
120
121 commitopts = [
121 commitopts = [
122 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
122 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
123 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
123 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
124 ]
124 ]
125
125
126 commitopts2 = [
126 commitopts2 = [
127 (
127 (
128 b'd',
128 b'd',
129 b'date',
129 b'date',
130 b'',
130 b'',
131 _(b'record the specified date as commit date'),
131 _(b'record the specified date as commit date'),
132 _(b'DATE'),
132 _(b'DATE'),
133 ),
133 ),
134 (
134 (
135 b'u',
135 b'u',
136 b'user',
136 b'user',
137 b'',
137 b'',
138 _(b'record the specified user as committer'),
138 _(b'record the specified user as committer'),
139 _(b'USER'),
139 _(b'USER'),
140 ),
140 ),
141 ]
141 ]
142
142
143 commitopts3 = [
143 commitopts3 = [
144 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
144 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
145 (b'U', b'currentuser', None, _(b'record the current user as committer')),
145 (b'U', b'currentuser', None, _(b'record the current user as committer')),
146 ]
146 ]
147
147
148 formatteropts = [
148 formatteropts = [
149 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
149 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
150 ]
150 ]
151
151
152 templateopts = [
152 templateopts = [
153 (
153 (
154 b'',
154 b'',
155 b'style',
155 b'style',
156 b'',
156 b'',
157 _(b'display using template map file (DEPRECATED)'),
157 _(b'display using template map file (DEPRECATED)'),
158 _(b'STYLE'),
158 _(b'STYLE'),
159 ),
159 ),
160 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
160 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
161 ]
161 ]
162
162
163 logopts = [
163 logopts = [
164 (b'p', b'patch', None, _(b'show patch')),
164 (b'p', b'patch', None, _(b'show patch')),
165 (b'g', b'git', None, _(b'use git extended diff format')),
165 (b'g', b'git', None, _(b'use git extended diff format')),
166 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
166 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
167 (b'M', b'no-merges', None, _(b'do not show merges')),
167 (b'M', b'no-merges', None, _(b'do not show merges')),
168 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
168 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
169 (b'G', b'graph', None, _(b"show the revision DAG")),
169 (b'G', b'graph', None, _(b"show the revision DAG")),
170 ] + templateopts
170 ] + templateopts
171
171
172 diffopts = [
172 diffopts = [
173 (b'a', b'text', None, _(b'treat all files as text')),
173 (b'a', b'text', None, _(b'treat all files as text')),
174 (
174 (
175 b'g',
175 b'g',
176 b'git',
176 b'git',
177 None,
177 None,
178 _(b'use git extended diff format (DEFAULT: diff.git)'),
178 _(b'use git extended diff format (DEFAULT: diff.git)'),
179 ),
179 ),
180 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
180 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
181 (b'', b'nodates', None, _(b'omit dates from diff headers')),
181 (b'', b'nodates', None, _(b'omit dates from diff headers')),
182 ]
182 ]
183
183
184 diffwsopts = [
184 diffwsopts = [
185 (
185 (
186 b'w',
186 b'w',
187 b'ignore-all-space',
187 b'ignore-all-space',
188 None,
188 None,
189 _(b'ignore white space when comparing lines'),
189 _(b'ignore white space when comparing lines'),
190 ),
190 ),
191 (
191 (
192 b'b',
192 b'b',
193 b'ignore-space-change',
193 b'ignore-space-change',
194 None,
194 None,
195 _(b'ignore changes in the amount of white space'),
195 _(b'ignore changes in the amount of white space'),
196 ),
196 ),
197 (
197 (
198 b'B',
198 b'B',
199 b'ignore-blank-lines',
199 b'ignore-blank-lines',
200 None,
200 None,
201 _(b'ignore changes whose lines are all blank'),
201 _(b'ignore changes whose lines are all blank'),
202 ),
202 ),
203 (
203 (
204 b'Z',
204 b'Z',
205 b'ignore-space-at-eol',
205 b'ignore-space-at-eol',
206 None,
206 None,
207 _(b'ignore changes in whitespace at EOL'),
207 _(b'ignore changes in whitespace at EOL'),
208 ),
208 ),
209 ]
209 ]
210
210
211 diffopts2 = (
211 diffopts2 = (
212 [
212 [
213 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
213 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
214 (
214 (
215 b'p',
215 b'p',
216 b'show-function',
216 b'show-function',
217 None,
217 None,
218 _(
218 _(
219 b'show which function each change is in (DEFAULT: diff.showfunc)'
219 b'show which function each change is in (DEFAULT: diff.showfunc)'
220 ),
220 ),
221 ),
221 ),
222 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
222 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
223 ]
223 ]
224 + diffwsopts
224 + diffwsopts
225 + [
225 + [
226 (
226 (
227 b'U',
227 b'U',
228 b'unified',
228 b'unified',
229 b'',
229 b'',
230 _(b'number of lines of context to show'),
230 _(b'number of lines of context to show'),
231 _(b'NUM'),
231 _(b'NUM'),
232 ),
232 ),
233 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
233 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
234 (
234 (
235 b'',
235 b'',
236 b'root',
236 b'root',
237 b'',
237 b'',
238 _(b'produce diffs relative to subdirectory'),
238 _(b'produce diffs relative to subdirectory'),
239 _(b'DIR'),
239 _(b'DIR'),
240 ),
240 ),
241 ]
241 ]
242 )
242 )
243
243
244 mergetoolopts = [
244 mergetoolopts = [
245 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
245 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
246 ]
246 ]
247
247
248 similarityopts = [
248 similarityopts = [
249 (
249 (
250 b's',
250 b's',
251 b'similarity',
251 b'similarity',
252 b'',
252 b'',
253 _(b'guess renamed files by similarity (0<=s<=100)'),
253 _(b'guess renamed files by similarity (0<=s<=100)'),
254 _(b'SIMILARITY'),
254 _(b'SIMILARITY'),
255 )
255 )
256 ]
256 ]
257
257
258 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
258 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
259
259
260 debugrevlogopts = [
260 debugrevlogopts = [
261 (b'c', b'changelog', False, _(b'open changelog')),
261 (b'c', b'changelog', False, _(b'open changelog')),
262 (b'm', b'manifest', False, _(b'open manifest')),
262 (b'm', b'manifest', False, _(b'open manifest')),
263 (b'', b'dir', b'', _(b'open directory manifest')),
263 (b'', b'dir', b'', _(b'open directory manifest')),
264 ]
264 ]
265
265
266 # special string such that everything below this line will be ingored in the
266 # special string such that everything below this line will be ingored in the
267 # editor text
267 # editor text
268 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
268 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
269
269
270
270
271 def check_at_most_one_arg(opts, *args):
271 def check_at_most_one_arg(opts, *args):
272 """abort if more than one of the arguments are in opts
272 """abort if more than one of the arguments are in opts
273
273
274 Returns the unique argument or None if none of them were specified.
274 Returns the unique argument or None if none of them were specified.
275 """
275 """
276
276
277 def to_display(name):
277 def to_display(name):
278 return pycompat.sysbytes(name).replace(b'_', b'-')
278 return pycompat.sysbytes(name).replace(b'_', b'-')
279
279
280 previous = None
280 previous = None
281 for x in args:
281 for x in args:
282 if opts.get(x):
282 if opts.get(x):
283 if previous:
283 if previous:
284 raise error.Abort(
284 raise error.Abort(
285 _(b'cannot specify both --%s and --%s')
285 _(b'cannot specify both --%s and --%s')
286 % (to_display(previous), to_display(x))
286 % (to_display(previous), to_display(x))
287 )
287 )
288 previous = x
288 previous = x
289 return previous
289 return previous
290
290
291
291
292 def check_incompatible_arguments(opts, first, others):
292 def check_incompatible_arguments(opts, first, others):
293 """abort if the first argument is given along with any of the others
293 """abort if the first argument is given along with any of the others
294
294
295 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
295 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
296 among themselves, and they're passed as a single collection.
296 among themselves, and they're passed as a single collection.
297 """
297 """
298 for other in others:
298 for other in others:
299 check_at_most_one_arg(opts, first, other)
299 check_at_most_one_arg(opts, first, other)
300
300
301
301
302 def resolvecommitoptions(ui, opts):
302 def resolvecommitoptions(ui, opts):
303 """modify commit options dict to handle related options
303 """modify commit options dict to handle related options
304
304
305 The return value indicates that ``rewrite.update-timestamp`` is the reason
305 The return value indicates that ``rewrite.update-timestamp`` is the reason
306 the ``date`` option is set.
306 the ``date`` option is set.
307 """
307 """
308 check_at_most_one_arg(opts, b'date', b'currentdate')
308 check_at_most_one_arg(opts, b'date', b'currentdate')
309 check_at_most_one_arg(opts, b'user', b'currentuser')
309 check_at_most_one_arg(opts, b'user', b'currentuser')
310
310
311 datemaydiffer = False # date-only change should be ignored?
311 datemaydiffer = False # date-only change should be ignored?
312
312
313 if opts.get(b'currentdate'):
313 if opts.get(b'currentdate'):
314 opts[b'date'] = b'%d %d' % dateutil.makedate()
314 opts[b'date'] = b'%d %d' % dateutil.makedate()
315 elif (
315 elif (
316 not opts.get(b'date')
316 not opts.get(b'date')
317 and ui.configbool(b'rewrite', b'update-timestamp')
317 and ui.configbool(b'rewrite', b'update-timestamp')
318 and opts.get(b'currentdate') is None
318 and opts.get(b'currentdate') is None
319 ):
319 ):
320 opts[b'date'] = b'%d %d' % dateutil.makedate()
320 opts[b'date'] = b'%d %d' % dateutil.makedate()
321 datemaydiffer = True
321 datemaydiffer = True
322
322
323 if opts.get(b'currentuser'):
323 if opts.get(b'currentuser'):
324 opts[b'user'] = ui.username()
324 opts[b'user'] = ui.username()
325
325
326 return datemaydiffer
326 return datemaydiffer
327
327
328
328
329 def checknotesize(ui, opts):
329 def checknotesize(ui, opts):
330 """ make sure note is of valid format """
330 """ make sure note is of valid format """
331
331
332 note = opts.get(b'note')
332 note = opts.get(b'note')
333 if not note:
333 if not note:
334 return
334 return
335
335
336 if len(note) > 255:
336 if len(note) > 255:
337 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
337 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
338 if b'\n' in note:
338 if b'\n' in note:
339 raise error.Abort(_(b"note cannot contain a newline"))
339 raise error.Abort(_(b"note cannot contain a newline"))
340
340
341
341
342 def ishunk(x):
342 def ishunk(x):
343 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
343 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
344 return isinstance(x, hunkclasses)
344 return isinstance(x, hunkclasses)
345
345
346
346
347 def newandmodified(chunks, originalchunks):
347 def newandmodified(chunks, originalchunks):
348 newlyaddedandmodifiedfiles = set()
348 newlyaddedandmodifiedfiles = set()
349 alsorestore = set()
349 alsorestore = set()
350 for chunk in chunks:
350 for chunk in chunks:
351 if (
351 if (
352 ishunk(chunk)
352 ishunk(chunk)
353 and chunk.header.isnewfile()
353 and chunk.header.isnewfile()
354 and chunk not in originalchunks
354 and chunk not in originalchunks
355 ):
355 ):
356 newlyaddedandmodifiedfiles.add(chunk.header.filename())
356 newlyaddedandmodifiedfiles.add(chunk.header.filename())
357 alsorestore.update(
357 alsorestore.update(
358 set(chunk.header.files()) - {chunk.header.filename()}
358 set(chunk.header.files()) - {chunk.header.filename()}
359 )
359 )
360 return newlyaddedandmodifiedfiles, alsorestore
360 return newlyaddedandmodifiedfiles, alsorestore
361
361
362
362
363 def parsealiases(cmd):
363 def parsealiases(cmd):
364 return cmd.split(b"|")
364 return cmd.split(b"|")
365
365
366
366
367 def setupwrapcolorwrite(ui):
367 def setupwrapcolorwrite(ui):
368 # wrap ui.write so diff output can be labeled/colorized
368 # wrap ui.write so diff output can be labeled/colorized
369 def wrapwrite(orig, *args, **kw):
369 def wrapwrite(orig, *args, **kw):
370 label = kw.pop('label', b'')
370 label = kw.pop('label', b'')
371 for chunk, l in patch.difflabel(lambda: args):
371 for chunk, l in patch.difflabel(lambda: args):
372 orig(chunk, label=label + l)
372 orig(chunk, label=label + l)
373
373
374 oldwrite = ui.write
374 oldwrite = ui.write
375
375
376 def wrap(*args, **kwargs):
376 def wrap(*args, **kwargs):
377 return wrapwrite(oldwrite, *args, **kwargs)
377 return wrapwrite(oldwrite, *args, **kwargs)
378
378
379 setattr(ui, 'write', wrap)
379 setattr(ui, 'write', wrap)
380 return oldwrite
380 return oldwrite
381
381
382
382
383 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
383 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
384 try:
384 try:
385 if usecurses:
385 if usecurses:
386 if testfile:
386 if testfile:
387 recordfn = crecordmod.testdecorator(
387 recordfn = crecordmod.testdecorator(
388 testfile, crecordmod.testchunkselector
388 testfile, crecordmod.testchunkselector
389 )
389 )
390 else:
390 else:
391 recordfn = crecordmod.chunkselector
391 recordfn = crecordmod.chunkselector
392
392
393 return crecordmod.filterpatch(
393 return crecordmod.filterpatch(
394 ui, originalhunks, recordfn, operation
394 ui, originalhunks, recordfn, operation
395 )
395 )
396 except crecordmod.fallbackerror as e:
396 except crecordmod.fallbackerror as e:
397 ui.warn(b'%s\n' % e)
397 ui.warn(b'%s\n' % e)
398 ui.warn(_(b'falling back to text mode\n'))
398 ui.warn(_(b'falling back to text mode\n'))
399
399
400 return patch.filterpatch(ui, originalhunks, match, operation)
400 return patch.filterpatch(ui, originalhunks, match, operation)
401
401
402
402
403 def recordfilter(ui, originalhunks, match, operation=None):
403 def recordfilter(ui, originalhunks, match, operation=None):
404 """ Prompts the user to filter the originalhunks and return a list of
404 """ Prompts the user to filter the originalhunks and return a list of
405 selected hunks.
405 selected hunks.
406 *operation* is used for to build ui messages to indicate the user what
406 *operation* is used for to build ui messages to indicate the user what
407 kind of filtering they are doing: reverting, committing, shelving, etc.
407 kind of filtering they are doing: reverting, committing, shelving, etc.
408 (see patch.filterpatch).
408 (see patch.filterpatch).
409 """
409 """
410 usecurses = crecordmod.checkcurses(ui)
410 usecurses = crecordmod.checkcurses(ui)
411 testfile = ui.config(b'experimental', b'crecordtest')
411 testfile = ui.config(b'experimental', b'crecordtest')
412 oldwrite = setupwrapcolorwrite(ui)
412 oldwrite = setupwrapcolorwrite(ui)
413 try:
413 try:
414 newchunks, newopts = filterchunks(
414 newchunks, newopts = filterchunks(
415 ui, originalhunks, usecurses, testfile, match, operation
415 ui, originalhunks, usecurses, testfile, match, operation
416 )
416 )
417 finally:
417 finally:
418 ui.write = oldwrite
418 ui.write = oldwrite
419 return newchunks, newopts
419 return newchunks, newopts
420
420
421
421
422 def dorecord(
422 def dorecord(
423 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
423 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
424 ):
424 ):
425 opts = pycompat.byteskwargs(opts)
425 opts = pycompat.byteskwargs(opts)
426 if not ui.interactive():
426 if not ui.interactive():
427 if cmdsuggest:
427 if cmdsuggest:
428 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
428 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
429 else:
429 else:
430 msg = _(b'running non-interactively')
430 msg = _(b'running non-interactively')
431 raise error.Abort(msg)
431 raise error.Abort(msg)
432
432
433 # make sure username is set before going interactive
433 # make sure username is set before going interactive
434 if not opts.get(b'user'):
434 if not opts.get(b'user'):
435 ui.username() # raise exception, username not provided
435 ui.username() # raise exception, username not provided
436
436
437 def recordfunc(ui, repo, message, match, opts):
437 def recordfunc(ui, repo, message, match, opts):
438 """This is generic record driver.
438 """This is generic record driver.
439
439
440 Its job is to interactively filter local changes, and
440 Its job is to interactively filter local changes, and
441 accordingly prepare working directory into a state in which the
441 accordingly prepare working directory into a state in which the
442 job can be delegated to a non-interactive commit command such as
442 job can be delegated to a non-interactive commit command such as
443 'commit' or 'qrefresh'.
443 'commit' or 'qrefresh'.
444
444
445 After the actual job is done by non-interactive command, the
445 After the actual job is done by non-interactive command, the
446 working directory is restored to its original state.
446 working directory is restored to its original state.
447
447
448 In the end we'll record interesting changes, and everything else
448 In the end we'll record interesting changes, and everything else
449 will be left in place, so the user can continue working.
449 will be left in place, so the user can continue working.
450 """
450 """
451 if not opts.get(b'interactive-unshelve'):
451 if not opts.get(b'interactive-unshelve'):
452 checkunfinished(repo, commit=True)
452 checkunfinished(repo, commit=True)
453 wctx = repo[None]
453 wctx = repo[None]
454 merge = len(wctx.parents()) > 1
454 merge = len(wctx.parents()) > 1
455 if merge:
455 if merge:
456 raise error.Abort(
456 raise error.Abort(
457 _(
457 _(
458 b'cannot partially commit a merge '
458 b'cannot partially commit a merge '
459 b'(use "hg commit" instead)'
459 b'(use "hg commit" instead)'
460 )
460 )
461 )
461 )
462
462
463 def fail(f, msg):
463 def fail(f, msg):
464 raise error.Abort(b'%s: %s' % (f, msg))
464 raise error.Abort(b'%s: %s' % (f, msg))
465
465
466 force = opts.get(b'force')
466 force = opts.get(b'force')
467 if not force:
467 if not force:
468 match = matchmod.badmatch(match, fail)
468 match = matchmod.badmatch(match, fail)
469
469
470 status = repo.status(match=match)
470 status = repo.status(match=match)
471
471
472 overrides = {(b'ui', b'commitsubrepos'): True}
472 overrides = {(b'ui', b'commitsubrepos'): True}
473
473
474 with repo.ui.configoverride(overrides, b'record'):
474 with repo.ui.configoverride(overrides, b'record'):
475 # subrepoutil.precommit() modifies the status
475 # subrepoutil.precommit() modifies the status
476 tmpstatus = scmutil.status(
476 tmpstatus = scmutil.status(
477 copymod.copy(status.modified),
477 copymod.copy(status.modified),
478 copymod.copy(status.added),
478 copymod.copy(status.added),
479 copymod.copy(status.removed),
479 copymod.copy(status.removed),
480 copymod.copy(status.deleted),
480 copymod.copy(status.deleted),
481 copymod.copy(status.unknown),
481 copymod.copy(status.unknown),
482 copymod.copy(status.ignored),
482 copymod.copy(status.ignored),
483 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
483 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
484 )
484 )
485
485
486 # Force allows -X subrepo to skip the subrepo.
486 # Force allows -X subrepo to skip the subrepo.
487 subs, commitsubs, newstate = subrepoutil.precommit(
487 subs, commitsubs, newstate = subrepoutil.precommit(
488 repo.ui, wctx, tmpstatus, match, force=True
488 repo.ui, wctx, tmpstatus, match, force=True
489 )
489 )
490 for s in subs:
490 for s in subs:
491 if s in commitsubs:
491 if s in commitsubs:
492 dirtyreason = wctx.sub(s).dirtyreason(True)
492 dirtyreason = wctx.sub(s).dirtyreason(True)
493 raise error.Abort(dirtyreason)
493 raise error.Abort(dirtyreason)
494
494
495 if not force:
495 if not force:
496 repo.checkcommitpatterns(wctx, match, status, fail)
496 repo.checkcommitpatterns(wctx, match, status, fail)
497 diffopts = patch.difffeatureopts(
497 diffopts = patch.difffeatureopts(
498 ui,
498 ui,
499 opts=opts,
499 opts=opts,
500 whitespace=True,
500 whitespace=True,
501 section=b'commands',
501 section=b'commands',
502 configprefix=b'commit.interactive.',
502 configprefix=b'commit.interactive.',
503 )
503 )
504 diffopts.nodates = True
504 diffopts.nodates = True
505 diffopts.git = True
505 diffopts.git = True
506 diffopts.showfunc = True
506 diffopts.showfunc = True
507 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
507 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
508 originalchunks = patch.parsepatch(originaldiff)
508 originalchunks = patch.parsepatch(originaldiff)
509 match = scmutil.match(repo[None], pats)
509 match = scmutil.match(repo[None], pats)
510
510
511 # 1. filter patch, since we are intending to apply subset of it
511 # 1. filter patch, since we are intending to apply subset of it
512 try:
512 try:
513 chunks, newopts = filterfn(ui, originalchunks, match)
513 chunks, newopts = filterfn(ui, originalchunks, match)
514 except error.PatchError as err:
514 except error.PatchError as err:
515 raise error.Abort(_(b'error parsing patch: %s') % err)
515 raise error.Abort(_(b'error parsing patch: %s') % err)
516 opts.update(newopts)
516 opts.update(newopts)
517
517
518 # We need to keep a backup of files that have been newly added and
518 # We need to keep a backup of files that have been newly added and
519 # modified during the recording process because there is a previous
519 # modified during the recording process because there is a previous
520 # version without the edit in the workdir. We also will need to restore
520 # version without the edit in the workdir. We also will need to restore
521 # files that were the sources of renames so that the patch application
521 # files that were the sources of renames so that the patch application
522 # works.
522 # works.
523 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
523 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
524 chunks, originalchunks
524 chunks, originalchunks
525 )
525 )
526 contenders = set()
526 contenders = set()
527 for h in chunks:
527 for h in chunks:
528 try:
528 try:
529 contenders.update(set(h.files()))
529 contenders.update(set(h.files()))
530 except AttributeError:
530 except AttributeError:
531 pass
531 pass
532
532
533 changed = status.modified + status.added + status.removed
533 changed = status.modified + status.added + status.removed
534 newfiles = [f for f in changed if f in contenders]
534 newfiles = [f for f in changed if f in contenders]
535 if not newfiles:
535 if not newfiles:
536 ui.status(_(b'no changes to record\n'))
536 ui.status(_(b'no changes to record\n'))
537 return 0
537 return 0
538
538
539 modified = set(status.modified)
539 modified = set(status.modified)
540
540
541 # 2. backup changed files, so we can restore them in the end
541 # 2. backup changed files, so we can restore them in the end
542
542
543 if backupall:
543 if backupall:
544 tobackup = changed
544 tobackup = changed
545 else:
545 else:
546 tobackup = [
546 tobackup = [
547 f
547 f
548 for f in newfiles
548 for f in newfiles
549 if f in modified or f in newlyaddedandmodifiedfiles
549 if f in modified or f in newlyaddedandmodifiedfiles
550 ]
550 ]
551 backups = {}
551 backups = {}
552 if tobackup:
552 if tobackup:
553 backupdir = repo.vfs.join(b'record-backups')
553 backupdir = repo.vfs.join(b'record-backups')
554 try:
554 try:
555 os.mkdir(backupdir)
555 os.mkdir(backupdir)
556 except OSError as err:
556 except OSError as err:
557 if err.errno != errno.EEXIST:
557 if err.errno != errno.EEXIST:
558 raise
558 raise
559 try:
559 try:
560 # backup continues
560 # backup continues
561 for f in tobackup:
561 for f in tobackup:
562 fd, tmpname = pycompat.mkstemp(
562 fd, tmpname = pycompat.mkstemp(
563 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
563 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
564 )
564 )
565 os.close(fd)
565 os.close(fd)
566 ui.debug(b'backup %r as %r\n' % (f, tmpname))
566 ui.debug(b'backup %r as %r\n' % (f, tmpname))
567 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
567 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
568 backups[f] = tmpname
568 backups[f] = tmpname
569
569
570 fp = stringio()
570 fp = stringio()
571 for c in chunks:
571 for c in chunks:
572 fname = c.filename()
572 fname = c.filename()
573 if fname in backups:
573 if fname in backups:
574 c.write(fp)
574 c.write(fp)
575 dopatch = fp.tell()
575 dopatch = fp.tell()
576 fp.seek(0)
576 fp.seek(0)
577
577
578 # 2.5 optionally review / modify patch in text editor
578 # 2.5 optionally review / modify patch in text editor
579 if opts.get(b'review', False):
579 if opts.get(b'review', False):
580 patchtext = (
580 patchtext = (
581 crecordmod.diffhelptext
581 crecordmod.diffhelptext
582 + crecordmod.patchhelptext
582 + crecordmod.patchhelptext
583 + fp.read()
583 + fp.read()
584 )
584 )
585 reviewedpatch = ui.edit(
585 reviewedpatch = ui.edit(
586 patchtext, b"", action=b"diff", repopath=repo.path
586 patchtext, b"", action=b"diff", repopath=repo.path
587 )
587 )
588 fp.truncate(0)
588 fp.truncate(0)
589 fp.write(reviewedpatch)
589 fp.write(reviewedpatch)
590 fp.seek(0)
590 fp.seek(0)
591
591
592 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
592 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
593 # 3a. apply filtered patch to clean repo (clean)
593 # 3a. apply filtered patch to clean repo (clean)
594 if backups:
594 if backups:
595 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
595 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
596 mergemod.revert_to(repo[b'.'], matcher=m)
596 mergemod.revert_to(repo[b'.'], matcher=m)
597
597
598 # 3b. (apply)
598 # 3b. (apply)
599 if dopatch:
599 if dopatch:
600 try:
600 try:
601 ui.debug(b'applying patch\n')
601 ui.debug(b'applying patch\n')
602 ui.debug(fp.getvalue())
602 ui.debug(fp.getvalue())
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
604 except error.PatchError as err:
604 except error.PatchError as err:
605 raise error.Abort(pycompat.bytestr(err))
605 raise error.Abort(pycompat.bytestr(err))
606 del fp
606 del fp
607
607
608 # 4. We prepared working directory according to filtered
608 # 4. We prepared working directory according to filtered
609 # patch. Now is the time to delegate the job to
609 # patch. Now is the time to delegate the job to
610 # commit/qrefresh or the like!
610 # commit/qrefresh or the like!
611
611
612 # Make all of the pathnames absolute.
612 # Make all of the pathnames absolute.
613 newfiles = [repo.wjoin(nf) for nf in newfiles]
613 newfiles = [repo.wjoin(nf) for nf in newfiles]
614 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
614 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
615 finally:
615 finally:
616 # 5. finally restore backed-up files
616 # 5. finally restore backed-up files
617 try:
617 try:
618 dirstate = repo.dirstate
618 dirstate = repo.dirstate
619 for realname, tmpname in pycompat.iteritems(backups):
619 for realname, tmpname in pycompat.iteritems(backups):
620 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
620 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
621
621
622 if dirstate[realname] == b'n':
622 if dirstate[realname] == b'n':
623 # without normallookup, restoring timestamp
623 # without normallookup, restoring timestamp
624 # may cause partially committed files
624 # may cause partially committed files
625 # to be treated as unmodified
625 # to be treated as unmodified
626 dirstate.normallookup(realname)
626 dirstate.normallookup(realname)
627
627
628 # copystat=True here and above are a hack to trick any
628 # copystat=True here and above are a hack to trick any
629 # editors that have f open that we haven't modified them.
629 # editors that have f open that we haven't modified them.
630 #
630 #
631 # Also note that this racy as an editor could notice the
631 # Also note that this racy as an editor could notice the
632 # file's mtime before we've finished writing it.
632 # file's mtime before we've finished writing it.
633 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
633 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
634 os.unlink(tmpname)
634 os.unlink(tmpname)
635 if tobackup:
635 if tobackup:
636 os.rmdir(backupdir)
636 os.rmdir(backupdir)
637 except OSError:
637 except OSError:
638 pass
638 pass
639
639
640 def recordinwlock(ui, repo, message, match, opts):
640 def recordinwlock(ui, repo, message, match, opts):
641 with repo.wlock():
641 with repo.wlock():
642 return recordfunc(ui, repo, message, match, opts)
642 return recordfunc(ui, repo, message, match, opts)
643
643
644 return commit(ui, repo, recordinwlock, pats, opts)
644 return commit(ui, repo, recordinwlock, pats, opts)
645
645
646
646
class dirnode(object):
    """
    Represent a directory in the user's working copy, carrying the
    information needed to decide whether its status can be tersed.

    path is the directory's path, without a trailing '/'

    statuses is the set of status characters seen for any file anywhere
    below this directory (subdirectories included)

    files is a list of (name, status) pairs for direct children only

    subdirs maps a sub-directory name to its own dirnode object
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record a direct child file of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file somewhere below this directory.

        If filename contains a path separator, descend (creating dirnode
        objects as needed) until reaching the directory the file is a
        direct child of, and record it there.
        """
        if b'/' not in filename:
            # direct child of this directory
            self._addfileindir(filename, status)
        else:
            childname, remainder = filename.split(b'/', 1)

            # create the child dirnode lazily on first sight
            child = self.subdirs.get(childname)
            if child is None:
                child = dirnode(pathutil.join(self.path, childname))
                self.subdirs[childname] = child

            child.addfile(remainder, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) pairs for this directory's subtree.

        terseargs is the string of status abbreviations the user passed
        with the `--terse` flag.

        If every file in the subtree shares a single status and that
        status was requested for tersing, a single (status, dirpath)
        pair is yielded, with dirpath ending in '/'.  Otherwise the
        direct child files are yielded individually and the walk
        recurses into each subdirectory.
        """
        if len(self.statuses) == 1:
            # NOTE: pop() empties self.statuses -- the walk is one-shot
            lone = self.statuses.pop()

            # terse only when this status abbreviation was requested
            if lone in terseargs:
                yield lone, self.path + b'/'
                return

        # emit the files that live directly in this directory
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # then recurse into every subdirectory
        for child in self.subdirs.values():
            for st, fpath in child.tersewalk(terseargs):
                yield st, fpath
746
746
747
747
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is a scmutil.status() object holding one file list per
    status.  terseargs is the string the user passed to the `--terse`
    flag.

    Builds a tree of dirnode objects; each node records enough
    information to decide whether its whole directory can be tersed.
    """
    # the order matters here as that is used to produce the final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # validate the user-supplied status abbreviations
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.Abort(_(b"'%s' not recognized") % ch)

    # dirnode for the repository root
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    # one output bucket per status character (same set as allst)
    tersedict = {st: [] for st in allst}

    # feed every file, tagged with its one-letter status, into the tree
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)

    # the root dir itself is never tersed, so add its direct files as-is
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk each sub-directory, collecting (possibly tersed) entries
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    # assemble the per-status lists, each sorted, in canonical order
    tersedlist = [sorted(tersedict[st]) for st in allst]
    return scmutil.status(*tersedlist)
803
803
804
804
805 def _commentlines(raw):
805 def _commentlines(raw):
806 '''Surround lineswith a comment char and a new line'''
806 '''Surround lineswith a comment char and a new line'''
807 lines = raw.splitlines()
807 lines = raw.splitlines()
808 commentedlines = [b'# %s' % line for line in lines]
808 commentedlines = [b'# %s' % line for line in lines]
809 return b'\n'.join(commentedlines) + b'\n'
809 return b'\n'.join(commentedlines) + b'\n'
810
810
811
811
@attr.s(frozen=True)
class morestatus(object):
    """Extra information shown by `hg status` for an unfinished operation.

    Instances are produced by readmorestatus() when the repository has an
    interrupted multi-step command (rebase, graft, ...) and/or an active
    merge with unresolved files.
    """

    reporoot = attr.ib()  # repo.root; used to print cwd-relative paths
    unfinishedop = attr.ib()  # name of the unfinished operation, or None
    unfinishedmsg = attr.ib()  # hint message for that operation, or None
    activemerge = attr.ib()  # whether a merge is currently in progress
    unresolvedpaths = attr.ib()  # sorted unresolved paths, or None
    # Paths already emitted through formatfile(); lets _formatconflicts()
    # output only the unresolved paths that were not otherwise displayed.
    # attr.Factory(set) gives each instance its own set -- the previous
    # `default=set()` shared one mutable set across every instance.
    _formattedpaths = attr.ib(init=False, default=attr.Factory(set))
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Add morestatus data for one file item to formatter `fm`."""
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Emit the trailing morestatus item: state banner and conflicts."""
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

            if self.unfinishedop:
                fm.data(unfinished=self.unfinishedop)
                statemsg = (
                    _(b'The repository is in an unfinished *%s* state.')
                    % self.unfinishedop
                )
                fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
            if self.unfinishedmsg:
                fm.data(unfinishedmsg=self.unfinishedmsg)

            # May also start new data items.
            self._formatconflicts(fm)

            if self.unfinishedmsg:
                fm.plain(
                    b'%s\n' % _commentlines(self.unfinishedmsg),
                    label=self._label,
                )

    def _formatconflicts(self, fm):
        """Emit the unresolved-conflicts summary (merge in progress only)."""
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b' %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            # The template must be bytes (b'''...'''): it is interpolated
            # with a bytes value and later passed to _commentlines(),
            # which does bytes %-formatting.  A plain str literal here
            # would raise TypeError (or print a b'...' repr) on Python 3.
            msg = (
                _(
                    b'''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                # We can't claim to know the status of the file - it may
                # just have been in one of the states that were not
                # requested for display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
889
889
890
890
def readmorestatus(repo):
    """Return a morestatus object if the repo has unfinished state, else None."""
    statetuple = statemod.getrepostate(repo)
    mergestate = mergestatemod.mergestate.read(repo)
    activemerge = mergestate.active()

    if not (statetuple or activemerge):
        # nothing in progress: no extended status to report
        return None

    unfinishedop = unfinishedmsg = None
    if statetuple:
        unfinishedop, unfinishedmsg = statetuple
    unresolved = sorted(mergestate.unresolved()) if activemerge else None

    return morestatus(
        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
    )
907
907
908
908
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry) for each matching command.

    Debug commands (or their aliases) are returned only when no normal
    command matches.  With strict=True only exact alias matches count;
    otherwise unambiguous prefixes match too.
    """
    choice = {}
    debugchoice = {}

    # an exact table key short-circuits: the "log" alias beats "log|history"
    keys = [cmd] if cmd in table else table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        if cmd in aliases:
            found = cmd
        elif strict:
            found = None
        else:
            # first alias that cmd is a prefix of, if any
            found = next((a for a in aliases if a.startswith(cmd)), None)

        if found is None:
            continue

        isdebug = aliases[0].startswith(b"debug") or found.startswith(
            b"debug"
        )
        bucket = debugchoice if isdebug else choice
        bucket[found] = (aliases, table[entry])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
946
946
947
947
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match and
    UnknownCommand when none do.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins regardless of how many prefix matches exist
    try:
        return choice[cmd]
    except KeyError:
        pass

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
963
963
964
964
def changebranch(ui, repo, revs, label, opts):
    """Change the branch name of the given revs to label.

    Rewrites each changeset in revs (which must form a single linear,
    topologically-headed stack of non-obsolete, rewritable changesets)
    with the new branch name, creates obsmarkers for the old nodes,
    moves bookmarks, and updates the working copy if it was on one of
    the rewritten changesets.
    """

    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort(b"empty revision set")
        # a single root guarantees the revisions form one linear stack
        roots = repo.revs(b'roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(
                _(b"cannot change branch of non-linear revisions")
            )
        # refuse public/immutable changesets etc.
        rewriteutil.precheck(repo, revs, b'change branch of')

        root = repo[roots.first()]
        # branches of the stack's parents; reusing one of those is allowed
        rpb = {parent.branch() for parent in root.parents()}
        if (
            not opts.get(b'force')
            and label not in rpb
            and label in repo.branchmap()
        ):
            raise error.Abort(_(b"a branch of the same name already exists"))

        if repo.revs(b'obsolete() and %ld', revs):
            raise error.Abort(
                _(b"cannot change branch of a obsolete changeset")
            )

        # make sure only topological heads
        if repo.revs(b'heads(%ld) - head()', revs):
            raise error.Abort(_(b"cannot change branch in middle of a stack"))

        # maps old node -> (new node,), in the format cleanupnodes expects
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context

        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            # closes over ctx; safe because memctx consumes it within
            # this same loop iteration (via repo.commitctx below)
            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug(
                b"changing branch of '%s' from '%s' to '%s'\n"
                % (hex(ctx.node()), oldbranch, label)
            )
            extra = ctx.extra()
            # record provenance so the rewrite is traceable
            extra[b'branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(
                repo,
                (p1, p2),
                ctx.description(),
                ctx.files(),
                filectxfn,
                user=ctx.user(),
                date=ctx.date(),
                extra=extra,
                branch=label,
            )

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug(b'new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(
            repo, replacements, b'branch-change', fixphase=True
        )

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg

                hg.update(repo, newid[0], quietempty=True)

        ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1067
1067
1068
1068
def findrepo(p):
    """Walk upward from path *p* looking for a directory containing '.hg'.

    Return the repository root, or None when no repository is found
    before reaching the filesystem root.
    """
    while not os.path.isdir(os.path.join(p, b".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() reached a fixed point: we hit the root
            return None
        p = parent

    return p
1076
1076
1077
1077
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory is clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)

    st = repo.status()
    if any([st.modified, st.added, st.removed, st.deleted]):
        raise error.Abort(_(b'uncommitted changes'), hint=hint)

    # recurse into subrepos, in deterministic order
    wctx = repo[None]
    for name in sorted(wctx.substate):
        wctx.sub(name).bailifchanged(hint=hint)
1095
1095
1096
1096
def logmessage(ui, opts):
    """Get the log message according to the -m and -l options."""

    check_at_most_one_arg(opts, b'message', b'logfile')

    message = opts.get(b'message')
    logfile = opts.get(b'logfile')

    # an explicit -m message wins; with neither option there is nothing
    # to read, so hand back whatever -m provided (possibly None)
    if message or not logfile:
        return message

    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings from the file
        return b'\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(
            _(b"can't read commit message '%s': %s")
            % (logfile, encoding.strtolocal(inst.strerror))
        )
1117
1117
1118
1118
def mergeeditform(ctxorbool, baseformname):
    """Return the editform name used to look up a committemplate.

    ``ctxorbool`` is either a changectx about to be committed or a plain
    bool saying whether a merge is being committed.  The result is
    ``baseformname`` with '.merge' appended for merges and '.normal'
    otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # A context with two parents is a merge commit.
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (b".merge" if ismerge else b".normal")
1135
1135
1136
1136
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """Return the commit-message editor callback appropriate for '--edit'.

    ``finishdesc`` is called with the edited message just after editing and
    before the empty-message check; its return value is what gets stored.
    ``extramsg`` replaces the 'Leave message empty to abort commit' line in
    the editor ('HG: ' prefix and EOL are added automatically).
    ``editform`` is a dot-separated name identifying the editing context
    (used to pick a committemplate).

    If ``finishdesc`` or ``extramsg`` is given (both are MQ-specific), the
    forced interactive editor is returned regardless of ``edit``.
    """
    if edit or finishdesc or extramsg:

        def forcededitor(r, c, s):
            return commitforceeditor(
                r,
                c,
                s,
                finishdesc=finishdesc,
                extramsg=extramsg,
                editform=editform,
            )

        return forcededitor
    if editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    return commiteditor
1167
1167
1168
1168
def _escapecommandtemplate(tmpl):
    """Escape the literal (outermost string) parts of template ``tmpl``.

    Escaping keeps backslashes in literal text intact when the template is
    later parsed, which matters on Windows where '\\' is the directory
    separator.  Non-string tokens (template syntax) pass through untouched.
    """
    pieces = []
    for kind, begin, stop in templater.scantemplate(tmpl, raw=True):
        chunk = tmpl[begin:stop]
        if kind == b'string':
            chunk = stringutil.escapestr(chunk)
        pieces.append(chunk)
    return b''.join(pieces)
1177
1177
1178
1178
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        # Preserve empty/None templates unchanged rather than rendering.
        return tmpl
    templ = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return templ.renderdefault(props)
1196
1196
1197
1197
def rendertemplate(ctx, tmpl, props=None):
    """Expand the literal template byte-string ``tmpl`` against changeset
    ``ctx``.

    Each ``props`` item must be a stringify-able value or a callable
    returning such a value — no bare list or dict.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(
        repo.ui, tmpl, defaults=templatekw.keywords, resources=resources
    )
    mapping = {b'ctx': ctx}
    mapping.update(props or {})
    return templ.renderdefault(mapping)
1213
1213
1214
1214
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # Map of %-escape character -> equivalent template fragment.  Escapes
    # whose backing data was not supplied (e.g. %n with no seqno) are simply
    # absent, so using them hits the "invalid format spec" error below.
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # NOTE(review): the ``revwidth`` parameter is not referenced in this
    # body; the %r expansion reads a ``revwidth`` template keyword instead.
    # Presumably kept for API symmetry with callers — confirm before removal.
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    # When both are known, zero-pad the sequence number to the width of the
    # total count (this deliberately overrides the plain {seqno} above).
    if total is not None and seqno is not None:
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    # Only literal (outermost string) tokens get %-expansion; embedded
    # template syntax like {tags % "{tag}"} is passed through verbatim.
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            newname.append(pat[start:end])
            continue
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                # No more escapes: escape the literal tail and stop.
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # '%' is the last character of the token: nothing follows it.
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            c = pat[n + 1 : n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % c
                )
    return b''.join(newname)
1284
1284
1285
1285
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format output filename ``pat`` for changeset
    ``ctx`` and return the resulting byte-string filename.

    An empty pattern is returned unchanged.
    """
    if not pat:
        return pat
    fntemplate = _buildfntemplate(pat, **props)
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(ctx, fntemplate, pycompat.byteskwargs(props))
1294
1294
1295
1295
def isstdiofilename(pat):
    """Return True when ``pat`` denotes stdin/stdout (empty, None, or '-')."""
    if not pat:
        return True
    return pat == b'-'
1299
1299
1300
1300
class _unclosablefile(object):
    """Proxy around a file object whose close() is a no-op.

    Used to hand callers the ui's stdin/stdout without letting them close
    the underlying stream; everything except close() and context-manager
    exit is delegated to the wrapped file.
    """

    def __init__(self, fp):
        self._fp = fp

    def close(self):
        # Deliberately do nothing: the wrapped stream outlives this proxy.
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, name):
        # Delegate every other attribute/method to the wrapped file.
        return getattr(self._fp, name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Do not close on context-manager exit either.
        pass
1319
1319
1320
1320
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open and return the output file described by pattern ``pat`` for
    changeset ``ctx``.

    A stdio pattern ('-' or empty) yields the ui's stdout (or stdin for a
    read mode), wrapped so that callers cannot actually close it; any other
    pattern is %-expanded via makefilename() and opened with ``mode``.
    """
    if not isstdiofilename(pat):
        fn = makefilename(ctx, pat, **props)
        return open(fn, mode)

    writable = mode not in (b'r', b'rb')
    repo = ctx.repo()
    stream = repo.ui.fout if writable else repo.ui.fin
    return _unclosablefile(stream)
1333
1333
1334
1334
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    The storage to open is chosen from the --changelog/--manifest/--dir
    options or, failing those, from ``file_`` (a tracked file, or a raw
    revlog path when no repository storage matches).  With
    ``returnrevlog=True`` the result is coerced to an actual revlog
    instance, aborting if that is impossible.
    """
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']
    # Validate option combinations first; collect the message and raise
    # once so every invalid combination goes through the same Abort path.
    msg = None
    if cl and mf:
        msg = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if msg:
        raise error.Abort(msg)

    # Resolve the requested storage from the repository, in option-priority
    # order: changelog, tree-manifest directory, root manifest, filelog.
    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if not scmutil.istreemanifest(repo):
                raise error.Abort(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            # An empty dirlog means the directory has no history; fall
            # through with r=None rather than returning empty storage.
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, b'_revlog'):
            r = r._revlog  # pytype: disable=attribute-error
        elif r is not None:
            raise error.Abort(_(b'%r does not appear to be a revlog') % r)

    if not r:
        if not returnrevlog:
            raise error.Abort(_(b'cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_(b"revlog '%s' not found") % file_)
        # Fall back to opening ``file_`` as a raw revlog on disk.
        # NOTE(review): file_[:-2] assumes a two-character suffix
        # (e.g. '.i'/'.d') on the given path — confirm callers guarantee it.
        r = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
        )
    return r
1402
1402
1403
1403
def openrevlog(repo, cmd, file_, opts):
    """Obtain the revlog backing the storage of an item.

    Identical to ``openstorage()`` except that the result is always a
    revlog instance.  Most callers want the main storage object instead;
    this helper exists for code (e.g. debug commands) that must inspect
    low-level revlog implementation details.
    """
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1415
1415
1416
1416
1417 def copy(ui, repo, pats, opts, rename=False):
1417 def copy(ui, repo, pats, opts, rename=False):
1418 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1418 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1419
1419
1420 # called with the repo lock held
1420 # called with the repo lock held
1421 #
1421 #
1422 # hgsep => pathname that uses "/" to separate directories
1422 # hgsep => pathname that uses "/" to separate directories
1423 # ossep => pathname that uses os.sep to separate directories
1423 # ossep => pathname that uses os.sep to separate directories
1424 cwd = repo.getcwd()
1424 cwd = repo.getcwd()
1425 targets = {}
1425 targets = {}
1426 forget = opts.get(b"forget")
1426 forget = opts.get(b"forget")
1427 after = opts.get(b"after")
1427 after = opts.get(b"after")
1428 dryrun = opts.get(b"dry_run")
1428 dryrun = opts.get(b"dry_run")
1429 rev = opts.get(b'at_rev')
1429 rev = opts.get(b'at_rev')
1430 if rev:
1430 if rev:
1431 if not forget and not after:
1431 if not forget and not after:
1432 # TODO: Remove this restriction and make it also create the copy
1432 # TODO: Remove this restriction and make it also create the copy
1433 # targets (and remove the rename source if rename==True).
1433 # targets (and remove the rename source if rename==True).
1434 raise error.Abort(_(b'--at-rev requires --after'))
1434 raise error.Abort(_(b'--at-rev requires --after'))
1435 ctx = scmutil.revsingle(repo, rev)
1435 ctx = scmutil.revsingle(repo, rev)
1436 if len(ctx.parents()) > 1:
1436 if len(ctx.parents()) > 1:
1437 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1437 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1438 else:
1438 else:
1439 ctx = repo[None]
1439 ctx = repo[None]
1440
1440
1441 pctx = ctx.p1()
1441 pctx = ctx.p1()
1442
1442
1443 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1443 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1444
1444
1445 if forget:
1445 if forget:
1446 if ctx.rev() is None:
1446 if ctx.rev() is None:
1447 new_ctx = ctx
1447 new_ctx = ctx
1448 else:
1448 else:
1449 if len(ctx.parents()) > 1:
1449 if len(ctx.parents()) > 1:
1450 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1450 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1451 # avoid cycle context -> subrepo -> cmdutil
1451 # avoid cycle context -> subrepo -> cmdutil
1452 from . import context
1452 from . import context
1453
1453
1454 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1454 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1455 new_ctx = context.overlayworkingctx(repo)
1455 new_ctx = context.overlayworkingctx(repo)
1456 new_ctx.setbase(ctx.p1())
1456 new_ctx.setbase(ctx.p1())
1457 mergemod.graft(repo, ctx, wctx=new_ctx)
1457 mergemod.graft(repo, ctx, wctx=new_ctx)
1458
1458
1459 match = scmutil.match(ctx, pats, opts)
1459 match = scmutil.match(ctx, pats, opts)
1460
1460
1461 current_copies = ctx.p1copies()
1461 current_copies = ctx.p1copies()
1462 current_copies.update(ctx.p2copies())
1462 current_copies.update(ctx.p2copies())
1463
1463
1464 uipathfn = scmutil.getuipathfn(repo)
1464 uipathfn = scmutil.getuipathfn(repo)
1465 for f in ctx.walk(match):
1465 for f in ctx.walk(match):
1466 if f in current_copies:
1466 if f in current_copies:
1467 new_ctx[f].markcopied(None)
1467 new_ctx[f].markcopied(None)
1468 elif match.exact(f):
1468 elif match.exact(f):
1469 ui.warn(
1469 ui.warn(
1470 _(
1470 _(
1471 b'%s: not unmarking as copy - file is not marked as copied\n'
1471 b'%s: not unmarking as copy - file is not marked as copied\n'
1472 )
1472 )
1473 % uipathfn(f)
1473 % uipathfn(f)
1474 )
1474 )
1475
1475
1476 if ctx.rev() is not None:
1476 if ctx.rev() is not None:
1477 with repo.lock():
1477 with repo.lock():
1478 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1478 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1479 new_node = mem_ctx.commit()
1479 new_node = mem_ctx.commit()
1480
1480
1481 if repo.dirstate.p1() == ctx.node():
1481 if repo.dirstate.p1() == ctx.node():
1482 with repo.dirstate.parentchange():
1482 with repo.dirstate.parentchange():
1483 scmutil.movedirstate(repo, repo[new_node])
1483 scmutil.movedirstate(repo, repo[new_node])
1484 replacements = {ctx.node(): [new_node]}
1484 replacements = {ctx.node(): [new_node]}
1485 scmutil.cleanupnodes(
1485 scmutil.cleanupnodes(
1486 repo, replacements, b'uncopy', fixphase=True
1486 repo, replacements, b'uncopy', fixphase=True
1487 )
1487 )
1488
1488
1489 return
1489 return
1490
1490
1491 pats = scmutil.expandpats(pats)
1491 pats = scmutil.expandpats(pats)
1492 if not pats:
1492 if not pats:
1493 raise error.Abort(_(b'no source or destination specified'))
1493 raise error.Abort(_(b'no source or destination specified'))
1494 if len(pats) == 1:
1494 if len(pats) == 1:
1495 raise error.Abort(_(b'no destination specified'))
1495 raise error.Abort(_(b'no destination specified'))
1496 dest = pats.pop()
1496 dest = pats.pop()
1497
1497
1498 def walkpat(pat):
1498 def walkpat(pat):
1499 srcs = []
1499 srcs = []
1500 # TODO: Inline and simplify the non-working-copy version of this code
1500 # TODO: Inline and simplify the non-working-copy version of this code
1501 # since it shares very little with the working-copy version of it.
1501 # since it shares very little with the working-copy version of it.
1502 ctx_to_walk = ctx if ctx.rev() is None else pctx
1502 ctx_to_walk = ctx if ctx.rev() is None else pctx
1503 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1503 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1504 for abs in ctx_to_walk.walk(m):
1504 for abs in ctx_to_walk.walk(m):
1505 rel = uipathfn(abs)
1505 rel = uipathfn(abs)
1506 exact = m.exact(abs)
1506 exact = m.exact(abs)
1507 if abs not in ctx:
1507 if abs not in ctx:
1508 if abs in pctx:
1508 if abs in pctx:
1509 if not after:
1509 if not after:
1510 if exact:
1510 if exact:
1511 ui.warn(
1511 ui.warn(
1512 _(
1512 _(
1513 b'%s: not copying - file has been marked '
1513 b'%s: not copying - file has been marked '
1514 b'for remove\n'
1514 b'for remove\n'
1515 )
1515 )
1516 % rel
1516 % rel
1517 )
1517 )
1518 continue
1518 continue
1519 else:
1519 else:
1520 if exact:
1520 if exact:
1521 ui.warn(
1521 ui.warn(
1522 _(b'%s: not copying - file is not managed\n') % rel
1522 _(b'%s: not copying - file is not managed\n') % rel
1523 )
1523 )
1524 continue
1524 continue
1525
1525
1526 # abs: hgsep
1526 # abs: hgsep
1527 # rel: ossep
1527 # rel: ossep
1528 srcs.append((abs, rel, exact))
1528 srcs.append((abs, rel, exact))
1529 return srcs
1529 return srcs
1530
1530
1531 if ctx.rev() is not None:
1531 if ctx.rev() is not None:
1532 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1532 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1533 absdest = pathutil.canonpath(repo.root, cwd, dest)
1533 absdest = pathutil.canonpath(repo.root, cwd, dest)
1534 if ctx.hasdir(absdest):
1534 if ctx.hasdir(absdest):
1535 raise error.Abort(
1535 raise error.Abort(
1536 _(b'%s: --at-rev does not support a directory as destination')
1536 _(b'%s: --at-rev does not support a directory as destination')
1537 % uipathfn(absdest)
1537 % uipathfn(absdest)
1538 )
1538 )
1539 if absdest not in ctx:
1539 if absdest not in ctx:
1540 raise error.Abort(
1540 raise error.Abort(
1541 _(b'%s: copy destination does not exist in %s')
1541 _(b'%s: copy destination does not exist in %s')
1542 % (uipathfn(absdest), ctx)
1542 % (uipathfn(absdest), ctx)
1543 )
1543 )
1544
1544
1545 # avoid cycle context -> subrepo -> cmdutil
1545 # avoid cycle context -> subrepo -> cmdutil
1546 from . import context
1546 from . import context
1547
1547
1548 copylist = []
1548 copylist = []
1549 for pat in pats:
1549 for pat in pats:
1550 srcs = walkpat(pat)
1550 srcs = walkpat(pat)
1551 if not srcs:
1551 if not srcs:
1552 continue
1552 continue
1553 for abs, rel, exact in srcs:
1553 for abs, rel, exact in srcs:
1554 copylist.append(abs)
1554 copylist.append(abs)
1555
1555
1556 if not copylist:
1556 if not copylist:
1557 raise error.Abort(_(b'no files to copy'))
1557 raise error.Abort(_(b'no files to copy'))
1558 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1558 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1559 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1559 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1560 # existing functions below.
1560 # existing functions below.
1561 if len(copylist) != 1:
1561 if len(copylist) != 1:
1562 raise error.Abort(_(b'--at-rev requires a single source'))
1562 raise error.Abort(_(b'--at-rev requires a single source'))
1563
1563
1564 new_ctx = context.overlayworkingctx(repo)
1564 new_ctx = context.overlayworkingctx(repo)
1565 new_ctx.setbase(ctx.p1())
1565 new_ctx.setbase(ctx.p1())
1566 mergemod.graft(repo, ctx, wctx=new_ctx)
1566 mergemod.graft(repo, ctx, wctx=new_ctx)
1567
1567
1568 new_ctx.markcopied(absdest, copylist[0])
1568 new_ctx.markcopied(absdest, copylist[0])
1569
1569
1570 with repo.lock():
1570 with repo.lock():
1571 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1571 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1572 new_node = mem_ctx.commit()
1572 new_node = mem_ctx.commit()
1573
1573
1574 if repo.dirstate.p1() == ctx.node():
1574 if repo.dirstate.p1() == ctx.node():
1575 with repo.dirstate.parentchange():
1575 with repo.dirstate.parentchange():
1576 scmutil.movedirstate(repo, repo[new_node])
1576 scmutil.movedirstate(repo, repo[new_node])
1577 replacements = {ctx.node(): [new_node]}
1577 replacements = {ctx.node(): [new_node]}
1578 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1578 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1579
1579
1580 return
1580 return
1581
1581
1582 # abssrc: hgsep
1582 # abssrc: hgsep
1583 # relsrc: ossep
1583 # relsrc: ossep
1584 # otarget: ossep
1584 # otarget: ossep
1585 def copyfile(abssrc, relsrc, otarget, exact):
1585 def copyfile(abssrc, relsrc, otarget, exact):
1586 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1586 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1587 if b'/' in abstarget:
1587 if b'/' in abstarget:
1588 # We cannot normalize abstarget itself, this would prevent
1588 # We cannot normalize abstarget itself, this would prevent
1589 # case only renames, like a => A.
1589 # case only renames, like a => A.
1590 abspath, absname = abstarget.rsplit(b'/', 1)
1590 abspath, absname = abstarget.rsplit(b'/', 1)
1591 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1591 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1592 reltarget = repo.pathto(abstarget, cwd)
1592 reltarget = repo.pathto(abstarget, cwd)
1593 target = repo.wjoin(abstarget)
1593 target = repo.wjoin(abstarget)
1594 src = repo.wjoin(abssrc)
1594 src = repo.wjoin(abssrc)
1595 state = repo.dirstate[abstarget]
1595 state = repo.dirstate[abstarget]
1596
1596
1597 scmutil.checkportable(ui, abstarget)
1597 scmutil.checkportable(ui, abstarget)
1598
1598
1599 # check for collisions
1599 # check for collisions
1600 prevsrc = targets.get(abstarget)
1600 prevsrc = targets.get(abstarget)
1601 if prevsrc is not None:
1601 if prevsrc is not None:
1602 ui.warn(
1602 ui.warn(
1603 _(b'%s: not overwriting - %s collides with %s\n')
1603 _(b'%s: not overwriting - %s collides with %s\n')
1604 % (
1604 % (
1605 reltarget,
1605 reltarget,
1606 repo.pathto(abssrc, cwd),
1606 repo.pathto(abssrc, cwd),
1607 repo.pathto(prevsrc, cwd),
1607 repo.pathto(prevsrc, cwd),
1608 )
1608 )
1609 )
1609 )
1610 return True # report a failure
1610 return True # report a failure
1611
1611
1612 # check for overwrites
1612 # check for overwrites
1613 exists = os.path.lexists(target)
1613 exists = os.path.lexists(target)
1614 samefile = False
1614 samefile = False
1615 if exists and abssrc != abstarget:
1615 if exists and abssrc != abstarget:
1616 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1616 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1617 abstarget
1617 abstarget
1618 ):
1618 ):
1619 if not rename:
1619 if not rename:
1620 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1620 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1621 return True # report a failure
1621 return True # report a failure
1622 exists = False
1622 exists = False
1623 samefile = True
1623 samefile = True
1624
1624
1625 if not after and exists or after and state in b'mn':
1625 if not after and exists or after and state in b'mn':
1626 if not opts[b'force']:
1626 if not opts[b'force']:
1627 if state in b'mn':
1627 if state in b'mn':
1628 msg = _(b'%s: not overwriting - file already committed\n')
1628 msg = _(b'%s: not overwriting - file already committed\n')
1629 if after:
1629 if after:
1630 flags = b'--after --force'
1630 flags = b'--after --force'
1631 else:
1631 else:
1632 flags = b'--force'
1632 flags = b'--force'
1633 if rename:
1633 if rename:
1634 hint = (
1634 hint = (
1635 _(
1635 _(
1636 b"('hg rename %s' to replace the file by "
1636 b"('hg rename %s' to replace the file by "
1637 b'recording a rename)\n'
1637 b'recording a rename)\n'
1638 )
1638 )
1639 % flags
1639 % flags
1640 )
1640 )
1641 else:
1641 else:
1642 hint = (
1642 hint = (
1643 _(
1643 _(
1644 b"('hg copy %s' to replace the file by "
1644 b"('hg copy %s' to replace the file by "
1645 b'recording a copy)\n'
1645 b'recording a copy)\n'
1646 )
1646 )
1647 % flags
1647 % flags
1648 )
1648 )
1649 else:
1649 else:
1650 msg = _(b'%s: not overwriting - file exists\n')
1650 msg = _(b'%s: not overwriting - file exists\n')
1651 if rename:
1651 if rename:
1652 hint = _(
1652 hint = _(
1653 b"('hg rename --after' to record the rename)\n"
1653 b"('hg rename --after' to record the rename)\n"
1654 )
1654 )
1655 else:
1655 else:
1656 hint = _(b"('hg copy --after' to record the copy)\n")
1656 hint = _(b"('hg copy --after' to record the copy)\n")
1657 ui.warn(msg % reltarget)
1657 ui.warn(msg % reltarget)
1658 ui.warn(hint)
1658 ui.warn(hint)
1659 return True # report a failure
1659 return True # report a failure
1660
1660
1661 if after:
1661 if after:
1662 if not exists:
1662 if not exists:
1663 if rename:
1663 if rename:
1664 ui.warn(
1664 ui.warn(
1665 _(b'%s: not recording move - %s does not exist\n')
1665 _(b'%s: not recording move - %s does not exist\n')
1666 % (relsrc, reltarget)
1666 % (relsrc, reltarget)
1667 )
1667 )
1668 else:
1668 else:
1669 ui.warn(
1669 ui.warn(
1670 _(b'%s: not recording copy - %s does not exist\n')
1670 _(b'%s: not recording copy - %s does not exist\n')
1671 % (relsrc, reltarget)
1671 % (relsrc, reltarget)
1672 )
1672 )
1673 return True # report a failure
1673 return True # report a failure
1674 elif not dryrun:
1674 elif not dryrun:
1675 try:
1675 try:
1676 if exists:
1676 if exists:
1677 os.unlink(target)
1677 os.unlink(target)
1678 targetdir = os.path.dirname(target) or b'.'
1678 targetdir = os.path.dirname(target) or b'.'
1679 if not os.path.isdir(targetdir):
1679 if not os.path.isdir(targetdir):
1680 os.makedirs(targetdir)
1680 os.makedirs(targetdir)
1681 if samefile:
1681 if samefile:
1682 tmp = target + b"~hgrename"
1682 tmp = target + b"~hgrename"
1683 os.rename(src, tmp)
1683 os.rename(src, tmp)
1684 os.rename(tmp, target)
1684 os.rename(tmp, target)
1685 else:
1685 else:
1686 # Preserve stat info on renames, not on copies; this matches
1686 # Preserve stat info on renames, not on copies; this matches
1687 # Linux CLI behavior.
1687 # Linux CLI behavior.
1688 util.copyfile(src, target, copystat=rename)
1688 util.copyfile(src, target, copystat=rename)
1689 srcexists = True
1689 srcexists = True
1690 except IOError as inst:
1690 except IOError as inst:
1691 if inst.errno == errno.ENOENT:
1691 if inst.errno == errno.ENOENT:
1692 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1692 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1693 srcexists = False
1693 srcexists = False
1694 else:
1694 else:
1695 ui.warn(
1695 ui.warn(
1696 _(b'%s: cannot copy - %s\n')
1696 _(b'%s: cannot copy - %s\n')
1697 % (relsrc, encoding.strtolocal(inst.strerror))
1697 % (relsrc, encoding.strtolocal(inst.strerror))
1698 )
1698 )
1699 return True # report a failure
1699 return True # report a failure
1700
1700
1701 if ui.verbose or not exact:
1701 if ui.verbose or not exact:
1702 if rename:
1702 if rename:
1703 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1703 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1704 else:
1704 else:
1705 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1705 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1706
1706
1707 targets[abstarget] = abssrc
1707 targets[abstarget] = abssrc
1708
1708
1709 # fix up dirstate
1709 # fix up dirstate
1710 scmutil.dirstatecopy(
1710 scmutil.dirstatecopy(
1711 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1711 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1712 )
1712 )
1713 if rename and not dryrun:
1713 if rename and not dryrun:
1714 if not after and srcexists and not samefile:
1714 if not after and srcexists and not samefile:
1715 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1715 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1716 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1716 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1717 ctx.forget([abssrc])
1717 ctx.forget([abssrc])
1718
1718
1719 # pat: ossep
1719 # pat: ossep
1720 # dest ossep
1720 # dest ossep
1721 # srcs: list of (hgsep, hgsep, ossep, bool)
1721 # srcs: list of (hgsep, hgsep, ossep, bool)
1722 # return: function that takes hgsep and returns ossep
1722 # return: function that takes hgsep and returns ossep
1723 def targetpathfn(pat, dest, srcs):
1723 def targetpathfn(pat, dest, srcs):
1724 if os.path.isdir(pat):
1724 if os.path.isdir(pat):
1725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1726 abspfx = util.localpath(abspfx)
1726 abspfx = util.localpath(abspfx)
1727 if destdirexists:
1727 if destdirexists:
1728 striplen = len(os.path.split(abspfx)[0])
1728 striplen = len(os.path.split(abspfx)[0])
1729 else:
1729 else:
1730 striplen = len(abspfx)
1730 striplen = len(abspfx)
1731 if striplen:
1731 if striplen:
1732 striplen += len(pycompat.ossep)
1732 striplen += len(pycompat.ossep)
1733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1734 elif destdirexists:
1734 elif destdirexists:
1735 res = lambda p: os.path.join(
1735 res = lambda p: os.path.join(
1736 dest, os.path.basename(util.localpath(p))
1736 dest, os.path.basename(util.localpath(p))
1737 )
1737 )
1738 else:
1738 else:
1739 res = lambda p: dest
1739 res = lambda p: dest
1740 return res
1740 return res
1741
1741
1742 # pat: ossep
1742 # pat: ossep
1743 # dest ossep
1743 # dest ossep
1744 # srcs: list of (hgsep, hgsep, ossep, bool)
1744 # srcs: list of (hgsep, hgsep, ossep, bool)
1745 # return: function that takes hgsep and returns ossep
1745 # return: function that takes hgsep and returns ossep
1746 def targetpathafterfn(pat, dest, srcs):
1746 def targetpathafterfn(pat, dest, srcs):
1747 if matchmod.patkind(pat):
1747 if matchmod.patkind(pat):
1748 # a mercurial pattern
1748 # a mercurial pattern
1749 res = lambda p: os.path.join(
1749 res = lambda p: os.path.join(
1750 dest, os.path.basename(util.localpath(p))
1750 dest, os.path.basename(util.localpath(p))
1751 )
1751 )
1752 else:
1752 else:
1753 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1753 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1754 if len(abspfx) < len(srcs[0][0]):
1754 if len(abspfx) < len(srcs[0][0]):
1755 # A directory. Either the target path contains the last
1755 # A directory. Either the target path contains the last
1756 # component of the source path or it does not.
1756 # component of the source path or it does not.
1757 def evalpath(striplen):
1757 def evalpath(striplen):
1758 score = 0
1758 score = 0
1759 for s in srcs:
1759 for s in srcs:
1760 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1760 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1761 if os.path.lexists(t):
1761 if os.path.lexists(t):
1762 score += 1
1762 score += 1
1763 return score
1763 return score
1764
1764
1765 abspfx = util.localpath(abspfx)
1765 abspfx = util.localpath(abspfx)
1766 striplen = len(abspfx)
1766 striplen = len(abspfx)
1767 if striplen:
1767 if striplen:
1768 striplen += len(pycompat.ossep)
1768 striplen += len(pycompat.ossep)
1769 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1769 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1770 score = evalpath(striplen)
1770 score = evalpath(striplen)
1771 striplen1 = len(os.path.split(abspfx)[0])
1771 striplen1 = len(os.path.split(abspfx)[0])
1772 if striplen1:
1772 if striplen1:
1773 striplen1 += len(pycompat.ossep)
1773 striplen1 += len(pycompat.ossep)
1774 if evalpath(striplen1) > score:
1774 if evalpath(striplen1) > score:
1775 striplen = striplen1
1775 striplen = striplen1
1776 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1776 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1777 else:
1777 else:
1778 # a file
1778 # a file
1779 if destdirexists:
1779 if destdirexists:
1780 res = lambda p: os.path.join(
1780 res = lambda p: os.path.join(
1781 dest, os.path.basename(util.localpath(p))
1781 dest, os.path.basename(util.localpath(p))
1782 )
1782 )
1783 else:
1783 else:
1784 res = lambda p: dest
1784 res = lambda p: dest
1785 return res
1785 return res
1786
1786
1787 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1787 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1788 if not destdirexists:
1788 if not destdirexists:
1789 if len(pats) > 1 or matchmod.patkind(pats[0]):
1789 if len(pats) > 1 or matchmod.patkind(pats[0]):
1790 raise error.Abort(
1790 raise error.Abort(
1791 _(
1791 _(
1792 b'with multiple sources, destination must be an '
1792 b'with multiple sources, destination must be an '
1793 b'existing directory'
1793 b'existing directory'
1794 )
1794 )
1795 )
1795 )
1796 if util.endswithsep(dest):
1796 if util.endswithsep(dest):
1797 raise error.Abort(_(b'destination %s is not a directory') % dest)
1797 raise error.Abort(_(b'destination %s is not a directory') % dest)
1798
1798
1799 tfn = targetpathfn
1799 tfn = targetpathfn
1800 if after:
1800 if after:
1801 tfn = targetpathafterfn
1801 tfn = targetpathafterfn
1802 copylist = []
1802 copylist = []
1803 for pat in pats:
1803 for pat in pats:
1804 srcs = walkpat(pat)
1804 srcs = walkpat(pat)
1805 if not srcs:
1805 if not srcs:
1806 continue
1806 continue
1807 copylist.append((tfn(pat, dest, srcs), srcs))
1807 copylist.append((tfn(pat, dest, srcs), srcs))
1808 if not copylist:
1808 if not copylist:
1809 raise error.Abort(_(b'no files to copy'))
1809 raise error.Abort(_(b'no files to copy'))
1810
1810
1811 errors = 0
1811 errors = 0
1812 for targetpath, srcs in copylist:
1812 for targetpath, srcs in copylist:
1813 for abssrc, relsrc, exact in srcs:
1813 for abssrc, relsrc, exact in srcs:
1814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1815 errors += 1
1815 errors += 1
1816
1816
1817 return errors != 0
1817 return errors != 0
1818
1818
1819
1819
1820 ## facility to let extension process additional data into an import patch
1820 ## facility to let extension process additional data into an import patch
1821 # list of identifier to be executed in order
1821 # list of identifier to be executed in order
1822 extrapreimport = [] # run before commit
1822 extrapreimport = [] # run before commit
1823 extrapostimport = [] # run after commit
1823 extrapostimport = [] # run after commit
1824 # mapping from identifier to actual import function
1824 # mapping from identifier to actual import function
1825 #
1825 #
1826 # 'preimport' are run before the commit is made and are provided the following
1826 # 'preimport' are run before the commit is made and are provided the following
1827 # arguments:
1827 # arguments:
1828 # - repo: the localrepository instance,
1828 # - repo: the localrepository instance,
1829 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1829 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1830 # - extra: the future extra dictionary of the changeset, please mutate it,
1830 # - extra: the future extra dictionary of the changeset, please mutate it,
1831 # - opts: the import options.
1831 # - opts: the import options.
1832 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1832 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1833 # mutation of in memory commit and more. Feel free to rework the code to get
1833 # mutation of in memory commit and more. Feel free to rework the code to get
1834 # there.
1834 # there.
1835 extrapreimportmap = {}
1835 extrapreimportmap = {}
1836 # 'postimport' are run after the commit is made and are provided the following
1836 # 'postimport' are run after the commit is made and are provided the following
1837 # argument:
1837 # argument:
1838 # - ctx: the changectx created by import.
1838 # - ctx: the changectx created by import.
1839 extrapostimportmap = {}
1839 extrapostimportmap = {}
1840
1840
1841
1841
1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1843 """Utility function used by commands.import to import a single patch
1843 """Utility function used by commands.import to import a single patch
1844
1844
1845 This function is explicitly defined here to help the evolve extension to
1845 This function is explicitly defined here to help the evolve extension to
1846 wrap this part of the import logic.
1846 wrap this part of the import logic.
1847
1847
1848 The API is currently a bit ugly because it a simple code translation from
1848 The API is currently a bit ugly because it a simple code translation from
1849 the import command. Feel free to make it better.
1849 the import command. Feel free to make it better.
1850
1850
1851 :patchdata: a dictionary containing parsed patch data (such as from
1851 :patchdata: a dictionary containing parsed patch data (such as from
1852 ``patch.extract()``)
1852 ``patch.extract()``)
1853 :parents: nodes that will be parent of the created commit
1853 :parents: nodes that will be parent of the created commit
1854 :opts: the full dict of option passed to the import command
1854 :opts: the full dict of option passed to the import command
1855 :msgs: list to save commit message to.
1855 :msgs: list to save commit message to.
1856 (used in case we need to save it when failing)
1856 (used in case we need to save it when failing)
1857 :updatefunc: a function that update a repo to a given node
1857 :updatefunc: a function that update a repo to a given node
1858 updatefunc(<repo>, <node>)
1858 updatefunc(<repo>, <node>)
1859 """
1859 """
1860 # avoid cycle context -> subrepo -> cmdutil
1860 # avoid cycle context -> subrepo -> cmdutil
1861 from . import context
1861 from . import context
1862
1862
1863 tmpname = patchdata.get(b'filename')
1863 tmpname = patchdata.get(b'filename')
1864 message = patchdata.get(b'message')
1864 message = patchdata.get(b'message')
1865 user = opts.get(b'user') or patchdata.get(b'user')
1865 user = opts.get(b'user') or patchdata.get(b'user')
1866 date = opts.get(b'date') or patchdata.get(b'date')
1866 date = opts.get(b'date') or patchdata.get(b'date')
1867 branch = patchdata.get(b'branch')
1867 branch = patchdata.get(b'branch')
1868 nodeid = patchdata.get(b'nodeid')
1868 nodeid = patchdata.get(b'nodeid')
1869 p1 = patchdata.get(b'p1')
1869 p1 = patchdata.get(b'p1')
1870 p2 = patchdata.get(b'p2')
1870 p2 = patchdata.get(b'p2')
1871
1871
1872 nocommit = opts.get(b'no_commit')
1872 nocommit = opts.get(b'no_commit')
1873 importbranch = opts.get(b'import_branch')
1873 importbranch = opts.get(b'import_branch')
1874 update = not opts.get(b'bypass')
1874 update = not opts.get(b'bypass')
1875 strip = opts[b"strip"]
1875 strip = opts[b"strip"]
1876 prefix = opts[b"prefix"]
1876 prefix = opts[b"prefix"]
1877 sim = float(opts.get(b'similarity') or 0)
1877 sim = float(opts.get(b'similarity') or 0)
1878
1878
1879 if not tmpname:
1879 if not tmpname:
1880 return None, None, False
1880 return None, None, False
1881
1881
1882 rejects = False
1882 rejects = False
1883
1883
1884 cmdline_message = logmessage(ui, opts)
1884 cmdline_message = logmessage(ui, opts)
1885 if cmdline_message:
1885 if cmdline_message:
1886 # pickup the cmdline msg
1886 # pickup the cmdline msg
1887 message = cmdline_message
1887 message = cmdline_message
1888 elif message:
1888 elif message:
1889 # pickup the patch msg
1889 # pickup the patch msg
1890 message = message.strip()
1890 message = message.strip()
1891 else:
1891 else:
1892 # launch the editor
1892 # launch the editor
1893 message = None
1893 message = None
1894 ui.debug(b'message:\n%s\n' % (message or b''))
1894 ui.debug(b'message:\n%s\n' % (message or b''))
1895
1895
1896 if len(parents) == 1:
1896 if len(parents) == 1:
1897 parents.append(repo[nullid])
1897 parents.append(repo[nullid])
1898 if opts.get(b'exact'):
1898 if opts.get(b'exact'):
1899 if not nodeid or not p1:
1899 if not nodeid or not p1:
1900 raise error.Abort(_(b'not a Mercurial patch'))
1900 raise error.Abort(_(b'not a Mercurial patch'))
1901 p1 = repo[p1]
1901 p1 = repo[p1]
1902 p2 = repo[p2 or nullid]
1902 p2 = repo[p2 or nullid]
1903 elif p2:
1903 elif p2:
1904 try:
1904 try:
1905 p1 = repo[p1]
1905 p1 = repo[p1]
1906 p2 = repo[p2]
1906 p2 = repo[p2]
1907 # Without any options, consider p2 only if the
1907 # Without any options, consider p2 only if the
1908 # patch is being applied on top of the recorded
1908 # patch is being applied on top of the recorded
1909 # first parent.
1909 # first parent.
1910 if p1 != parents[0]:
1910 if p1 != parents[0]:
1911 p1 = parents[0]
1911 p1 = parents[0]
1912 p2 = repo[nullid]
1912 p2 = repo[nullid]
1913 except error.RepoError:
1913 except error.RepoError:
1914 p1, p2 = parents
1914 p1, p2 = parents
1915 if p2.node() == nullid:
1915 if p2.node() == nullid:
1916 ui.warn(
1916 ui.warn(
1917 _(
1917 _(
1918 b"warning: import the patch as a normal revision\n"
1918 b"warning: import the patch as a normal revision\n"
1919 b"(use --exact to import the patch as a merge)\n"
1919 b"(use --exact to import the patch as a merge)\n"
1920 )
1920 )
1921 )
1921 )
1922 else:
1922 else:
1923 p1, p2 = parents
1923 p1, p2 = parents
1924
1924
1925 n = None
1925 n = None
1926 if update:
1926 if update:
1927 if p1 != parents[0]:
1927 if p1 != parents[0]:
1928 updatefunc(repo, p1.node())
1928 updatefunc(repo, p1.node())
1929 if p2 != parents[1]:
1929 if p2 != parents[1]:
1930 repo.setparents(p1.node(), p2.node())
1930 repo.setparents(p1.node(), p2.node())
1931
1931
1932 if opts.get(b'exact') or importbranch:
1932 if opts.get(b'exact') or importbranch:
1933 repo.dirstate.setbranch(branch or b'default')
1933 repo.dirstate.setbranch(branch or b'default')
1934
1934
1935 partial = opts.get(b'partial', False)
1935 partial = opts.get(b'partial', False)
1936 files = set()
1936 files = set()
1937 try:
1937 try:
1938 patch.patch(
1938 patch.patch(
1939 ui,
1939 ui,
1940 repo,
1940 repo,
1941 tmpname,
1941 tmpname,
1942 strip=strip,
1942 strip=strip,
1943 prefix=prefix,
1943 prefix=prefix,
1944 files=files,
1944 files=files,
1945 eolmode=None,
1945 eolmode=None,
1946 similarity=sim / 100.0,
1946 similarity=sim / 100.0,
1947 )
1947 )
1948 except error.PatchError as e:
1948 except error.PatchError as e:
1949 if not partial:
1949 if not partial:
1950 raise error.Abort(pycompat.bytestr(e))
1950 raise error.Abort(pycompat.bytestr(e))
1951 if partial:
1951 if partial:
1952 rejects = True
1952 rejects = True
1953
1953
1954 files = list(files)
1954 files = list(files)
1955 if nocommit:
1955 if nocommit:
1956 if message:
1956 if message:
1957 msgs.append(message)
1957 msgs.append(message)
1958 else:
1958 else:
1959 if opts.get(b'exact') or p2:
1959 if opts.get(b'exact') or p2:
1960 # If you got here, you either use --force and know what
1960 # If you got here, you either use --force and know what
1961 # you are doing or used --exact or a merge patch while
1961 # you are doing or used --exact or a merge patch while
1962 # being updated to its first parent.
1962 # being updated to its first parent.
1963 m = None
1963 m = None
1964 else:
1964 else:
1965 m = scmutil.matchfiles(repo, files or [])
1965 m = scmutil.matchfiles(repo, files or [])
1966 editform = mergeeditform(repo[None], b'import.normal')
1966 editform = mergeeditform(repo[None], b'import.normal')
1967 if opts.get(b'exact'):
1967 if opts.get(b'exact'):
1968 editor = None
1968 editor = None
1969 else:
1969 else:
1970 editor = getcommiteditor(
1970 editor = getcommiteditor(
1971 editform=editform, **pycompat.strkwargs(opts)
1971 editform=editform, **pycompat.strkwargs(opts)
1972 )
1972 )
1973 extra = {}
1973 extra = {}
1974 for idfunc in extrapreimport:
1974 for idfunc in extrapreimport:
1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1976 overrides = {}
1976 overrides = {}
1977 if partial:
1977 if partial:
1978 overrides[(b'ui', b'allowemptycommit')] = True
1978 overrides[(b'ui', b'allowemptycommit')] = True
1979 if opts.get(b'secret'):
1979 if opts.get(b'secret'):
1980 overrides[(b'phases', b'new-commit')] = b'secret'
1980 overrides[(b'phases', b'new-commit')] = b'secret'
1981 with repo.ui.configoverride(overrides, b'import'):
1981 with repo.ui.configoverride(overrides, b'import'):
1982 n = repo.commit(
1982 n = repo.commit(
1983 message, user, date, match=m, editor=editor, extra=extra
1983 message, user, date, match=m, editor=editor, extra=extra
1984 )
1984 )
1985 for idfunc in extrapostimport:
1985 for idfunc in extrapostimport:
1986 extrapostimportmap[idfunc](repo[n])
1986 extrapostimportmap[idfunc](repo[n])
1987 else:
1987 else:
1988 if opts.get(b'exact') or importbranch:
1988 if opts.get(b'exact') or importbranch:
1989 branch = branch or b'default'
1989 branch = branch or b'default'
1990 else:
1990 else:
1991 branch = p1.branch()
1991 branch = p1.branch()
1992 store = patch.filestore()
1992 store = patch.filestore()
1993 try:
1993 try:
1994 files = set()
1994 files = set()
1995 try:
1995 try:
1996 patch.patchrepo(
1996 patch.patchrepo(
1997 ui,
1997 ui,
1998 repo,
1998 repo,
1999 p1,
1999 p1,
2000 store,
2000 store,
2001 tmpname,
2001 tmpname,
2002 strip,
2002 strip,
2003 prefix,
2003 prefix,
2004 files,
2004 files,
2005 eolmode=None,
2005 eolmode=None,
2006 )
2006 )
2007 except error.PatchError as e:
2007 except error.PatchError as e:
2008 raise error.Abort(stringutil.forcebytestr(e))
2008 raise error.Abort(stringutil.forcebytestr(e))
2009 if opts.get(b'exact'):
2009 if opts.get(b'exact'):
2010 editor = None
2010 editor = None
2011 else:
2011 else:
2012 editor = getcommiteditor(editform=b'import.bypass')
2012 editor = getcommiteditor(editform=b'import.bypass')
2013 memctx = context.memctx(
2013 memctx = context.memctx(
2014 repo,
2014 repo,
2015 (p1.node(), p2.node()),
2015 (p1.node(), p2.node()),
2016 message,
2016 message,
2017 files=files,
2017 files=files,
2018 filectxfn=store,
2018 filectxfn=store,
2019 user=user,
2019 user=user,
2020 date=date,
2020 date=date,
2021 branch=branch,
2021 branch=branch,
2022 editor=editor,
2022 editor=editor,
2023 )
2023 )
2024
2024
2025 overrides = {}
2025 overrides = {}
2026 if opts.get(b'secret'):
2026 if opts.get(b'secret'):
2027 overrides[(b'phases', b'new-commit')] = b'secret'
2027 overrides[(b'phases', b'new-commit')] = b'secret'
2028 with repo.ui.configoverride(overrides, b'import'):
2028 with repo.ui.configoverride(overrides, b'import'):
2029 n = memctx.commit()
2029 n = memctx.commit()
2030 finally:
2030 finally:
2031 store.close()
2031 store.close()
2032 if opts.get(b'exact') and nocommit:
2032 if opts.get(b'exact') and nocommit:
2033 # --exact with --no-commit is still useful in that it does merge
2033 # --exact with --no-commit is still useful in that it does merge
2034 # and branch bits
2034 # and branch bits
2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2037 raise error.Abort(_(b'patch is damaged or loses information'))
2037 raise error.Abort(_(b'patch is damaged or loses information'))
2038 msg = _(b'applied to working directory')
2038 msg = _(b'applied to working directory')
2039 if n:
2039 if n:
2040 # i18n: refers to a short changeset id
2040 # i18n: refers to a short changeset id
2041 msg = _(b'created %s') % short(n)
2041 msg = _(b'created %s') % short(n)
2042 return msg, n, rejects
2042 return msg, n, rejects
2043
2043
2044
2044
2045 # facility to let extensions include additional data in an exported patch
2045 # facility to let extensions include additional data in an exported patch
2046 # list of identifiers to be executed in order
2046 # list of identifiers to be executed in order
2047 extraexport = []
2047 extraexport = []
2048 # mapping from identifier to actual export function
2048 # mapping from identifier to actual export function
2049 # function as to return a string to be added to the header or None
2049 # function as to return a string to be added to the header or None
2050 # it is given two arguments (sequencenumber, changectx)
2050 # it is given two arguments (sequencenumber, changectx)
2051 extraexportmap = {}
2051 extraexportmap = {}
2052
2052
2053
2053
2054 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2054 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2055 node = scmutil.binnode(ctx)
2055 node = scmutil.binnode(ctx)
2056 parents = [p.node() for p in ctx.parents() if p]
2056 parents = [p.node() for p in ctx.parents() if p]
2057 branch = ctx.branch()
2057 branch = ctx.branch()
2058 if switch_parent:
2058 if switch_parent:
2059 parents.reverse()
2059 parents.reverse()
2060
2060
2061 if parents:
2061 if parents:
2062 prev = parents[0]
2062 prev = parents[0]
2063 else:
2063 else:
2064 prev = nullid
2064 prev = nullid
2065
2065
2066 fm.context(ctx=ctx)
2066 fm.context(ctx=ctx)
2067 fm.plain(b'# HG changeset patch\n')
2067 fm.plain(b'# HG changeset patch\n')
2068 fm.write(b'user', b'# User %s\n', ctx.user())
2068 fm.write(b'user', b'# User %s\n', ctx.user())
2069 fm.plain(b'# Date %d %d\n' % ctx.date())
2069 fm.plain(b'# Date %d %d\n' % ctx.date())
2070 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2070 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2071 fm.condwrite(
2071 fm.condwrite(
2072 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2072 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2073 )
2073 )
2074 fm.write(b'node', b'# Node ID %s\n', hex(node))
2074 fm.write(b'node', b'# Node ID %s\n', hex(node))
2075 fm.plain(b'# Parent %s\n' % hex(prev))
2075 fm.plain(b'# Parent %s\n' % hex(prev))
2076 if len(parents) > 1:
2076 if len(parents) > 1:
2077 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2077 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2078 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2078 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2079
2079
2080 # TODO: redesign extraexportmap function to support formatter
2080 # TODO: redesign extraexportmap function to support formatter
2081 for headerid in extraexport:
2081 for headerid in extraexport:
2082 header = extraexportmap[headerid](seqno, ctx)
2082 header = extraexportmap[headerid](seqno, ctx)
2083 if header is not None:
2083 if header is not None:
2084 fm.plain(b'# %s\n' % header)
2084 fm.plain(b'# %s\n' % header)
2085
2085
2086 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2086 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2087 fm.plain(b'\n')
2087 fm.plain(b'\n')
2088
2088
2089 if fm.isplain():
2089 if fm.isplain():
2090 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2090 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2091 for chunk, label in chunkiter:
2091 for chunk, label in chunkiter:
2092 fm.plain(chunk, label=label)
2092 fm.plain(chunk, label=label)
2093 else:
2093 else:
2094 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2094 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2095 # TODO: make it structured?
2095 # TODO: make it structured?
2096 fm.data(diff=b''.join(chunkiter))
2096 fm.data(diff=b''.join(chunkiter))
2097
2097
2098
2098
2099 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2099 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2100 """Export changesets to stdout or a single file"""
2100 """Export changesets to stdout or a single file"""
2101 for seqno, rev in enumerate(revs, 1):
2101 for seqno, rev in enumerate(revs, 1):
2102 ctx = repo[rev]
2102 ctx = repo[rev]
2103 if not dest.startswith(b'<'):
2103 if not dest.startswith(b'<'):
2104 repo.ui.note(b"%s\n" % dest)
2104 repo.ui.note(b"%s\n" % dest)
2105 fm.startitem()
2105 fm.startitem()
2106 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2106 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2107
2107
2108
2108
2109 def _exportfntemplate(
2109 def _exportfntemplate(
2110 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2110 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2111 ):
2111 ):
2112 """Export changesets to possibly multiple files"""
2112 """Export changesets to possibly multiple files"""
2113 total = len(revs)
2113 total = len(revs)
2114 revwidth = max(len(str(rev)) for rev in revs)
2114 revwidth = max(len(str(rev)) for rev in revs)
2115 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2115 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2116
2116
2117 for seqno, rev in enumerate(revs, 1):
2117 for seqno, rev in enumerate(revs, 1):
2118 ctx = repo[rev]
2118 ctx = repo[rev]
2119 dest = makefilename(
2119 dest = makefilename(
2120 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2120 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2121 )
2121 )
2122 filemap.setdefault(dest, []).append((seqno, rev))
2122 filemap.setdefault(dest, []).append((seqno, rev))
2123
2123
2124 for dest in filemap:
2124 for dest in filemap:
2125 with formatter.maybereopen(basefm, dest) as fm:
2125 with formatter.maybereopen(basefm, dest) as fm:
2126 repo.ui.note(b"%s\n" % dest)
2126 repo.ui.note(b"%s\n" % dest)
2127 for seqno, rev in filemap[dest]:
2127 for seqno, rev in filemap[dest]:
2128 fm.startitem()
2128 fm.startitem()
2129 ctx = repo[rev]
2129 ctx = repo[rev]
2130 _exportsingle(
2130 _exportsingle(
2131 repo, ctx, fm, match, switch_parent, seqno, diffopts
2131 repo, ctx, fm, match, switch_parent, seqno, diffopts
2132 )
2132 )
2133
2133
2134
2134
2135 def _prefetchchangedfiles(repo, revs, match):
2135 def _prefetchchangedfiles(repo, revs, match):
2136 allfiles = set()
2136 allfiles = set()
2137 for rev in revs:
2137 for rev in revs:
2138 for file in repo[rev].files():
2138 for file in repo[rev].files():
2139 if not match or match(file):
2139 if not match or match(file):
2140 allfiles.add(file)
2140 allfiles.add(file)
2141 match = scmutil.matchfiles(repo, allfiles)
2141 match = scmutil.matchfiles(repo, allfiles)
2142 revmatches = [(rev, match) for rev in revs]
2142 revmatches = [(rev, match) for rev in revs]
2143 scmutil.prefetchfiles(repo, revmatches)
2143 scmutil.prefetchfiles(repo, revmatches)
2144
2144
2145
2145
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    '''Write the given changesets out as "HG Changeset Patch" data.

    Args:
      repo: the repository from which revisions are exported.
      revs: list of revision numbers to export.
      basefm: formatter that receives the patches; when fntemplate is
        given, it is reopened once per generated file name instead.
      fntemplate: optional template for per-revision patch file names.
        When empty, all revisions are written to basefm directly.
      switch_parent: when True, diff against the second parent when it
        is not null; otherwise always diff against p1.
      opts: diff options used to generate each patch.
      match: optional matcher limiting which file changes are exported.

    Returns:
      Nothing; output is emitted through the formatter(s).
    '''
    # Prefetch the contents of the affected files before formatting.
    _prefetchchangedfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
    else:
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
2187
2187
2188
2188
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Export changesets to the given file stream"""
    _prefetchchangedfiles(repo, revs, match)

    # Use the stream's name for the patch header when it has one.
    try:
        destname = fp.name
    except AttributeError:
        destname = b'<unnamed>'
    with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
2196
2196
2197
2197
def showmarker(fm, marker, index=None):
    """Write one obsolescence marker through ``fm`` in a readable form.

    Intended for debug commands.  Emits the optional index, the
    predecessor node, successor nodes (when any), flags, parent nodes
    (when recorded), date and remaining metadata, ending with a newline.
    """
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    successors = marker.succnodes()
    fm.condwrite(
        successors,
        b'succnodes',
        b'%s ',
        fm.formatlist([hex(s) for s in successors], name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    pnodes = marker.parentnodes()
    if pnodes is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist([hex(p) for p in pnodes], name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # The date is shown separately above, so drop it from the metadata.
    metadata = dict(marker.metadata())
    metadata.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, metadata)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2228
2228
2229
2229
def finddate(ui, repo, date):
    """Return the tipmost changeset matching the given date spec.

    Aborts when no revision matches.  The result is returned as a byte
    string holding the decimal revision number.
    """
    matching = repo.revs(b'date(%s)', date)
    try:
        tiprev = matching.max()
    except ValueError:
        # max() raises ValueError on an empty revset
        raise error.Abort(_(b"revision matching date not found"))

    when = dateutil.datestr(repo[tiprev].date())
    ui.status(_(b"found revision %d from %s\n") % (tiprev, when))
    return b'%d' % tiprev
2243
2243
2244
2244
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever: doubling from ``windowsize`` until
    ``sizelimit`` is reached, then repeating the last size."""
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
2250
2250
2251
2251
def _walkrevs(repo, opts):
    """Resolve the revisions a log-like command should visit.

    Returns a smartset in the order the revisions should be iterated.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revspec = opts.get(b'rev')
    if revspec:
        revs = scmutil.revrange(repo, revspec)
        if follow:
            # follow the ancestry of the requested revisions
            revs = repo.revs(b'reverse(::%ld)', revs)
    elif follow:
        if repo.dirstate.p1() == nullid:
            # nothing to follow from the null working-copy parent
            revs = smartset.baseset()
        else:
            revs = repo.revs(b'reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
2270
2270
2271
2271
class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone.

    walkfilerevs() raises this to signal callers (see walkchangerevs)
    that they must fall back to scanning the changelog instead.
    """

    pass
2274
2274
2275
2275
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    As a side effect, fills ``fncache`` with a mapping of
    rev -> [filenames changed in that rev that we care about].

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()  # changelog revs to return
    copies = []  # (oldname, filenode) pairs discovered while following renames
    minrev, maxrev = min(revs), max(revs)

    def filerevs(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append(
                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
            )

        return reversed(revs)

    def iterfiles():
        # Yields (filename, filenode-or-None) pairs to examine.  When
        # following renames, files must exist in the working-copy parent
        # and rename sources found along the way are appended to
        # ``copies`` (and picked up here on later iterations).
        pctx = repo[b'.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % filename
                    )
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % file_
                    )
                raise FileWalkError(b"Cannot walk via filelog")
            else:
                continue

        # Determine the newest filelog revision to start the walk from.
        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevs(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                # remember the rename source so iterfiles() walks it too
                copies.append(copied)

    return wanted
2381
2381
2382
2382
class _followfilter(object):
    """Incrementally classify revisions as part of a --follow history.

    The first revision fed to match() becomes the anchor; later calls
    (which are expected to arrive in a monotonic walk relative to that
    anchor) report whether the revision is connected to it — as a
    descendant when walking forward, as an ancestor when walking
    backwards.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # anchor revision; nullrev until the first match() call sets it
        self.startrev = nullrev
        # frontier of revisions known to belong to the followed history
        self.roots = set()
        # when True, only first parents are followed
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # With onlyfirst, return just the first parent (which may be
            # nullrev); otherwise return all non-null parents.
            try:
                if self.onlyfirst:
                    return self.repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(
                        lambda x: x != nullrev,
                        self.repo.changelog.parentrevs(rev),
                    )
            except error.WdirUnsupported:
                # the working directory has no changelog entry; ask the
                # context for its parents instead
                prevs = [p.rev() for p in self.repo[rev].parents()]
                if self.onlyfirst:
                    return prevs[:1]
                else:
                    return prevs

        if self.startrev == nullrev:
            # first call: anchor the walk at this revision
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # pop the matched rev and push its parents so the
                # frontier keeps tracking the walk
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2429
2429
2430
2430
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    allfiles = opts.get(b'all_files')
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path: matcher has patterns, or --removed was requested with a
    # non-trivial matcher (removed files need per-changeset inspection)
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    fncache = {}  # rev -> [matching filenames changed in that rev]
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always() or allfiles:
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(
                _(b'can only follow copies/renames for explicit filenames')
            )

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()  # revs already known to match
                self.revs = set(revs)  # candidate revs not yet inspected

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # inspect the changeset once, cache the verdict
                    self.revs.discard(value)
                    ctx = change(value)
                    if allfiles:
                        matches = list(ctx.manifest().walk(match))
                    else:
                        matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get(b'prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in pycompat.xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))

            def want(rev):
                return ff.match(rev) and rev in wanted

        else:

            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather up to windowsize wanted revs from the iterator
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare() is called in forward (ascending) order...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:

                    def fns_generator():
                        # compute the matching filenames lazily when the
                        # cache was not filled during the wanted pass
                        if allfiles:

                            def bad(f, msg):
                                pass

                            for f in ctx.matches(matchmod.badmatch(match, bad)):
                                yield f
                        else:
                            for f in ctx.files():
                                if match(f):
                                    yield f

                    fns = fns_generator()
                prepare(ctx, fns)
            # ...but contexts are yielded in the requested walk order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2585
2585
2586
2586
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Add matching unknown files to the working context.

    Recurses into subrepositories listed in the working copy's
    substate.  With ``explicitonly``, only files named exactly by the
    matcher are added.  Returns the list of files that could not be
    added ("bad" files).
    """
    bad = []

    # collect files the matcher reports as bad, while preserving the
    # matcher's own bad-file handling
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []  # files to actually add at the end
    wctx = repo[None]
    cca = None  # case-collision auditor, when portability checks are on
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        # add exact matches unconditionally; others only when untracked
        # and still present on disk (unless explicitonly)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                # --subrepos: add everything matching in the subrepo
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                # otherwise, only add files named explicitly
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # only report rejections for files the user asked about
        bad.extend(f for f in rejected if f in match.files())
    return bad
2645
2645
2646
2646
def addwebdirpath(repo, serverpath, webconf):
    """Register ``repo`` under ``serverpath`` in ``webconf``, recursing
    into every subrepository ever recorded in a ``.hgsub`` file."""
    webconf[serverpath] = repo.root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))

    # Any revision touching .hgsub may reference subrepos; register
    # each of those as well.
    for rev in repo.revs(b'filelog("path:.hgsub")'):
        revctx = repo[rev]
        for subpath in revctx.substate:
            revctx.sub(subpath).addwebdirpath(serverpath, webconf)
2655
2655
2656
2656
2657 def forget(
2657 def forget(
2658 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2658 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2659 ):
2659 ):
2660 if dryrun and interactive:
2660 if dryrun and interactive:
2661 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2661 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2662 bad = []
2662 bad = []
2663 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2663 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2664 wctx = repo[None]
2664 wctx = repo[None]
2665 forgot = []
2665 forgot = []
2666
2666
2667 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2667 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2668 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2668 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2669 if explicitonly:
2669 if explicitonly:
2670 forget = [f for f in forget if match.exact(f)]
2670 forget = [f for f in forget if match.exact(f)]
2671
2671
2672 for subpath in sorted(wctx.substate):
2672 for subpath in sorted(wctx.substate):
2673 sub = wctx.sub(subpath)
2673 sub = wctx.sub(subpath)
2674 submatch = matchmod.subdirmatcher(subpath, match)
2674 submatch = matchmod.subdirmatcher(subpath, match)
2675 subprefix = repo.wvfs.reljoin(prefix, subpath)
2675 subprefix = repo.wvfs.reljoin(prefix, subpath)
2676 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2676 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2677 try:
2677 try:
2678 subbad, subforgot = sub.forget(
2678 subbad, subforgot = sub.forget(
2679 submatch,
2679 submatch,
2680 subprefix,
2680 subprefix,
2681 subuipathfn,
2681 subuipathfn,
2682 dryrun=dryrun,
2682 dryrun=dryrun,
2683 interactive=interactive,
2683 interactive=interactive,
2684 )
2684 )
2685 bad.extend([subpath + b'/' + f for f in subbad])
2685 bad.extend([subpath + b'/' + f for f in subbad])
2686 forgot.extend([subpath + b'/' + f for f in subforgot])
2686 forgot.extend([subpath + b'/' + f for f in subforgot])
2687 except error.LookupError:
2687 except error.LookupError:
2688 ui.status(
2688 ui.status(
2689 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2689 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2690 )
2690 )
2691
2691
2692 if not explicitonly:
2692 if not explicitonly:
2693 for f in match.files():
2693 for f in match.files():
2694 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2694 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2695 if f not in forgot:
2695 if f not in forgot:
2696 if repo.wvfs.exists(f):
2696 if repo.wvfs.exists(f):
2697 # Don't complain if the exact case match wasn't given.
2697 # Don't complain if the exact case match wasn't given.
2698 # But don't do this until after checking 'forgot', so
2698 # But don't do this until after checking 'forgot', so
2699 # that subrepo files aren't normalized, and this op is
2699 # that subrepo files aren't normalized, and this op is
2700 # purely from data cached by the status walk above.
2700 # purely from data cached by the status walk above.
2701 if repo.dirstate.normalize(f) in repo.dirstate:
2701 if repo.dirstate.normalize(f) in repo.dirstate:
2702 continue
2702 continue
2703 ui.warn(
2703 ui.warn(
2704 _(
2704 _(
2705 b'not removing %s: '
2705 b'not removing %s: '
2706 b'file is already untracked\n'
2706 b'file is already untracked\n'
2707 )
2707 )
2708 % uipathfn(f)
2708 % uipathfn(f)
2709 )
2709 )
2710 bad.append(f)
2710 bad.append(f)
2711
2711
2712 if interactive:
2712 if interactive:
2713 responses = _(
2713 responses = _(
2714 b'[Ynsa?]'
2714 b'[Ynsa?]'
2715 b'$$ &Yes, forget this file'
2715 b'$$ &Yes, forget this file'
2716 b'$$ &No, skip this file'
2716 b'$$ &No, skip this file'
2717 b'$$ &Skip remaining files'
2717 b'$$ &Skip remaining files'
2718 b'$$ Include &all remaining files'
2718 b'$$ Include &all remaining files'
2719 b'$$ &? (display help)'
2719 b'$$ &? (display help)'
2720 )
2720 )
2721 for filename in forget[:]:
2721 for filename in forget[:]:
2722 r = ui.promptchoice(
2722 r = ui.promptchoice(
2723 _(b'forget %s %s') % (uipathfn(filename), responses)
2723 _(b'forget %s %s') % (uipathfn(filename), responses)
2724 )
2724 )
2725 if r == 4: # ?
2725 if r == 4: # ?
2726 while r == 4:
2726 while r == 4:
2727 for c, t in ui.extractchoices(responses)[1]:
2727 for c, t in ui.extractchoices(responses)[1]:
2728 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2728 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2729 r = ui.promptchoice(
2729 r = ui.promptchoice(
2730 _(b'forget %s %s') % (uipathfn(filename), responses)
2730 _(b'forget %s %s') % (uipathfn(filename), responses)
2731 )
2731 )
2732 if r == 0: # yes
2732 if r == 0: # yes
2733 continue
2733 continue
2734 elif r == 1: # no
2734 elif r == 1: # no
2735 forget.remove(filename)
2735 forget.remove(filename)
2736 elif r == 2: # Skip
2736 elif r == 2: # Skip
2737 fnindex = forget.index(filename)
2737 fnindex = forget.index(filename)
2738 del forget[fnindex:]
2738 del forget[fnindex:]
2739 break
2739 break
2740 elif r == 3: # All
2740 elif r == 3: # All
2741 break
2741 break
2742
2742
2743 for f in forget:
2743 for f in forget:
2744 if ui.verbose or not match.exact(f) or interactive:
2744 if ui.verbose or not match.exact(f) or interactive:
2745 ui.status(
2745 ui.status(
2746 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2746 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2747 )
2747 )
2748
2748
2749 if not dryrun:
2749 if not dryrun:
2750 rejected = wctx.forget(forget, prefix)
2750 rejected = wctx.forget(forget, prefix)
2751 bad.extend(f for f in rejected if f in match.files())
2751 bad.extend(f for f in rejected if f in match.files())
2752 forgot.extend(f for f in forget if f not in rejected)
2752 forgot.extend(f for f in forget if f not in rejected)
2753 return bad, forgot
2753 return bad, forgot
2754
2754
2755
2755
2756 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2756 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2757 ret = 1
2757 ret = 1
2758
2758
2759 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2759 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2760 if fm.isplain() and not needsfctx:
2760 if fm.isplain() and not needsfctx:
2761 # Fast path. The speed-up comes from skipping the formatter, and batching
2761 # Fast path. The speed-up comes from skipping the formatter, and batching
2762 # calls to ui.write.
2762 # calls to ui.write.
2763 buf = []
2763 buf = []
2764 for f in ctx.matches(m):
2764 for f in ctx.matches(m):
2765 buf.append(fmt % uipathfn(f))
2765 buf.append(fmt % uipathfn(f))
2766 if len(buf) > 100:
2766 if len(buf) > 100:
2767 ui.write(b''.join(buf))
2767 ui.write(b''.join(buf))
2768 del buf[:]
2768 del buf[:]
2769 ret = 0
2769 ret = 0
2770 if buf:
2770 if buf:
2771 ui.write(b''.join(buf))
2771 ui.write(b''.join(buf))
2772 else:
2772 else:
2773 for f in ctx.matches(m):
2773 for f in ctx.matches(m):
2774 fm.startitem()
2774 fm.startitem()
2775 fm.context(ctx=ctx)
2775 fm.context(ctx=ctx)
2776 if needsfctx:
2776 if needsfctx:
2777 fc = ctx[f]
2777 fc = ctx[f]
2778 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2778 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2779 fm.data(path=f)
2779 fm.data(path=f)
2780 fm.plain(fmt % uipathfn(f))
2780 fm.plain(fmt % uipathfn(f))
2781 ret = 0
2781 ret = 0
2782
2782
2783 for subpath in sorted(ctx.substate):
2783 for subpath in sorted(ctx.substate):
2784 submatch = matchmod.subdirmatcher(subpath, m)
2784 submatch = matchmod.subdirmatcher(subpath, m)
2785 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2785 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2786 if subrepos or m.exact(subpath) or any(submatch.files()):
2786 if subrepos or m.exact(subpath) or any(submatch.files()):
2787 sub = ctx.sub(subpath)
2787 sub = ctx.sub(subpath)
2788 try:
2788 try:
2789 recurse = m.exact(subpath) or subrepos
2789 recurse = m.exact(subpath) or subrepos
2790 if (
2790 if (
2791 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2791 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2792 == 0
2792 == 0
2793 ):
2793 ):
2794 ret = 0
2794 ret = 0
2795 except error.LookupError:
2795 except error.LookupError:
2796 ui.status(
2796 ui.status(
2797 _(b"skipping missing subrepository: %s\n")
2797 _(b"skipping missing subrepository: %s\n")
2798 % uipathfn(subpath)
2798 % uipathfn(subpath)
2799 )
2799 )
2800
2800
2801 return ret
2801 return ret
2802
2802
2803
2803
2804 def remove(
2804 def remove(
2805 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2805 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2806 ):
2806 ):
2807 ret = 0
2807 ret = 0
2808 s = repo.status(match=m, clean=True)
2808 s = repo.status(match=m, clean=True)
2809 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2809 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2810
2810
2811 wctx = repo[None]
2811 wctx = repo[None]
2812
2812
2813 if warnings is None:
2813 if warnings is None:
2814 warnings = []
2814 warnings = []
2815 warn = True
2815 warn = True
2816 else:
2816 else:
2817 warn = False
2817 warn = False
2818
2818
2819 subs = sorted(wctx.substate)
2819 subs = sorted(wctx.substate)
2820 progress = ui.makeprogress(
2820 progress = ui.makeprogress(
2821 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2821 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2822 )
2822 )
2823 for subpath in subs:
2823 for subpath in subs:
2824 submatch = matchmod.subdirmatcher(subpath, m)
2824 submatch = matchmod.subdirmatcher(subpath, m)
2825 subprefix = repo.wvfs.reljoin(prefix, subpath)
2825 subprefix = repo.wvfs.reljoin(prefix, subpath)
2826 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2826 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2827 if subrepos or m.exact(subpath) or any(submatch.files()):
2827 if subrepos or m.exact(subpath) or any(submatch.files()):
2828 progress.increment()
2828 progress.increment()
2829 sub = wctx.sub(subpath)
2829 sub = wctx.sub(subpath)
2830 try:
2830 try:
2831 if sub.removefiles(
2831 if sub.removefiles(
2832 submatch,
2832 submatch,
2833 subprefix,
2833 subprefix,
2834 subuipathfn,
2834 subuipathfn,
2835 after,
2835 after,
2836 force,
2836 force,
2837 subrepos,
2837 subrepos,
2838 dryrun,
2838 dryrun,
2839 warnings,
2839 warnings,
2840 ):
2840 ):
2841 ret = 1
2841 ret = 1
2842 except error.LookupError:
2842 except error.LookupError:
2843 warnings.append(
2843 warnings.append(
2844 _(b"skipping missing subrepository: %s\n")
2844 _(b"skipping missing subrepository: %s\n")
2845 % uipathfn(subpath)
2845 % uipathfn(subpath)
2846 )
2846 )
2847 progress.complete()
2847 progress.complete()
2848
2848
2849 # warn about failure to delete explicit files/dirs
2849 # warn about failure to delete explicit files/dirs
2850 deleteddirs = pathutil.dirs(deleted)
2850 deleteddirs = pathutil.dirs(deleted)
2851 files = m.files()
2851 files = m.files()
2852 progress = ui.makeprogress(
2852 progress = ui.makeprogress(
2853 _(b'deleting'), total=len(files), unit=_(b'files')
2853 _(b'deleting'), total=len(files), unit=_(b'files')
2854 )
2854 )
2855 for f in files:
2855 for f in files:
2856
2856
2857 def insubrepo():
2857 def insubrepo():
2858 for subpath in wctx.substate:
2858 for subpath in wctx.substate:
2859 if f.startswith(subpath + b'/'):
2859 if f.startswith(subpath + b'/'):
2860 return True
2860 return True
2861 return False
2861 return False
2862
2862
2863 progress.increment()
2863 progress.increment()
2864 isdir = f in deleteddirs or wctx.hasdir(f)
2864 isdir = f in deleteddirs or wctx.hasdir(f)
2865 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2865 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2866 continue
2866 continue
2867
2867
2868 if repo.wvfs.exists(f):
2868 if repo.wvfs.exists(f):
2869 if repo.wvfs.isdir(f):
2869 if repo.wvfs.isdir(f):
2870 warnings.append(
2870 warnings.append(
2871 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2871 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2872 )
2872 )
2873 else:
2873 else:
2874 warnings.append(
2874 warnings.append(
2875 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2875 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2876 )
2876 )
2877 # missing files will generate a warning elsewhere
2877 # missing files will generate a warning elsewhere
2878 ret = 1
2878 ret = 1
2879 progress.complete()
2879 progress.complete()
2880
2880
2881 if force:
2881 if force:
2882 list = modified + deleted + clean + added
2882 list = modified + deleted + clean + added
2883 elif after:
2883 elif after:
2884 list = deleted
2884 list = deleted
2885 remaining = modified + added + clean
2885 remaining = modified + added + clean
2886 progress = ui.makeprogress(
2886 progress = ui.makeprogress(
2887 _(b'skipping'), total=len(remaining), unit=_(b'files')
2887 _(b'skipping'), total=len(remaining), unit=_(b'files')
2888 )
2888 )
2889 for f in remaining:
2889 for f in remaining:
2890 progress.increment()
2890 progress.increment()
2891 if ui.verbose or (f in files):
2891 if ui.verbose or (f in files):
2892 warnings.append(
2892 warnings.append(
2893 _(b'not removing %s: file still exists\n') % uipathfn(f)
2893 _(b'not removing %s: file still exists\n') % uipathfn(f)
2894 )
2894 )
2895 ret = 1
2895 ret = 1
2896 progress.complete()
2896 progress.complete()
2897 else:
2897 else:
2898 list = deleted + clean
2898 list = deleted + clean
2899 progress = ui.makeprogress(
2899 progress = ui.makeprogress(
2900 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2900 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2901 )
2901 )
2902 for f in modified:
2902 for f in modified:
2903 progress.increment()
2903 progress.increment()
2904 warnings.append(
2904 warnings.append(
2905 _(
2905 _(
2906 b'not removing %s: file is modified (use -f'
2906 b'not removing %s: file is modified (use -f'
2907 b' to force removal)\n'
2907 b' to force removal)\n'
2908 )
2908 )
2909 % uipathfn(f)
2909 % uipathfn(f)
2910 )
2910 )
2911 ret = 1
2911 ret = 1
2912 for f in added:
2912 for f in added:
2913 progress.increment()
2913 progress.increment()
2914 warnings.append(
2914 warnings.append(
2915 _(
2915 _(
2916 b"not removing %s: file has been marked for add"
2916 b"not removing %s: file has been marked for add"
2917 b" (use 'hg forget' to undo add)\n"
2917 b" (use 'hg forget' to undo add)\n"
2918 )
2918 )
2919 % uipathfn(f)
2919 % uipathfn(f)
2920 )
2920 )
2921 ret = 1
2921 ret = 1
2922 progress.complete()
2922 progress.complete()
2923
2923
2924 list = sorted(list)
2924 list = sorted(list)
2925 progress = ui.makeprogress(
2925 progress = ui.makeprogress(
2926 _(b'deleting'), total=len(list), unit=_(b'files')
2926 _(b'deleting'), total=len(list), unit=_(b'files')
2927 )
2927 )
2928 for f in list:
2928 for f in list:
2929 if ui.verbose or not m.exact(f):
2929 if ui.verbose or not m.exact(f):
2930 progress.increment()
2930 progress.increment()
2931 ui.status(
2931 ui.status(
2932 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2932 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2933 )
2933 )
2934 progress.complete()
2934 progress.complete()
2935
2935
2936 if not dryrun:
2936 if not dryrun:
2937 with repo.wlock():
2937 with repo.wlock():
2938 if not after:
2938 if not after:
2939 for f in list:
2939 for f in list:
2940 if f in added:
2940 if f in added:
2941 continue # we never unlink added files on remove
2941 continue # we never unlink added files on remove
2942 rmdir = repo.ui.configbool(
2942 rmdir = repo.ui.configbool(
2943 b'experimental', b'removeemptydirs'
2943 b'experimental', b'removeemptydirs'
2944 )
2944 )
2945 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2945 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2946 repo[None].forget(list)
2946 repo[None].forget(list)
2947
2947
2948 if warn:
2948 if warn:
2949 for warning in warnings:
2949 for warning in warnings:
2950 ui.warn(warning)
2950 ui.warn(warning)
2951
2951
2952 return ret
2952 return ret
2953
2953
2954
2954
2955 def _catfmtneedsdata(fm):
2955 def _catfmtneedsdata(fm):
2956 return not fm.datahint() or b'data' in fm.datahint()
2956 return not fm.datahint() or b'data' in fm.datahint()
2957
2957
2958
2958
2959 def _updatecatformatter(fm, ctx, matcher, path, decode):
2959 def _updatecatformatter(fm, ctx, matcher, path, decode):
2960 """Hook for adding data to the formatter used by ``hg cat``.
2960 """Hook for adding data to the formatter used by ``hg cat``.
2961
2961
2962 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2962 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2963 this method first."""
2963 this method first."""
2964
2964
2965 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2965 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2966 # wasn't requested.
2966 # wasn't requested.
2967 data = b''
2967 data = b''
2968 if _catfmtneedsdata(fm):
2968 if _catfmtneedsdata(fm):
2969 data = ctx[path].data()
2969 data = ctx[path].data()
2970 if decode:
2970 if decode:
2971 data = ctx.repo().wwritedata(path, data)
2971 data = ctx.repo().wwritedata(path, data)
2972 fm.startitem()
2972 fm.startitem()
2973 fm.context(ctx=ctx)
2973 fm.context(ctx=ctx)
2974 fm.write(b'data', b'%s', data)
2974 fm.write(b'data', b'%s', data)
2975 fm.data(path=path)
2975 fm.data(path=path)
2976
2976
2977
2977
2978 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2978 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2979 err = 1
2979 err = 1
2980 opts = pycompat.byteskwargs(opts)
2980 opts = pycompat.byteskwargs(opts)
2981
2981
2982 def write(path):
2982 def write(path):
2983 filename = None
2983 filename = None
2984 if fntemplate:
2984 if fntemplate:
2985 filename = makefilename(
2985 filename = makefilename(
2986 ctx, fntemplate, pathname=os.path.join(prefix, path)
2986 ctx, fntemplate, pathname=os.path.join(prefix, path)
2987 )
2987 )
2988 # attempt to create the directory if it does not already exist
2988 # attempt to create the directory if it does not already exist
2989 try:
2989 try:
2990 os.makedirs(os.path.dirname(filename))
2990 os.makedirs(os.path.dirname(filename))
2991 except OSError:
2991 except OSError:
2992 pass
2992 pass
2993 with formatter.maybereopen(basefm, filename) as fm:
2993 with formatter.maybereopen(basefm, filename) as fm:
2994 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2994 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2995
2995
2996 # Automation often uses hg cat on single files, so special case it
2996 # Automation often uses hg cat on single files, so special case it
2997 # for performance to avoid the cost of parsing the manifest.
2997 # for performance to avoid the cost of parsing the manifest.
2998 if len(matcher.files()) == 1 and not matcher.anypats():
2998 if len(matcher.files()) == 1 and not matcher.anypats():
2999 file = matcher.files()[0]
2999 file = matcher.files()[0]
3000 mfl = repo.manifestlog
3000 mfl = repo.manifestlog
3001 mfnode = ctx.manifestnode()
3001 mfnode = ctx.manifestnode()
3002 try:
3002 try:
3003 if mfnode and mfl[mfnode].find(file)[0]:
3003 if mfnode and mfl[mfnode].find(file)[0]:
3004 if _catfmtneedsdata(basefm):
3004 if _catfmtneedsdata(basefm):
3005 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3005 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3006 write(file)
3006 write(file)
3007 return 0
3007 return 0
3008 except KeyError:
3008 except KeyError:
3009 pass
3009 pass
3010
3010
3011 if _catfmtneedsdata(basefm):
3011 if _catfmtneedsdata(basefm):
3012 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3012 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3013
3013
3014 for abs in ctx.walk(matcher):
3014 for abs in ctx.walk(matcher):
3015 write(abs)
3015 write(abs)
3016 err = 0
3016 err = 0
3017
3017
3018 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3018 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3019 for subpath in sorted(ctx.substate):
3019 for subpath in sorted(ctx.substate):
3020 sub = ctx.sub(subpath)
3020 sub = ctx.sub(subpath)
3021 try:
3021 try:
3022 submatch = matchmod.subdirmatcher(subpath, matcher)
3022 submatch = matchmod.subdirmatcher(subpath, matcher)
3023 subprefix = os.path.join(prefix, subpath)
3023 subprefix = os.path.join(prefix, subpath)
3024 if not sub.cat(
3024 if not sub.cat(
3025 submatch,
3025 submatch,
3026 basefm,
3026 basefm,
3027 fntemplate,
3027 fntemplate,
3028 subprefix,
3028 subprefix,
3029 **pycompat.strkwargs(opts)
3029 **pycompat.strkwargs(opts)
3030 ):
3030 ):
3031 err = 0
3031 err = 0
3032 except error.RepoLookupError:
3032 except error.RepoLookupError:
3033 ui.status(
3033 ui.status(
3034 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3034 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3035 )
3035 )
3036
3036
3037 return err
3037 return err
3038
3038
3039
3039
3040 def commit(ui, repo, commitfunc, pats, opts):
3040 def commit(ui, repo, commitfunc, pats, opts):
3041 '''commit the specified files or all outstanding changes'''
3041 '''commit the specified files or all outstanding changes'''
3042 date = opts.get(b'date')
3042 date = opts.get(b'date')
3043 if date:
3043 if date:
3044 opts[b'date'] = dateutil.parsedate(date)
3044 opts[b'date'] = dateutil.parsedate(date)
3045 message = logmessage(ui, opts)
3045 message = logmessage(ui, opts)
3046 matcher = scmutil.match(repo[None], pats, opts)
3046 matcher = scmutil.match(repo[None], pats, opts)
3047
3047
3048 dsguard = None
3048 dsguard = None
3049 # extract addremove carefully -- this function can be called from a command
3049 # extract addremove carefully -- this function can be called from a command
3050 # that doesn't support addremove
3050 # that doesn't support addremove
3051 if opts.get(b'addremove'):
3051 if opts.get(b'addremove'):
3052 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3052 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3053 with dsguard or util.nullcontextmanager():
3053 with dsguard or util.nullcontextmanager():
3054 if dsguard:
3054 if dsguard:
3055 relative = scmutil.anypats(pats, opts)
3055 relative = scmutil.anypats(pats, opts)
3056 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3056 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3057 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3057 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3058 raise error.Abort(
3058 raise error.Abort(
3059 _(b"failed to mark all new/missing files as added/removed")
3059 _(b"failed to mark all new/missing files as added/removed")
3060 )
3060 )
3061
3061
3062 return commitfunc(ui, repo, message, matcher, opts)
3062 return commitfunc(ui, repo, message, matcher, opts)
3063
3063
3064
3064
3065 def samefile(f, ctx1, ctx2):
3065 def samefile(f, ctx1, ctx2):
3066 if f in ctx1.manifest():
3066 if f in ctx1.manifest():
3067 a = ctx1.filectx(f)
3067 a = ctx1.filectx(f)
3068 if f in ctx2.manifest():
3068 if f in ctx2.manifest():
3069 b = ctx2.filectx(f)
3069 b = ctx2.filectx(f)
3070 return not a.cmp(b) and a.flags() == b.flags()
3070 return not a.cmp(b) and a.flags() == b.flags()
3071 else:
3071 else:
3072 return False
3072 return False
3073 else:
3073 else:
3074 return f not in ctx2.manifest()
3074 return f not in ctx2.manifest()
3075
3075
3076
3076
3077 def amend(ui, repo, old, extra, pats, opts):
3077 def amend(ui, repo, old, extra, pats, opts):
3078 # avoid cycle context -> subrepo -> cmdutil
3078 # avoid cycle context -> subrepo -> cmdutil
3079 from . import context
3079 from . import context
3080
3080
3081 # amend will reuse the existing user if not specified, but the obsolete
3081 # amend will reuse the existing user if not specified, but the obsolete
3082 # marker creation requires that the current user's name is specified.
3082 # marker creation requires that the current user's name is specified.
3083 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3083 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3084 ui.username() # raise exception if username not set
3084 ui.username() # raise exception if username not set
3085
3085
3086 ui.note(_(b'amending changeset %s\n') % old)
3086 ui.note(_(b'amending changeset %s\n') % old)
3087 base = old.p1()
3087 base = old.p1()
3088
3088
3089 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3089 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3090 # Participating changesets:
3090 # Participating changesets:
3091 #
3091 #
3092 # wctx o - workingctx that contains changes from working copy
3092 # wctx o - workingctx that contains changes from working copy
3093 # | to go into amending commit
3093 # | to go into amending commit
3094 # |
3094 # |
3095 # old o - changeset to amend
3095 # old o - changeset to amend
3096 # |
3096 # |
3097 # base o - first parent of the changeset to amend
3097 # base o - first parent of the changeset to amend
3098 wctx = repo[None]
3098 wctx = repo[None]
3099
3099
3100 # Copy to avoid mutating input
3100 # Copy to avoid mutating input
3101 extra = extra.copy()
3101 extra = extra.copy()
3102 # Update extra dict from amended commit (e.g. to preserve graft
3102 # Update extra dict from amended commit (e.g. to preserve graft
3103 # source)
3103 # source)
3104 extra.update(old.extra())
3104 extra.update(old.extra())
3105
3105
3106 # Also update it from the from the wctx
3106 # Also update it from the from the wctx
3107 extra.update(wctx.extra())
3107 extra.update(wctx.extra())
3108
3108
3109 # date-only change should be ignored?
3109 # date-only change should be ignored?
3110 datemaydiffer = resolvecommitoptions(ui, opts)
3110 datemaydiffer = resolvecommitoptions(ui, opts)
3111
3111
3112 date = old.date()
3112 date = old.date()
3113 if opts.get(b'date'):
3113 if opts.get(b'date'):
3114 date = dateutil.parsedate(opts.get(b'date'))
3114 date = dateutil.parsedate(opts.get(b'date'))
3115 user = opts.get(b'user') or old.user()
3115 user = opts.get(b'user') or old.user()
3116
3116
3117 if len(old.parents()) > 1:
3117 if len(old.parents()) > 1:
3118 # ctx.files() isn't reliable for merges, so fall back to the
3118 # ctx.files() isn't reliable for merges, so fall back to the
3119 # slower repo.status() method
3119 # slower repo.status() method
3120 st = base.status(old)
3120 st = base.status(old)
3121 files = set(st.modified) | set(st.added) | set(st.removed)
3121 files = set(st.modified) | set(st.added) | set(st.removed)
3122 else:
3122 else:
3123 files = set(old.files())
3123 files = set(old.files())
3124
3124
3125 # add/remove the files to the working copy if the "addremove" option
3125 # add/remove the files to the working copy if the "addremove" option
3126 # was specified.
3126 # was specified.
3127 matcher = scmutil.match(wctx, pats, opts)
3127 matcher = scmutil.match(wctx, pats, opts)
3128 relative = scmutil.anypats(pats, opts)
3128 relative = scmutil.anypats(pats, opts)
3129 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3129 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3130 if opts.get(b'addremove') and scmutil.addremove(
3130 if opts.get(b'addremove') and scmutil.addremove(
3131 repo, matcher, b"", uipathfn, opts
3131 repo, matcher, b"", uipathfn, opts
3132 ):
3132 ):
3133 raise error.Abort(
3133 raise error.Abort(
3134 _(b"failed to mark all new/missing files as added/removed")
3134 _(b"failed to mark all new/missing files as added/removed")
3135 )
3135 )
3136
3136
3137 # Check subrepos. This depends on in-place wctx._status update in
3137 # Check subrepos. This depends on in-place wctx._status update in
3138 # subrepo.precommit(). To minimize the risk of this hack, we do
3138 # subrepo.precommit(). To minimize the risk of this hack, we do
3139 # nothing if .hgsub does not exist.
3139 # nothing if .hgsub does not exist.
3140 if b'.hgsub' in wctx or b'.hgsub' in old:
3140 if b'.hgsub' in wctx or b'.hgsub' in old:
3141 subs, commitsubs, newsubstate = subrepoutil.precommit(
3141 subs, commitsubs, newsubstate = subrepoutil.precommit(
3142 ui, wctx, wctx._status, matcher
3142 ui, wctx, wctx._status, matcher
3143 )
3143 )
3144 # amend should abort if commitsubrepos is enabled
3144 # amend should abort if commitsubrepos is enabled
3145 assert not commitsubs
3145 assert not commitsubs
3146 if subs:
3146 if subs:
3147 subrepoutil.writestate(repo, newsubstate)
3147 subrepoutil.writestate(repo, newsubstate)
3148
3148
3149 ms = mergestatemod.mergestate.read(repo)
3149 ms = mergestatemod.mergestate.read(repo)
3150 mergeutil.checkunresolved(ms)
3150 mergeutil.checkunresolved(ms)
3151
3151
3152 filestoamend = {f for f in wctx.files() if matcher(f)}
3152 filestoamend = {f for f in wctx.files() if matcher(f)}
3153
3153
3154 changes = len(filestoamend) > 0
3154 changes = len(filestoamend) > 0
3155 if changes:
3155 if changes:
3156 # Recompute copies (avoid recording a -> b -> a)
3156 # Recompute copies (avoid recording a -> b -> a)
3157 copied = copies.pathcopies(base, wctx, matcher)
3157 copied = copies.pathcopies(base, wctx, matcher)
3158 if old.p2:
3158 if old.p2:
3159 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3159 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3160
3160
3161 # Prune files which were reverted by the updates: if old
3161 # Prune files which were reverted by the updates: if old
3162 # introduced file X and the file was renamed in the working
3162 # introduced file X and the file was renamed in the working
3163 # copy, then those two files are the same and
3163 # copy, then those two files are the same and
3164 # we can discard X from our list of files. Likewise if X
3164 # we can discard X from our list of files. Likewise if X
3165 # was removed, it's no longer relevant. If X is missing (aka
3165 # was removed, it's no longer relevant. If X is missing (aka
3166 # deleted), old X must be preserved.
3166 # deleted), old X must be preserved.
3167 files.update(filestoamend)
3167 files.update(filestoamend)
3168 files = [
3168 files = [
3169 f
3169 f
3170 for f in files
3170 for f in files
3171 if (f not in filestoamend or not samefile(f, wctx, base))
3171 if (f not in filestoamend or not samefile(f, wctx, base))
3172 ]
3172 ]
3173
3173
3174 def filectxfn(repo, ctx_, path):
3174 def filectxfn(repo, ctx_, path):
3175 try:
3175 try:
3176 # If the file being considered is not amongst the files
3176 # If the file being considered is not amongst the files
3177 # to be amended, we should return the file context from the
3177 # to be amended, we should return the file context from the
3178 # old changeset. This avoids issues when only some files in
3178 # old changeset. This avoids issues when only some files in
3179 # the working copy are being amended but there are also
3179 # the working copy are being amended but there are also
3180 # changes to other files from the old changeset.
3180 # changes to other files from the old changeset.
3181 if path not in filestoamend:
3181 if path not in filestoamend:
3182 return old.filectx(path)
3182 return old.filectx(path)
3183
3183
3184 # Return None for removed files.
3184 # Return None for removed files.
3185 if path in wctx.removed():
3185 if path in wctx.removed():
3186 return None
3186 return None
3187
3187
3188 fctx = wctx[path]
3188 fctx = wctx[path]
3189 flags = fctx.flags()
3189 flags = fctx.flags()
3190 mctx = context.memfilectx(
3190 mctx = context.memfilectx(
3191 repo,
3191 repo,
3192 ctx_,
3192 ctx_,
3193 fctx.path(),
3193 fctx.path(),
3194 fctx.data(),
3194 fctx.data(),
3195 islink=b'l' in flags,
3195 islink=b'l' in flags,
3196 isexec=b'x' in flags,
3196 isexec=b'x' in flags,
3197 copysource=copied.get(path),
3197 copysource=copied.get(path),
3198 )
3198 )
3199 return mctx
3199 return mctx
3200 except KeyError:
3200 except KeyError:
3201 return None
3201 return None
3202
3202
3203 else:
3203 else:
3204 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3204 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3205
3205
3206 # Use version of files as in the old cset
3206 # Use version of files as in the old cset
3207 def filectxfn(repo, ctx_, path):
3207 def filectxfn(repo, ctx_, path):
3208 try:
3208 try:
3209 return old.filectx(path)
3209 return old.filectx(path)
3210 except KeyError:
3210 except KeyError:
3211 return None
3211 return None
3212
3212
3213 # See if we got a message from -m or -l, if not, open the editor with
3213 # See if we got a message from -m or -l, if not, open the editor with
3214 # the message of the changeset to amend.
3214 # the message of the changeset to amend.
3215 message = logmessage(ui, opts)
3215 message = logmessage(ui, opts)
3216
3216
3217 editform = mergeeditform(old, b'commit.amend')
3217 editform = mergeeditform(old, b'commit.amend')
3218
3218
3219 if not message:
3219 if not message:
3220 message = old.description()
3220 message = old.description()
3221 # Default if message isn't provided and --edit is not passed is to
3221 # Default if message isn't provided and --edit is not passed is to
3222 # invoke editor, but allow --no-edit. If somehow we don't have any
3222 # invoke editor, but allow --no-edit. If somehow we don't have any
3223 # description, let's always start the editor.
3223 # description, let's always start the editor.
3224 doedit = not message or opts.get(b'edit') in [True, None]
3224 doedit = not message or opts.get(b'edit') in [True, None]
3225 else:
3225 else:
3226 # Default if message is provided is to not invoke editor, but allow
3226 # Default if message is provided is to not invoke editor, but allow
3227 # --edit.
3227 # --edit.
3228 doedit = opts.get(b'edit') is True
3228 doedit = opts.get(b'edit') is True
3229 editor = getcommiteditor(edit=doedit, editform=editform)
3229 editor = getcommiteditor(edit=doedit, editform=editform)
3230
3230
3231 pureextra = extra.copy()
3231 pureextra = extra.copy()
3232 extra[b'amend_source'] = old.hex()
3232 extra[b'amend_source'] = old.hex()
3233
3233
3234 new = context.memctx(
3234 new = context.memctx(
3235 repo,
3235 repo,
3236 parents=[base.node(), old.p2().node()],
3236 parents=[base.node(), old.p2().node()],
3237 text=message,
3237 text=message,
3238 files=files,
3238 files=files,
3239 filectxfn=filectxfn,
3239 filectxfn=filectxfn,
3240 user=user,
3240 user=user,
3241 date=date,
3241 date=date,
3242 extra=extra,
3242 extra=extra,
3243 editor=editor,
3243 editor=editor,
3244 )
3244 )
3245
3245
3246 newdesc = changelog.stripdesc(new.description())
3246 newdesc = changelog.stripdesc(new.description())
3247 if (
3247 if (
3248 (not changes)
3248 (not changes)
3249 and newdesc == old.description()
3249 and newdesc == old.description()
3250 and user == old.user()
3250 and user == old.user()
3251 and (date == old.date() or datemaydiffer)
3251 and (date == old.date() or datemaydiffer)
3252 and pureextra == old.extra()
3252 and pureextra == old.extra()
3253 ):
3253 ):
3254 # nothing changed. continuing here would create a new node
3254 # nothing changed. continuing here would create a new node
3255 # anyway because of the amend_source noise.
3255 # anyway because of the amend_source noise.
3256 #
3256 #
3257 # This not what we expect from amend.
3257 # This not what we expect from amend.
3258 return old.node()
3258 return old.node()
3259
3259
3260 commitphase = None
3260 commitphase = None
3261 if opts.get(b'secret'):
3261 if opts.get(b'secret'):
3262 commitphase = phases.secret
3262 commitphase = phases.secret
3263 newid = repo.commitctx(new)
3263 newid = repo.commitctx(new)
3264 ms.reset()
3264 ms.reset()
3265
3265
3266 # Reroute the working copy parent to the new changeset
3266 # Reroute the working copy parent to the new changeset
3267 repo.setparents(newid, nullid)
3267 repo.setparents(newid, nullid)
3268 mapping = {old.node(): (newid,)}
3268 mapping = {old.node(): (newid,)}
3269 obsmetadata = None
3269 obsmetadata = None
3270 if opts.get(b'note'):
3270 if opts.get(b'note'):
3271 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3271 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3272 backup = ui.configbool(b'rewrite', b'backup-bundle')
3272 backup = ui.configbool(b'rewrite', b'backup-bundle')
3273 scmutil.cleanupnodes(
3273 scmutil.cleanupnodes(
3274 repo,
3274 repo,
3275 mapping,
3275 mapping,
3276 b'amend',
3276 b'amend',
3277 metadata=obsmetadata,
3277 metadata=obsmetadata,
3278 fixphase=True,
3278 fixphase=True,
3279 targetphase=commitphase,
3279 targetphase=commitphase,
3280 backup=backup,
3280 backup=backup,
3281 )
3281 )
3282
3282
3283 # Fixing the dirstate because localrepo.commitctx does not update
3283 # Fixing the dirstate because localrepo.commitctx does not update
3284 # it. This is rather convenient because we did not need to update
3284 # it. This is rather convenient because we did not need to update
3285 # the dirstate for all the files in the new commit which commitctx
3285 # the dirstate for all the files in the new commit which commitctx
3286 # could have done if it updated the dirstate. Now, we can
3286 # could have done if it updated the dirstate. Now, we can
3287 # selectively update the dirstate only for the amended files.
3287 # selectively update the dirstate only for the amended files.
3288 dirstate = repo.dirstate
3288 dirstate = repo.dirstate
3289
3289
3290 # Update the state of the files which were added and modified in the
3290 # Update the state of the files which were added and modified in the
3291 # amend to "normal" in the dirstate. We need to use "normallookup" since
3291 # amend to "normal" in the dirstate. We need to use "normallookup" since
3292 # the files may have changed since the command started; using "normal"
3292 # the files may have changed since the command started; using "normal"
3293 # would mark them as clean but with uncommitted contents.
3293 # would mark them as clean but with uncommitted contents.
3294 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3294 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3295 for f in normalfiles:
3295 for f in normalfiles:
3296 dirstate.normallookup(f)
3296 dirstate.normallookup(f)
3297
3297
3298 # Update the state of files which were removed in the amend
3298 # Update the state of files which were removed in the amend
3299 # to "removed" in the dirstate.
3299 # to "removed" in the dirstate.
3300 removedfiles = set(wctx.removed()) & filestoamend
3300 removedfiles = set(wctx.removed()) & filestoamend
3301 for f in removedfiles:
3301 for f in removedfiles:
3302 dirstate.drop(f)
3302 dirstate.drop(f)
3303
3303
3304 return newid
3304 return newid
3305
3305
3306
3306
def commiteditor(repo, ctx, subs, editform=b''):
    """Return ctx's description, opening an editor only when it is empty.

    When the changeset already carries a description, it is returned
    unchanged.  Otherwise the user's editor is invoked via
    commitforceeditor() with unchanged-message detection enabled, so an
    untouched template aborts the commit.
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(
        repo,
        ctx,
        subs,
        editform=editform,
        unchangedmessagedetection=True,
    )
3313
3313
3314
3314
def commitforceeditor(
    repo,
    ctx,
    subs,
    finishdesc=None,
    extramsg=None,
    editform=b'',
    unchangedmessagedetection=False,
):
    """Unconditionally open the user's editor to obtain a commit message.

    ``subs`` is the list of dirty subrepos mentioned in the editor text.
    ``finishdesc``, when given, post-processes the edited text.
    ``extramsg`` replaces the default "Leave message empty to abort" hint.
    ``editform`` is a dotted identifier (e.g. ``commit.amend``) used both to
    pick a ``[committemplate]`` template and as the HGEDITFORM hint.
    With ``unchangedmessagedetection``, returning the template text verbatim
    aborts.

    Raises error.Abort when the resulting message is empty (or unchanged,
    if detection is enabled).  Returns the cleaned message as bytes.
    """
    if not extramsg:
        extramsg = _(b"Leave message empty to abort commit.")

    # Look up the most specific [committemplate] entry: try the full
    # dotted editform first, then progressively shorter prefixes, always
    # rooted at 'changeset' (e.g. changeset.commit.amend -> changeset.commit
    # -> changeset).
    forms = [e for e in editform.split(b'.') if e]
    forms.insert(0, b'changeset')
    templatetext = None
    while forms:
        ref = b'.'.join(forms)
        if repo.ui.config(b'committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref
            )
            break
        forms.pop()
    else:
        # no configured template matched: fall back to the built-in text
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = encoding.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(
        committext,
        ctx.user(),
        ctx.extra(),
        editform=editform,
        pending=pending,
        repopath=repo.path,
        action=b'commit',
    )
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[: stripbelow.start()]

    # drop all HG: helper lines, then restore the caller's cwd
    text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_(b"empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        # the user saved the template without touching it
        raise error.Abort(_(b"commit message unchanged"))

    return text
3378
3378
3379
3379
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the ``[committemplate]`` template named ``ref`` for ``ctx``.

    Every ``[committemplate]`` config item is unquoted and installed into
    the templater's cache so templates can reference each other.  Returns
    the rendered editor text as bytes.
    """
    ui = repo.ui
    spec = formatter.reference_templatespec(ref)
    tmpl = logcmdutil.changesettemplater(ui, repo, spec)
    overrides = {
        key: templater.unquotestring(value)
        for key, value in repo.ui.configitems(b'committemplate')
    }
    tmpl.t.cache.update(overrides)

    if not extramsg:
        # templates expect a bytes value, never None
        extramsg = b''

    ui.pushbuffer()
    tmpl.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3395
3395
3396
3396
def hgprefix(msg):
    """Prefix each non-empty line of ``msg`` with ``HG: ``.

    Empty lines are dropped entirely; the result carries no trailing
    newline.
    """
    lines = (b"HG: " + line for line in msg.split(b"\n") if line)
    return b"\n".join(lines)
3399
3399
3400
3400
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the built-in (non-templated) commit editor text for ``ctx``.

    The text starts with any existing description, followed by HG:-prefixed
    helper lines describing user, branch/merge/bookmark state, dirty
    subrepos, and the per-file change summary.  Returns bytes.
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append(b"")
    lines.append(b"")  # Empty line between message and comments.
    lines.append(
        hgprefix(
            _(
                b"Enter commit message."
                b" Lines beginning with 'HG:' are removed."
            )
        )
    )
    lines.append(hgprefix(extramsg))
    lines.append(b"HG: --")
    lines.append(hgprefix(_(b"user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_(b"branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
    for sub in subs:
        lines.append(hgprefix(_(b"subrepo %s") % sub))
    for name in added:
        lines.append(hgprefix(_(b"added %s") % name))
    for name in modified:
        lines.append(hgprefix(_(b"changed %s") % name))
    for name in removed:
        lines.append(hgprefix(_(b"removed %s") % name))
    if not (added or modified or removed):
        lines.append(hgprefix(_(b"no files changed")))
    lines.append(b"")

    return b"\n".join(lines)
3434
3434
3435
3435
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print informational messages after committing ``node``.

    Reports "created new head" when the commit adds a topological or
    branch head, "reopening closed branch head" when a parent had closed
    the branch, and echoes the committed changeset in debug/verbose mode.
    ``bheads`` is the list of branch heads before the commit.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    # "created new head" is printed when the new node is not among the
    # pre-commit branch heads and none of its parents was a head of this
    # branch.  Summary of when the message fires (per-parent state:
    # N = null/no parent, B = other named branch, C = non-head changeset,
    # H = head of current branch); assuming some initial branch heads
    # already exist, otherwise nothing is printed anyway:
    #
    #   N+N yes (additional topo root)
    #   B+N yes (additional branch root)   C+N yes   H+N no (usual case)
    #   B+B yes (weird branch root)        C+B yes   H+B no (merge w/ branch)
    #   C+C yes (head from merge)          C+H no    H+H no (head merge)
    #
    # The message is never printed for initial roots.
    if not opts.get(b'amend') and bheads and node not in bheads:
        parentwashead = any(
            p.node() in bheads and p.branch() == branch for p in parents
        )
        if not parentwashead:
            repo.ui.status(_(b'created new head\n'))

    if not opts.get(b'close_branch'):
        for parent in parents:
            if parent.closesbranch() and parent.branch() == branch:
                repo.ui.status(
                    _(b'reopening closed branch head %d\n') % parent.rev()
                )

    if repo.ui.debugflag:
        repo.ui.write(
            _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
        )
    elif repo.ui.verbose:
        repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3493
3493
3494
3494
def postcommitstatus(repo, pats, opts):
    """Return the working-copy status for files matching ``pats``/``opts``."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3497
3497
3498
3498
3499 def revert(ui, repo, ctx, *pats, **opts):
3499 def revert(ui, repo, ctx, *pats, **opts):
3500 opts = pycompat.byteskwargs(opts)
3500 opts = pycompat.byteskwargs(opts)
3501 parent, p2 = repo.dirstate.parents()
3501 parent, p2 = repo.dirstate.parents()
3502 node = ctx.node()
3502 node = ctx.node()
3503
3503
3504 mf = ctx.manifest()
3504 mf = ctx.manifest()
3505 if node == p2:
3505 if node == p2:
3506 parent = p2
3506 parent = p2
3507
3507
3508 # need all matching names in dirstate and manifest of target rev,
3508 # need all matching names in dirstate and manifest of target rev,
3509 # so have to walk both. do not print errors if files exist in one
3509 # so have to walk both. do not print errors if files exist in one
3510 # but not other. in both cases, filesets should be evaluated against
3510 # but not other. in both cases, filesets should be evaluated against
3511 # workingctx to get consistent result (issue4497). this means 'set:**'
3511 # workingctx to get consistent result (issue4497). this means 'set:**'
3512 # cannot be used to select missing files from target rev.
3512 # cannot be used to select missing files from target rev.
3513
3513
3514 # `names` is a mapping for all elements in working copy and target revision
3514 # `names` is a mapping for all elements in working copy and target revision
3515 # The mapping is in the form:
3515 # The mapping is in the form:
3516 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3516 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3517 names = {}
3517 names = {}
3518 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3518 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3519
3519
3520 with repo.wlock():
3520 with repo.wlock():
3521 ## filling of the `names` mapping
3521 ## filling of the `names` mapping
3522 # walk dirstate to fill `names`
3522 # walk dirstate to fill `names`
3523
3523
3524 interactive = opts.get(b'interactive', False)
3524 interactive = opts.get(b'interactive', False)
3525 wctx = repo[None]
3525 wctx = repo[None]
3526 m = scmutil.match(wctx, pats, opts)
3526 m = scmutil.match(wctx, pats, opts)
3527
3527
3528 # we'll need this later
3528 # we'll need this later
3529 targetsubs = sorted(s for s in wctx.substate if m(s))
3529 targetsubs = sorted(s for s in wctx.substate if m(s))
3530
3530
3531 if not m.always():
3531 if not m.always():
3532 matcher = matchmod.badmatch(m, lambda x, y: False)
3532 matcher = matchmod.badmatch(m, lambda x, y: False)
3533 for abs in wctx.walk(matcher):
3533 for abs in wctx.walk(matcher):
3534 names[abs] = m.exact(abs)
3534 names[abs] = m.exact(abs)
3535
3535
3536 # walk target manifest to fill `names`
3536 # walk target manifest to fill `names`
3537
3537
3538 def badfn(path, msg):
3538 def badfn(path, msg):
3539 if path in names:
3539 if path in names:
3540 return
3540 return
3541 if path in ctx.substate:
3541 if path in ctx.substate:
3542 return
3542 return
3543 path_ = path + b'/'
3543 path_ = path + b'/'
3544 for f in names:
3544 for f in names:
3545 if f.startswith(path_):
3545 if f.startswith(path_):
3546 return
3546 return
3547 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3547 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3548
3548
3549 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3549 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3550 if abs not in names:
3550 if abs not in names:
3551 names[abs] = m.exact(abs)
3551 names[abs] = m.exact(abs)
3552
3552
3553 # Find status of all file in `names`.
3553 # Find status of all file in `names`.
3554 m = scmutil.matchfiles(repo, names)
3554 m = scmutil.matchfiles(repo, names)
3555
3555
3556 changes = repo.status(
3556 changes = repo.status(
3557 node1=node, match=m, unknown=True, ignored=True, clean=True
3557 node1=node, match=m, unknown=True, ignored=True, clean=True
3558 )
3558 )
3559 else:
3559 else:
3560 changes = repo.status(node1=node, match=m)
3560 changes = repo.status(node1=node, match=m)
3561 for kind in changes:
3561 for kind in changes:
3562 for abs in kind:
3562 for abs in kind:
3563 names[abs] = m.exact(abs)
3563 names[abs] = m.exact(abs)
3564
3564
3565 m = scmutil.matchfiles(repo, names)
3565 m = scmutil.matchfiles(repo, names)
3566
3566
3567 modified = set(changes.modified)
3567 modified = set(changes.modified)
3568 added = set(changes.added)
3568 added = set(changes.added)
3569 removed = set(changes.removed)
3569 removed = set(changes.removed)
3570 _deleted = set(changes.deleted)
3570 _deleted = set(changes.deleted)
3571 unknown = set(changes.unknown)
3571 unknown = set(changes.unknown)
3572 unknown.update(changes.ignored)
3572 unknown.update(changes.ignored)
3573 clean = set(changes.clean)
3573 clean = set(changes.clean)
3574 modadded = set()
3574 modadded = set()
3575
3575
3576 # We need to account for the state of the file in the dirstate,
3576 # We need to account for the state of the file in the dirstate,
3577 # even when we revert against something else than parent. This will
3577 # even when we revert against something else than parent. This will
3578 # slightly alter the behavior of revert (doing back up or not, delete
3578 # slightly alter the behavior of revert (doing back up or not, delete
3579 # or just forget etc).
3579 # or just forget etc).
3580 if parent == node:
3580 if parent == node:
3581 dsmodified = modified
3581 dsmodified = modified
3582 dsadded = added
3582 dsadded = added
3583 dsremoved = removed
3583 dsremoved = removed
3584 # store all local modifications, useful later for rename detection
3584 # store all local modifications, useful later for rename detection
3585 localchanges = dsmodified | dsadded
3585 localchanges = dsmodified | dsadded
3586 modified, added, removed = set(), set(), set()
3586 modified, added, removed = set(), set(), set()
3587 else:
3587 else:
3588 changes = repo.status(node1=parent, match=m)
3588 changes = repo.status(node1=parent, match=m)
3589 dsmodified = set(changes.modified)
3589 dsmodified = set(changes.modified)
3590 dsadded = set(changes.added)
3590 dsadded = set(changes.added)
3591 dsremoved = set(changes.removed)
3591 dsremoved = set(changes.removed)
3592 # store all local modifications, useful later for rename detection
3592 # store all local modifications, useful later for rename detection
3593 localchanges = dsmodified | dsadded
3593 localchanges = dsmodified | dsadded
3594
3594
3595 # only take into account for removes between wc and target
3595 # only take into account for removes between wc and target
3596 clean |= dsremoved - removed
3596 clean |= dsremoved - removed
3597 dsremoved &= removed
3597 dsremoved &= removed
3598 # distinct between dirstate remove and other
3598 # distinct between dirstate remove and other
3599 removed -= dsremoved
3599 removed -= dsremoved
3600
3600
3601 modadded = added & dsmodified
3601 modadded = added & dsmodified
3602 added -= modadded
3602 added -= modadded
3603
3603
3604 # tell newly modified apart.
3604 # tell newly modified apart.
3605 dsmodified &= modified
3605 dsmodified &= modified
3606 dsmodified |= modified & dsadded # dirstate added may need backup
3606 dsmodified |= modified & dsadded # dirstate added may need backup
3607 modified -= dsmodified
3607 modified -= dsmodified
3608
3608
3609 # We need to wait for some post-processing to update this set
3609 # We need to wait for some post-processing to update this set
3610 # before making the distinction. The dirstate will be used for
3610 # before making the distinction. The dirstate will be used for
3611 # that purpose.
3611 # that purpose.
3612 dsadded = added
3612 dsadded = added
3613
3613
3614 # in case of merge, files that are actually added can be reported as
3614 # in case of merge, files that are actually added can be reported as
3615 # modified, we need to post process the result
3615 # modified, we need to post process the result
3616 if p2 != nullid:
3616 if p2 != nullid:
3617 mergeadd = set(dsmodified)
3617 mergeadd = set(dsmodified)
3618 for path in dsmodified:
3618 for path in dsmodified:
3619 if path in mf:
3619 if path in mf:
3620 mergeadd.remove(path)
3620 mergeadd.remove(path)
3621 dsadded |= mergeadd
3621 dsadded |= mergeadd
3622 dsmodified -= mergeadd
3622 dsmodified -= mergeadd
3623
3623
3624 # if f is a rename, update `names` to also revert the source
3624 # if f is a rename, update `names` to also revert the source
3625 for f in localchanges:
3625 for f in localchanges:
3626 src = repo.dirstate.copied(f)
3626 src = repo.dirstate.copied(f)
3627 # XXX should we check for rename down to target node?
3627 # XXX should we check for rename down to target node?
3628 if src and src not in names and repo.dirstate[src] == b'r':
3628 if src and src not in names and repo.dirstate[src] == b'r':
3629 dsremoved.add(src)
3629 dsremoved.add(src)
3630 names[src] = True
3630 names[src] = True
3631
3631
3632 # determine the exact nature of the deleted changesets
3632 # determine the exact nature of the deleted changesets
3633 deladded = set(_deleted)
3633 deladded = set(_deleted)
3634 for path in _deleted:
3634 for path in _deleted:
3635 if path in mf:
3635 if path in mf:
3636 deladded.remove(path)
3636 deladded.remove(path)
3637 deleted = _deleted - deladded
3637 deleted = _deleted - deladded
3638
3638
3639 # distinguish between file to forget and the other
3639 # distinguish between file to forget and the other
3640 added = set()
3640 added = set()
3641 for abs in dsadded:
3641 for abs in dsadded:
3642 if repo.dirstate[abs] != b'a':
3642 if repo.dirstate[abs] != b'a':
3643 added.add(abs)
3643 added.add(abs)
3644 dsadded -= added
3644 dsadded -= added
3645
3645
3646 for abs in deladded:
3646 for abs in deladded:
3647 if repo.dirstate[abs] == b'a':
3647 if repo.dirstate[abs] == b'a':
3648 dsadded.add(abs)
3648 dsadded.add(abs)
3649 deladded -= dsadded
3649 deladded -= dsadded
3650
3650
3651 # For files marked as removed, we check if an unknown file is present at
3651 # For files marked as removed, we check if an unknown file is present at
3652 # the same path. If a such file exists it may need to be backed up.
3652 # the same path. If a such file exists it may need to be backed up.
3653 # Making the distinction at this stage helps have simpler backup
3653 # Making the distinction at this stage helps have simpler backup
3654 # logic.
3654 # logic.
3655 removunk = set()
3655 removunk = set()
3656 for abs in removed:
3656 for abs in removed:
3657 target = repo.wjoin(abs)
3657 target = repo.wjoin(abs)
3658 if os.path.lexists(target):
3658 if os.path.lexists(target):
3659 removunk.add(abs)
3659 removunk.add(abs)
3660 removed -= removunk
3660 removed -= removunk
3661
3661
3662 dsremovunk = set()
3662 dsremovunk = set()
3663 for abs in dsremoved:
3663 for abs in dsremoved:
3664 target = repo.wjoin(abs)
3664 target = repo.wjoin(abs)
3665 if os.path.lexists(target):
3665 if os.path.lexists(target):
3666 dsremovunk.add(abs)
3666 dsremovunk.add(abs)
3667 dsremoved -= dsremovunk
3667 dsremoved -= dsremovunk
3668
3668
3669 # action to be actually performed by revert
3669 # action to be actually performed by revert
3670 # (<list of file>, message>) tuple
3670 # (<list of file>, message>) tuple
3671 actions = {
3671 actions = {
3672 b'revert': ([], _(b'reverting %s\n')),
3672 b'revert': ([], _(b'reverting %s\n')),
3673 b'add': ([], _(b'adding %s\n')),
3673 b'add': ([], _(b'adding %s\n')),
3674 b'remove': ([], _(b'removing %s\n')),
3674 b'remove': ([], _(b'removing %s\n')),
3675 b'drop': ([], _(b'removing %s\n')),
3675 b'drop': ([], _(b'removing %s\n')),
3676 b'forget': ([], _(b'forgetting %s\n')),
3676 b'forget': ([], _(b'forgetting %s\n')),
3677 b'undelete': ([], _(b'undeleting %s\n')),
3677 b'undelete': ([], _(b'undeleting %s\n')),
3678 b'noop': (None, _(b'no changes needed to %s\n')),
3678 b'noop': (None, _(b'no changes needed to %s\n')),
3679 b'unknown': (None, _(b'file not managed: %s\n')),
3679 b'unknown': (None, _(b'file not managed: %s\n')),
3680 }
3680 }
3681
3681
3682 # "constant" that convey the backup strategy.
3682 # "constant" that convey the backup strategy.
3683 # All set to `discard` if `no-backup` is set do avoid checking
3683 # All set to `discard` if `no-backup` is set do avoid checking
3684 # no_backup lower in the code.
3684 # no_backup lower in the code.
3685 # These values are ordered for comparison purposes
3685 # These values are ordered for comparison purposes
3686 backupinteractive = 3 # do backup if interactively modified
3686 backupinteractive = 3 # do backup if interactively modified
3687 backup = 2 # unconditionally do backup
3687 backup = 2 # unconditionally do backup
3688 check = 1 # check if the existing file differs from target
3688 check = 1 # check if the existing file differs from target
3689 discard = 0 # never do backup
3689 discard = 0 # never do backup
3690 if opts.get(b'no_backup'):
3690 if opts.get(b'no_backup'):
3691 backupinteractive = backup = check = discard
3691 backupinteractive = backup = check = discard
3692 if interactive:
3692 if interactive:
3693 dsmodifiedbackup = backupinteractive
3693 dsmodifiedbackup = backupinteractive
3694 else:
3694 else:
3695 dsmodifiedbackup = backup
3695 dsmodifiedbackup = backup
3696 tobackup = set()
3696 tobackup = set()
3697
3697
3698 backupanddel = actions[b'remove']
3698 backupanddel = actions[b'remove']
3699 if not opts.get(b'no_backup'):
3699 if not opts.get(b'no_backup'):
3700 backupanddel = actions[b'drop']
3700 backupanddel = actions[b'drop']
3701
3701
3702 disptable = (
3702 disptable = (
3703 # dispatch table:
3703 # dispatch table:
3704 # file state
3704 # file state
3705 # action
3705 # action
3706 # make backup
3706 # make backup
3707 ## Sets that results that will change file on disk
3707 ## Sets that results that will change file on disk
3708 # Modified compared to target, no local change
3708 # Modified compared to target, no local change
3709 (modified, actions[b'revert'], discard),
3709 (modified, actions[b'revert'], discard),
3710 # Modified compared to target, but local file is deleted
3710 # Modified compared to target, but local file is deleted
3711 (deleted, actions[b'revert'], discard),
3711 (deleted, actions[b'revert'], discard),
3712 # Modified compared to target, local change
3712 # Modified compared to target, local change
3713 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3713 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3714 # Added since target
3714 # Added since target
3715 (added, actions[b'remove'], discard),
3715 (added, actions[b'remove'], discard),
3716 # Added in working directory
3716 # Added in working directory
3717 (dsadded, actions[b'forget'], discard),
3717 (dsadded, actions[b'forget'], discard),
3718 # Added since target, have local modification
3718 # Added since target, have local modification
3719 (modadded, backupanddel, backup),
3719 (modadded, backupanddel, backup),
3720 # Added since target but file is missing in working directory
3720 # Added since target but file is missing in working directory
3721 (deladded, actions[b'drop'], discard),
3721 (deladded, actions[b'drop'], discard),
3722 # Removed since target, before working copy parent
3722 # Removed since target, before working copy parent
3723 (removed, actions[b'add'], discard),
3723 (removed, actions[b'add'], discard),
3724 # Same as `removed` but an unknown file exists at the same path
3724 # Same as `removed` but an unknown file exists at the same path
3725 (removunk, actions[b'add'], check),
3725 (removunk, actions[b'add'], check),
3726 # Removed since target, marked as such in working copy parent
3726 # Removed since target, marked as such in working copy parent
3727 (dsremoved, actions[b'undelete'], discard),
3727 (dsremoved, actions[b'undelete'], discard),
3728 # Same as `dsremoved` but an unknown file exists at the same path
3728 # Same as `dsremoved` but an unknown file exists at the same path
3729 (dsremovunk, actions[b'undelete'], check),
3729 (dsremovunk, actions[b'undelete'], check),
3730 ## the following sets does not result in any file changes
3730 ## the following sets does not result in any file changes
3731 # File with no modification
3731 # File with no modification
3732 (clean, actions[b'noop'], discard),
3732 (clean, actions[b'noop'], discard),
3733 # Existing file, not tracked anywhere
3733 # Existing file, not tracked anywhere
3734 (unknown, actions[b'unknown'], discard),
3734 (unknown, actions[b'unknown'], discard),
3735 )
3735 )
3736
3736
3737 for abs, exact in sorted(names.items()):
3737 for abs, exact in sorted(names.items()):
3738 # target file to be touched on disk (relative to cwd)
3738 # target file to be touched on disk (relative to cwd)
3739 target = repo.wjoin(abs)
3739 target = repo.wjoin(abs)
3740 # search the entry in the dispatch table.
3740 # search the entry in the dispatch table.
3741 # if the file is in any of these sets, it was touched in the working
3741 # if the file is in any of these sets, it was touched in the working
3742 # directory parent and we are sure it needs to be reverted.
3742 # directory parent and we are sure it needs to be reverted.
3743 for table, (xlist, msg), dobackup in disptable:
3743 for table, (xlist, msg), dobackup in disptable:
3744 if abs not in table:
3744 if abs not in table:
3745 continue
3745 continue
3746 if xlist is not None:
3746 if xlist is not None:
3747 xlist.append(abs)
3747 xlist.append(abs)
3748 if dobackup:
3748 if dobackup:
3749 # If in interactive mode, don't automatically create
3749 # If in interactive mode, don't automatically create
3750 # .orig files (issue4793)
3750 # .orig files (issue4793)
3751 if dobackup == backupinteractive:
3751 if dobackup == backupinteractive:
3752 tobackup.add(abs)
3752 tobackup.add(abs)
3753 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3753 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3754 absbakname = scmutil.backuppath(ui, repo, abs)
3754 absbakname = scmutil.backuppath(ui, repo, abs)
3755 bakname = os.path.relpath(
3755 bakname = os.path.relpath(
3756 absbakname, start=repo.root
3756 absbakname, start=repo.root
3757 )
3757 )
3758 ui.note(
3758 ui.note(
3759 _(b'saving current version of %s as %s\n')
3759 _(b'saving current version of %s as %s\n')
3760 % (uipathfn(abs), uipathfn(bakname))
3760 % (uipathfn(abs), uipathfn(bakname))
3761 )
3761 )
3762 if not opts.get(b'dry_run'):
3762 if not opts.get(b'dry_run'):
3763 if interactive:
3763 if interactive:
3764 util.copyfile(target, absbakname)
3764 util.copyfile(target, absbakname)
3765 else:
3765 else:
3766 util.rename(target, absbakname)
3766 util.rename(target, absbakname)
3767 if opts.get(b'dry_run'):
3767 if opts.get(b'dry_run'):
3768 if ui.verbose or not exact:
3768 if ui.verbose or not exact:
3769 ui.status(msg % uipathfn(abs))
3769 ui.status(msg % uipathfn(abs))
3770 elif exact:
3770 elif exact:
3771 ui.warn(msg % uipathfn(abs))
3771 ui.warn(msg % uipathfn(abs))
3772 break
3772 break
3773
3773
3774 if not opts.get(b'dry_run'):
3774 if not opts.get(b'dry_run'):
3775 needdata = (b'revert', b'add', b'undelete')
3775 needdata = (b'revert', b'add', b'undelete')
3776 oplist = [actions[name][0] for name in needdata]
3776 oplist = [actions[name][0] for name in needdata]
3777 prefetch = scmutil.prefetchfiles
3777 prefetch = scmutil.prefetchfiles
3778 matchfiles = scmutil.matchfiles(
3778 matchfiles = scmutil.matchfiles(
3779 repo, [f for sublist in oplist for f in sublist]
3779 repo, [f for sublist in oplist for f in sublist]
3780 )
3780 )
3781 prefetch(
3781 prefetch(
3782 repo, [(ctx.rev(), matchfiles)],
3782 repo, [(ctx.rev(), matchfiles)],
3783 )
3783 )
3784 match = scmutil.match(repo[None], pats)
3784 match = scmutil.match(repo[None], pats)
3785 _performrevert(
3785 _performrevert(
3786 repo,
3786 repo,
3787 ctx,
3787 ctx,
3788 names,
3788 names,
3789 uipathfn,
3789 uipathfn,
3790 actions,
3790 actions,
3791 match,
3791 match,
3792 interactive,
3792 interactive,
3793 tobackup,
3793 tobackup,
3794 )
3794 )
3795
3795
3796 if targetsubs:
3796 if targetsubs:
3797 # Revert the subrepos on the revert list
3797 # Revert the subrepos on the revert list
3798 for sub in targetsubs:
3798 for sub in targetsubs:
3799 try:
3799 try:
3800 wctx.sub(sub).revert(
3800 wctx.sub(sub).revert(
3801 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3801 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3802 )
3802 )
3803 except KeyError:
3803 except KeyError:
3804 raise error.Abort(
3804 raise error.Abort(
3805 b"subrepository '%s' does not exist in %s!"
3805 b"subrepository '%s' does not exist in %s!"
3806 % (sub, short(ctx.node()))
3806 % (sub, short(ctx.node()))
3807 )
3807 )
3808
3808
3809
3809
3810 def _performrevert(
3810 def _performrevert(
3811 repo,
3811 repo,
3812 ctx,
3812 ctx,
3813 names,
3813 names,
3814 uipathfn,
3814 uipathfn,
3815 actions,
3815 actions,
3816 match,
3816 match,
3817 interactive=False,
3817 interactive=False,
3818 tobackup=None,
3818 tobackup=None,
3819 ):
3819 ):
3820 """function that actually performs all the actions computed for revert
3820 """function that actually performs all the actions computed for revert
3821
3821
3822 This is an independent function to let extensions plug in and react to
3822 This is an independent function to let extensions plug in and react to
3823 the imminent revert.
3823 the imminent revert.
3824
3824
3825 Make sure you have the working directory locked when calling this function.
3825 Make sure you have the working directory locked when calling this function.
3826 """
3826 """
3827 parent, p2 = repo.dirstate.parents()
3827 parent, p2 = repo.dirstate.parents()
3828 node = ctx.node()
3828 node = ctx.node()
3829 excluded_files = []
3829 excluded_files = []
3830
3830
3831 def checkout(f):
3831 def checkout(f):
3832 fc = ctx[f]
3832 fc = ctx[f]
3833 repo.wwrite(f, fc.data(), fc.flags())
3833 repo.wwrite(f, fc.data(), fc.flags())
3834
3834
3835 def doremove(f):
3835 def doremove(f):
3836 try:
3836 try:
3837 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3837 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3838 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3838 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3839 except OSError:
3839 except OSError:
3840 pass
3840 pass
3841 repo.dirstate.remove(f)
3841 repo.dirstate.remove(f)
3842
3842
3843 def prntstatusmsg(action, f):
3843 def prntstatusmsg(action, f):
3844 exact = names[f]
3844 exact = names[f]
3845 if repo.ui.verbose or not exact:
3845 if repo.ui.verbose or not exact:
3846 repo.ui.status(actions[action][1] % uipathfn(f))
3846 repo.ui.status(actions[action][1] % uipathfn(f))
3847
3847
3848 audit_path = pathutil.pathauditor(repo.root, cached=True)
3848 audit_path = pathutil.pathauditor(repo.root, cached=True)
3849 for f in actions[b'forget'][0]:
3849 for f in actions[b'forget'][0]:
3850 if interactive:
3850 if interactive:
3851 choice = repo.ui.promptchoice(
3851 choice = repo.ui.promptchoice(
3852 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3852 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3853 )
3853 )
3854 if choice == 0:
3854 if choice == 0:
3855 prntstatusmsg(b'forget', f)
3855 prntstatusmsg(b'forget', f)
3856 repo.dirstate.drop(f)
3856 repo.dirstate.drop(f)
3857 else:
3857 else:
3858 excluded_files.append(f)
3858 excluded_files.append(f)
3859 else:
3859 else:
3860 prntstatusmsg(b'forget', f)
3860 prntstatusmsg(b'forget', f)
3861 repo.dirstate.drop(f)
3861 repo.dirstate.drop(f)
3862 for f in actions[b'remove'][0]:
3862 for f in actions[b'remove'][0]:
3863 audit_path(f)
3863 audit_path(f)
3864 if interactive:
3864 if interactive:
3865 choice = repo.ui.promptchoice(
3865 choice = repo.ui.promptchoice(
3866 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3866 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3867 )
3867 )
3868 if choice == 0:
3868 if choice == 0:
3869 prntstatusmsg(b'remove', f)
3869 prntstatusmsg(b'remove', f)
3870 doremove(f)
3870 doremove(f)
3871 else:
3871 else:
3872 excluded_files.append(f)
3872 excluded_files.append(f)
3873 else:
3873 else:
3874 prntstatusmsg(b'remove', f)
3874 prntstatusmsg(b'remove', f)
3875 doremove(f)
3875 doremove(f)
3876 for f in actions[b'drop'][0]:
3876 for f in actions[b'drop'][0]:
3877 audit_path(f)
3877 audit_path(f)
3878 prntstatusmsg(b'drop', f)
3878 prntstatusmsg(b'drop', f)
3879 repo.dirstate.remove(f)
3879 repo.dirstate.remove(f)
3880
3880
3881 normal = None
3881 normal = None
3882 if node == parent:
3882 if node == parent:
3883 # We're reverting to our parent. If possible, we'd like status
3883 # We're reverting to our parent. If possible, we'd like status
3884 # to report the file as clean. We have to use normallookup for
3884 # to report the file as clean. We have to use normallookup for
3885 # merges to avoid losing information about merged/dirty files.
3885 # merges to avoid losing information about merged/dirty files.
3886 if p2 != nullid:
3886 if p2 != nullid:
3887 normal = repo.dirstate.normallookup
3887 normal = repo.dirstate.normallookup
3888 else:
3888 else:
3889 normal = repo.dirstate.normal
3889 normal = repo.dirstate.normal
3890
3890
3891 newlyaddedandmodifiedfiles = set()
3891 newlyaddedandmodifiedfiles = set()
3892 if interactive:
3892 if interactive:
3893 # Prompt the user for changes to revert
3893 # Prompt the user for changes to revert
3894 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3894 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3895 m = scmutil.matchfiles(repo, torevert)
3895 m = scmutil.matchfiles(repo, torevert)
3896 diffopts = patch.difffeatureopts(
3896 diffopts = patch.difffeatureopts(
3897 repo.ui,
3897 repo.ui,
3898 whitespace=True,
3898 whitespace=True,
3899 section=b'commands',
3899 section=b'commands',
3900 configprefix=b'revert.interactive.',
3900 configprefix=b'revert.interactive.',
3901 )
3901 )
3902 diffopts.nodates = True
3902 diffopts.nodates = True
3903 diffopts.git = True
3903 diffopts.git = True
3904 operation = b'apply'
3904 operation = b'apply'
3905 if node == parent:
3905 if node == parent:
3906 if repo.ui.configbool(
3906 if repo.ui.configbool(
3907 b'experimental', b'revert.interactive.select-to-keep'
3907 b'experimental', b'revert.interactive.select-to-keep'
3908 ):
3908 ):
3909 operation = b'keep'
3909 operation = b'keep'
3910 else:
3910 else:
3911 operation = b'discard'
3911 operation = b'discard'
3912
3912
3913 if operation == b'apply':
3913 if operation == b'apply':
3914 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3914 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3915 else:
3915 else:
3916 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3916 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3917 originalchunks = patch.parsepatch(diff)
3917 originalchunks = patch.parsepatch(diff)
3918
3918
3919 try:
3919 try:
3920
3920
3921 chunks, opts = recordfilter(
3921 chunks, opts = recordfilter(
3922 repo.ui, originalchunks, match, operation=operation
3922 repo.ui, originalchunks, match, operation=operation
3923 )
3923 )
3924 if operation == b'discard':
3924 if operation == b'discard':
3925 chunks = patch.reversehunks(chunks)
3925 chunks = patch.reversehunks(chunks)
3926
3926
3927 except error.PatchError as err:
3927 except error.PatchError as err:
3928 raise error.Abort(_(b'error parsing patch: %s') % err)
3928 raise error.Abort(_(b'error parsing patch: %s') % err)
3929
3929
3930 # FIXME: when doing an interactive revert of a copy, there's no way of
3930 # FIXME: when doing an interactive revert of a copy, there's no way of
3931 # performing a partial revert of the added file, the only option is
3931 # performing a partial revert of the added file, the only option is
3932 # "remove added file <name> (Yn)?", so we don't need to worry about the
3932 # "remove added file <name> (Yn)?", so we don't need to worry about the
3933 # alsorestore value. Ideally we'd be able to partially revert
3933 # alsorestore value. Ideally we'd be able to partially revert
3934 # copied/renamed files.
3934 # copied/renamed files.
3935 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3935 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3936 chunks, originalchunks
3936 chunks, originalchunks
3937 )
3937 )
3938 if tobackup is None:
3938 if tobackup is None:
3939 tobackup = set()
3939 tobackup = set()
3940 # Apply changes
3940 # Apply changes
3941 fp = stringio()
3941 fp = stringio()
3942 # chunks are serialized per file, but files aren't sorted
3942 # chunks are serialized per file, but files aren't sorted
3943 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3943 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3944 prntstatusmsg(b'revert', f)
3944 prntstatusmsg(b'revert', f)
3945 files = set()
3945 files = set()
3946 for c in chunks:
3946 for c in chunks:
3947 if ishunk(c):
3947 if ishunk(c):
3948 abs = c.header.filename()
3948 abs = c.header.filename()
3949 # Create a backup file only if this hunk should be backed up
3949 # Create a backup file only if this hunk should be backed up
3950 if c.header.filename() in tobackup:
3950 if c.header.filename() in tobackup:
3951 target = repo.wjoin(abs)
3951 target = repo.wjoin(abs)
3952 bakname = scmutil.backuppath(repo.ui, repo, abs)
3952 bakname = scmutil.backuppath(repo.ui, repo, abs)
3953 util.copyfile(target, bakname)
3953 util.copyfile(target, bakname)
3954 tobackup.remove(abs)
3954 tobackup.remove(abs)
3955 if abs not in files:
3955 if abs not in files:
3956 files.add(abs)
3956 files.add(abs)
3957 if operation == b'keep':
3957 if operation == b'keep':
3958 checkout(abs)
3958 checkout(abs)
3959 c.write(fp)
3959 c.write(fp)
3960 dopatch = fp.tell()
3960 dopatch = fp.tell()
3961 fp.seek(0)
3961 fp.seek(0)
3962 if dopatch:
3962 if dopatch:
3963 try:
3963 try:
3964 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3964 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3965 except error.PatchError as err:
3965 except error.PatchError as err:
3966 raise error.Abort(pycompat.bytestr(err))
3966 raise error.Abort(pycompat.bytestr(err))
3967 del fp
3967 del fp
3968 else:
3968 else:
3969 for f in actions[b'revert'][0]:
3969 for f in actions[b'revert'][0]:
3970 prntstatusmsg(b'revert', f)
3970 prntstatusmsg(b'revert', f)
3971 checkout(f)
3971 checkout(f)
3972 if normal:
3972 if normal:
3973 normal(f)
3973 normal(f)
3974
3974
3975 for f in actions[b'add'][0]:
3975 for f in actions[b'add'][0]:
3976 # Don't checkout modified files, they are already created by the diff
3976 # Don't checkout modified files, they are already created by the diff
3977 if f not in newlyaddedandmodifiedfiles:
3977 if f not in newlyaddedandmodifiedfiles:
3978 prntstatusmsg(b'add', f)
3978 prntstatusmsg(b'add', f)
3979 checkout(f)
3979 checkout(f)
3980 repo.dirstate.add(f)
3980 repo.dirstate.add(f)
3981
3981
3982 normal = repo.dirstate.normallookup
3982 normal = repo.dirstate.normallookup
3983 if node == parent and p2 == nullid:
3983 if node == parent and p2 == nullid:
3984 normal = repo.dirstate.normal
3984 normal = repo.dirstate.normal
3985 for f in actions[b'undelete'][0]:
3985 for f in actions[b'undelete'][0]:
3986 if interactive:
3986 if interactive:
3987 choice = repo.ui.promptchoice(
3987 choice = repo.ui.promptchoice(
3988 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3988 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3989 )
3989 )
3990 if choice == 0:
3990 if choice == 0:
3991 prntstatusmsg(b'undelete', f)
3991 prntstatusmsg(b'undelete', f)
3992 checkout(f)
3992 checkout(f)
3993 normal(f)
3993 normal(f)
3994 else:
3994 else:
3995 excluded_files.append(f)
3995 excluded_files.append(f)
3996 else:
3996 else:
3997 prntstatusmsg(b'undelete', f)
3997 prntstatusmsg(b'undelete', f)
3998 checkout(f)
3998 checkout(f)
3999 normal(f)
3999 normal(f)
4000
4000
4001 copied = copies.pathcopies(repo[parent], ctx)
4001 copied = copies.pathcopies(repo[parent], ctx)
4002
4002
4003 for f in (
4003 for f in (
4004 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
4004 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
4005 ):
4005 ):
4006 if f in copied:
4006 if f in copied:
4007 repo.dirstate.copy(copied[f], f)
4007 repo.dirstate.copy(copied[f], f)
4008
4008
4009
4009
4010 # a list of (ui, repo, otherpeer, opts, missing) functions called by
4010 # a list of (ui, repo, otherpeer, opts, missing) functions called by
4011 # commands.outgoing. "missing" is "missing" of the result of
4011 # commands.outgoing. "missing" is "missing" of the result of
4012 # "findcommonoutgoing()"
4012 # "findcommonoutgoing()"
4013 outgoinghooks = util.hooks()
4013 outgoinghooks = util.hooks()
4014
4014
4015 # a list of (ui, repo) functions called by commands.summary
4015 # a list of (ui, repo) functions called by commands.summary
4016 summaryhooks = util.hooks()
4016 summaryhooks = util.hooks()
4017
4017
4018 # a list of (ui, repo, opts, changes) functions called by commands.summary.
4018 # a list of (ui, repo, opts, changes) functions called by commands.summary.
4019 #
4019 #
4020 # functions should return tuple of booleans below, if 'changes' is None:
4020 # functions should return tuple of booleans below, if 'changes' is None:
4021 # (whether-incomings-are-needed, whether-outgoings-are-needed)
4021 # (whether-incomings-are-needed, whether-outgoings-are-needed)
4022 #
4022 #
4023 # otherwise, 'changes' is a tuple of tuples below:
4023 # otherwise, 'changes' is a tuple of tuples below:
4024 # - (sourceurl, sourcebranch, sourcepeer, incoming)
4024 # - (sourceurl, sourcebranch, sourcepeer, incoming)
4025 # - (desturl, destbranch, destpeer, outgoing)
4025 # - (desturl, destbranch, destpeer, outgoing)
4026 summaryremotehooks = util.hooks()
4026 summaryremotehooks = util.hooks()
4027
4027
4028
4028
4029 def checkunfinished(repo, commit=False, skipmerge=False):
4029 def checkunfinished(repo, commit=False, skipmerge=False):
4030 '''Look for an unfinished multistep operation, like graft, and abort
4030 '''Look for an unfinished multistep operation, like graft, and abort
4031 if found. It's probably good to check this right before
4031 if found. It's probably good to check this right before
4032 bailifchanged().
4032 bailifchanged().
4033 '''
4033 '''
4034 # Check for non-clearable states first, so things like rebase will take
4034 # Check for non-clearable states first, so things like rebase will take
4035 # precedence over update.
4035 # precedence over update.
4036 for state in statemod._unfinishedstates:
4036 for state in statemod._unfinishedstates:
4037 if (
4037 if (
4038 state._clearable
4038 state._clearable
4039 or (commit and state._allowcommit)
4039 or (commit and state._allowcommit)
4040 or state._reportonly
4040 or state._reportonly
4041 ):
4041 ):
4042 continue
4042 continue
4043 if state.isunfinished(repo):
4043 if state.isunfinished(repo):
4044 raise error.Abort(state.msg(), hint=state.hint())
4044 raise error.Abort(state.msg(), hint=state.hint())
4045
4045
4046 for s in statemod._unfinishedstates:
4046 for s in statemod._unfinishedstates:
4047 if (
4047 if (
4048 not s._clearable
4048 not s._clearable
4049 or (commit and s._allowcommit)
4049 or (commit and s._allowcommit)
4050 or (s._opname == b'merge' and skipmerge)
4050 or (s._opname == b'merge' and skipmerge)
4051 or s._reportonly
4051 or s._reportonly
4052 ):
4052 ):
4053 continue
4053 continue
4054 if s.isunfinished(repo):
4054 if s.isunfinished(repo):
4055 raise error.Abort(s.msg(), hint=s.hint())
4055 raise error.Abort(s.msg(), hint=s.hint())
4056
4056
4057
4057
4058 def clearunfinished(repo):
4058 def clearunfinished(repo):
4059 '''Check for unfinished operations (as above), and clear the ones
4059 '''Check for unfinished operations (as above), and clear the ones
4060 that are clearable.
4060 that are clearable.
4061 '''
4061 '''
4062 for state in statemod._unfinishedstates:
4062 for state in statemod._unfinishedstates:
4063 if state._reportonly:
4063 if state._reportonly:
4064 continue
4064 continue
4065 if not state._clearable and state.isunfinished(repo):
4065 if not state._clearable and state.isunfinished(repo):
4066 raise error.Abort(state.msg(), hint=state.hint())
4066 raise error.Abort(state.msg(), hint=state.hint())
4067
4067
4068 for s in statemod._unfinishedstates:
4068 for s in statemod._unfinishedstates:
4069 if s._opname == b'merge' or state._reportonly:
4069 if s._opname == b'merge' or state._reportonly:
4070 continue
4070 continue
4071 if s._clearable and s.isunfinished(repo):
4071 if s._clearable and s.isunfinished(repo):
4072 util.unlink(repo.vfs.join(s._fname))
4072 util.unlink(repo.vfs.join(s._fname))
4073
4073
4074
4074
4075 def getunfinishedstate(repo):
4075 def getunfinishedstate(repo):
4076 ''' Checks for unfinished operations and returns statecheck object
4076 ''' Checks for unfinished operations and returns statecheck object
4077 for it'''
4077 for it'''
4078 for state in statemod._unfinishedstates:
4078 for state in statemod._unfinishedstates:
4079 if state.isunfinished(repo):
4079 if state.isunfinished(repo):
4080 return state
4080 return state
4081 return None
4081 return None
4082
4082
4083
4083
4084 def howtocontinue(repo):
4084 def howtocontinue(repo):
4085 '''Check for an unfinished operation and return the command to finish
4085 '''Check for an unfinished operation and return the command to finish
4086 it.
4086 it.
4087
4087
4088 statemod._unfinishedstates list is checked for an unfinished operation
4088 statemod._unfinishedstates list is checked for an unfinished operation
4089 and the corresponding message to finish it is generated if a method to
4089 and the corresponding message to finish it is generated if a method to
4090 continue is supported by the operation.
4090 continue is supported by the operation.
4091
4091
4092 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4092 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4093 a boolean.
4093 a boolean.
4094 '''
4094 '''
4095 contmsg = _(b"continue: %s")
4095 contmsg = _(b"continue: %s")
4096 for state in statemod._unfinishedstates:
4096 for state in statemod._unfinishedstates:
4097 if not state._continueflag:
4097 if not state._continueflag:
4098 continue
4098 continue
4099 if state.isunfinished(repo):
4099 if state.isunfinished(repo):
4100 return contmsg % state.continuemsg(), True
4100 return contmsg % state.continuemsg(), True
4101 if repo[None].dirty(missing=True, merge=False, branch=False):
4101 if repo[None].dirty(missing=True, merge=False, branch=False):
4102 return contmsg % _(b"hg commit"), False
4102 return contmsg % _(b"hg commit"), False
4103 return None, None
4103 return None, None
4104
4104
4105
4105
4106 def checkafterresolved(repo):
4106 def checkafterresolved(repo):
4107 '''Inform the user about the next action after completing hg resolve
4107 '''Inform the user about the next action after completing hg resolve
4108
4108
4109 If there's an unfinished operation that supports the continue flag,
4109 If there's an unfinished operation that supports the continue flag,
4110 howtocontinue will yield repo.ui.warn as the reporter.
4110 howtocontinue will yield repo.ui.warn as the reporter.
4111
4111
4112 Otherwise, it will yield repo.ui.note.
4112 Otherwise, it will yield repo.ui.note.
4113 '''
4113 '''
4114 msg, warning = howtocontinue(repo)
4114 msg, warning = howtocontinue(repo)
4115 if msg is not None:
4115 if msg is not None:
4116 if warning:
4116 if warning:
4117 repo.ui.warn(b"%s\n" % msg)
4117 repo.ui.warn(b"%s\n" % msg)
4118 else:
4118 else:
4119 repo.ui.note(b"%s\n" % msg)
4119 repo.ui.note(b"%s\n" % msg)
4120
4120
4121
4121
4122 def wrongtooltocontinue(repo, task):
4122 def wrongtooltocontinue(repo, task):
4123 '''Raise an abort suggesting how to properly continue if there is an
4123 '''Raise an abort suggesting how to properly continue if there is an
4124 active task.
4124 active task.
4125
4125
4126 Uses howtocontinue() to find the active task.
4126 Uses howtocontinue() to find the active task.
4127
4127
4128 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4128 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4129 a hint.
4129 a hint.
4130 '''
4130 '''
4131 after = howtocontinue(repo)
4131 after = howtocontinue(repo)
4132 hint = None
4132 hint = None
4133 if after[1]:
4133 if after[1]:
4134 hint = after[0]
4134 hint = after[0]
4135 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4135 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4136
4136
4137
4137
4138 def abortgraft(ui, repo, graftstate):
4138 def abortgraft(ui, repo, graftstate):
4139 """abort the interrupted graft and rollbacks to the state before interrupted
4139 """abort the interrupted graft and rollbacks to the state before interrupted
4140 graft"""
4140 graft"""
4141 if not graftstate.exists():
4141 if not graftstate.exists():
4142 raise error.Abort(_(b"no interrupted graft to abort"))
4142 raise error.Abort(_(b"no interrupted graft to abort"))
4143 statedata = readgraftstate(repo, graftstate)
4143 statedata = readgraftstate(repo, graftstate)
4144 newnodes = statedata.get(b'newnodes')
4144 newnodes = statedata.get(b'newnodes')
4145 if newnodes is None:
4145 if newnodes is None:
4146 # an old graft state which does not have all the data required to abort
4146 # an old graft state which does not have all the data required to abort
4147 # the graft
4147 # the graft
4148 raise error.Abort(_(b"cannot abort using an old graftstate"))
4148 raise error.Abort(_(b"cannot abort using an old graftstate"))
4149
4149
4150 # changeset from which graft operation was started
4150 # changeset from which graft operation was started
4151 if len(newnodes) > 0:
4151 if len(newnodes) > 0:
4152 startctx = repo[newnodes[0]].p1()
4152 startctx = repo[newnodes[0]].p1()
4153 else:
4153 else:
4154 startctx = repo[b'.']
4154 startctx = repo[b'.']
4155 # whether to strip or not
4155 # whether to strip or not
4156 cleanup = False
4156 cleanup = False
4157 from . import hg
4158
4157
4159 if newnodes:
4158 if newnodes:
4160 newnodes = [repo[r].rev() for r in newnodes]
4159 newnodes = [repo[r].rev() for r in newnodes]
4161 cleanup = True
4160 cleanup = True
4162 # checking that none of the newnodes turned public or is public
4161 # checking that none of the newnodes turned public or is public
4163 immutable = [c for c in newnodes if not repo[c].mutable()]
4162 immutable = [c for c in newnodes if not repo[c].mutable()]
4164 if immutable:
4163 if immutable:
4165 repo.ui.warn(
4164 repo.ui.warn(
4166 _(b"cannot clean up public changesets %s\n")
4165 _(b"cannot clean up public changesets %s\n")
4167 % b', '.join(bytes(repo[r]) for r in immutable),
4166 % b', '.join(bytes(repo[r]) for r in immutable),
4168 hint=_(b"see 'hg help phases' for details"),
4167 hint=_(b"see 'hg help phases' for details"),
4169 )
4168 )
4170 cleanup = False
4169 cleanup = False
4171
4170
4172 # checking that no new nodes are created on top of grafted revs
4171 # checking that no new nodes are created on top of grafted revs
4173 desc = set(repo.changelog.descendants(newnodes))
4172 desc = set(repo.changelog.descendants(newnodes))
4174 if desc - set(newnodes):
4173 if desc - set(newnodes):
4175 repo.ui.warn(
4174 repo.ui.warn(
4176 _(
4175 _(
4177 b"new changesets detected on destination "
4176 b"new changesets detected on destination "
4178 b"branch, can't strip\n"
4177 b"branch, can't strip\n"
4179 )
4178 )
4180 )
4179 )
4181 cleanup = False
4180 cleanup = False
4182
4181
4183 if cleanup:
4182 if cleanup:
4184 with repo.wlock(), repo.lock():
4183 with repo.wlock(), repo.lock():
4185 hg.updaterepo(repo, startctx.node(), overwrite=True)
4184 mergemod.clean_update(startctx)
4186 # stripping the new nodes created
4185 # stripping the new nodes created
4187 strippoints = [
4186 strippoints = [
4188 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4187 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4189 ]
4188 ]
4190 repair.strip(repo.ui, repo, strippoints, backup=False)
4189 repair.strip(repo.ui, repo, strippoints, backup=False)
4191
4190
4192 if not cleanup:
4191 if not cleanup:
4193 # we don't update to the startnode if we can't strip
4192 # we don't update to the startnode if we can't strip
4194 startctx = repo[b'.']
4193 startctx = repo[b'.']
4195 hg.updaterepo(repo, startctx.node(), overwrite=True)
4194 mergemod.clean_update(startctx)
4196
4195
4197 ui.status(_(b"graft aborted\n"))
4196 ui.status(_(b"graft aborted\n"))
4198 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4197 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4199 graftstate.delete()
4198 graftstate.delete()
4200 return 0
4199 return 0
4201
4200
4202
4201
4203 def readgraftstate(repo, graftstate):
4202 def readgraftstate(repo, graftstate):
4204 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4203 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4205 """read the graft state file and return a dict of the data stored in it"""
4204 """read the graft state file and return a dict of the data stored in it"""
4206 try:
4205 try:
4207 return graftstate.read()
4206 return graftstate.read()
4208 except error.CorruptedState:
4207 except error.CorruptedState:
4209 nodes = repo.vfs.read(b'graftstate').splitlines()
4208 nodes = repo.vfs.read(b'graftstate').splitlines()
4210 return {b'nodes': nodes}
4209 return {b'nodes': nodes}
4211
4210
4212
4211
4213 def hgabortgraft(ui, repo):
4212 def hgabortgraft(ui, repo):
4214 """ abort logic for aborting graft using 'hg abort'"""
4213 """ abort logic for aborting graft using 'hg abort'"""
4215 with repo.wlock():
4214 with repo.wlock():
4216 graftstate = statemod.cmdstate(repo, b'graftstate')
4215 graftstate = statemod.cmdstate(repo, b'graftstate')
4217 return abortgraft(ui, repo, graftstate)
4216 return abortgraft(ui, repo, graftstate)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now