##// END OF EJS Templates
cmdutil: change check_incompatible_arguments() *arg to single iterable...
Martin von Zweigbergk -
r44655:d4c15012 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,2266 +1,2266 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 nullrev,
24 nullrev,
25 short,
25 short,
26 )
26 )
27 from mercurial.pycompat import open
27 from mercurial.pycompat import open
28 from mercurial import (
28 from mercurial import (
29 bookmarks,
29 bookmarks,
30 cmdutil,
30 cmdutil,
31 commands,
31 commands,
32 copies,
32 copies,
33 destutil,
33 destutil,
34 dirstateguard,
34 dirstateguard,
35 error,
35 error,
36 extensions,
36 extensions,
37 hg,
37 hg,
38 merge as mergemod,
38 merge as mergemod,
39 mergeutil,
39 mergeutil,
40 obsolete,
40 obsolete,
41 obsutil,
41 obsutil,
42 patch,
42 patch,
43 phases,
43 phases,
44 pycompat,
44 pycompat,
45 registrar,
45 registrar,
46 repair,
46 repair,
47 revset,
47 revset,
48 revsetlang,
48 revsetlang,
49 rewriteutil,
49 rewriteutil,
50 scmutil,
50 scmutil,
51 smartset,
51 smartset,
52 state as statemod,
52 state as statemod,
53 util,
53 util,
54 )
54 )
55
55
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased
revtodo = -1
# String form of revtodo, as written to / parsed from the rebasestate file.
revtodostr = b'-1'

# legacy revstates no longer needed in current code
# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
legacystates = {b'-2', b'-3', b'-4', b'-5'}

# Command table populated by the @command decorator below.
cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
74
74
75
75
76 def _nothingtorebase():
76 def _nothingtorebase():
77 return 1
77 return 1
78
78
79
79
80 def _savegraft(ctx, extra):
80 def _savegraft(ctx, extra):
81 s = ctx.extra().get(b'source', None)
81 s = ctx.extra().get(b'source', None)
82 if s is not None:
82 if s is not None:
83 extra[b'source'] = s
83 extra[b'source'] = s
84 s = ctx.extra().get(b'intermediate-source', None)
84 s = ctx.extra().get(b'intermediate-source', None)
85 if s is not None:
85 if s is not None:
86 extra[b'intermediate-source'] = s
86 extra[b'intermediate-source'] = s
87
87
88
88
def _savebranch(ctx, extra):
    # Record the changeset's named branch in the commit extras; installed as
    # an extrafn when rebasing with keepbranches (see _performrebase).
    extra[b'branch'] = ctx.branch()
91
91
92
92
def _destrebase(repo, sourceset, destspace=None):
    """small wrapper around destmerge to pass the right extra args

    Please wrap destutil.destmerge instead."""
    return destutil.destmerge(
        repo,
        action=b'rebase',
        sourceset=sourceset,
        # rebase may legitimately start from a head, so skip the head check
        onheadcheck=False,
        destspace=destspace,
    )
104
104
105
105
# Registrar collecting the revset predicates defined below.
revsetpredicate = registrar.revsetpredicate()
107
107
108
108
@revsetpredicate(b'_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    return subset & smartset.baseset([_destrebase(repo, sourceset)])
121
121
122
122
@revsetpredicate(b'_destautoorphanrebase')
def _revsetdestautoorphanrebase(repo, subset, x):
    # ``_destautoorphanrebase()``

    # automatic rebase destination for a single orphan revision.
    # Obsolete revisions must be queried on the unfiltered repo since they
    # are hidden in the filtered view.
    unfi = repo.unfiltered()
    obsoleted = unfi.revs(b'obsolete()')

    src = revset.getset(repo, subset, x).first()

    # Empty src or already obsoleted - Do not return a destination
    # NOTE(review): `not src` is also true for revision 0 — presumably rev 0
    # is never an orphan in practice, but confirm.
    if not src or src in obsoleted:
        return smartset.baseset()
    dests = destutil.orphanpossibledestination(repo, src)
    if len(dests) > 1:
        raise error.Abort(
            _(b"ambiguous automatic rebase: %r could end up on any of %r")
            % (src, dests)
        )
    # We have zero or one destination, so we can just return here.
    return smartset.baseset(dests)
144
144
145
145
def _ctxdesc(ctx):
    """short description for a context"""
    repo = ctx.repo()
    # "rev:shorthash "first line of description""
    firstline = ctx.description().split(b'\n', 1)[0]
    desc = b'%d:%s "%s"' % (ctx.rev(), ctx, firstline)
    # Append any names (bookmarks, tags, ...) except branch names.
    labels = []
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        labels.extend(ns.names(repo, ctx.node()))
    if labels:
        desc += b' (%s)' % b' '.join(labels)
    return desc
162
162
163
163
class rebaseruntime(object):
    """This class is a container for rebase runtime state"""
166
166
    def __init__(self, repo, ui, inmemory=False, opts=None):
        """Initialize rebase runtime state.

        repo: repository the rebase operates on
        ui: ui object used for config lookups and messages
        inmemory: rebase via an in-memory working context instead of on disk
        opts: command option dict (may be None)
        """
        if opts is None:
            opts = {}

        # prepared: whether we have rebasestate prepared or not. Currently it
        # decides whether "self.repo" is unfiltered or not.
        # The rebasestate has explicit hash to hash instructions not depending
        # on visibility. If rebasestate exists (in-memory or on-disk), use
        # unfiltered repo to avoid visibility issues.
        # Before knowing rebasestate (i.e. when starting a new rebase (not
        # --continue or --abort)), the original repo should be used so
        # visibility-dependent revsets are correct.
        self.prepared = False
        self._repo = repo

        self.ui = ui
        self.opts = opts
        # Working directory revision before the rebase started.
        self.originalwd = None
        # "external" parent revision, computed for --collapse (see
        # _preparenewrebase); nullrev until then.
        self.external = nullrev
        # Mapping between the old revision id and either what is the new rebased
        # revision or what needs to be done with the old revision. The state
        # dict will be what contains most of the rebase progress state.
        self.state = {}
        self.activebookmark = None
        # {srcrev: destrev} destination for each source revision.
        self.destmap = {}
        # Revisions that ended up not being rebased (recomputed in _read).
        self.skipped = set()

        self.collapsef = opts.get(b'collapse', False)
        self.collapsemsg = cmdutil.logmessage(ui, opts)
        self.date = opts.get(b'date', None)

        e = opts.get(b'extrafn')  # internal, used by e.g. hgsubversion
        self.extrafns = [_savegraft]
        if e:
            # A caller-supplied extrafn replaces the default list entirely.
            self.extrafns = [e]

        self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
        self.keepf = opts.get(b'keep', False)
        self.keepbranchesf = opts.get(b'keepbranches', False)
        # Obsolete revs to skip, filled in by _handleskippingobsolete.
        self.obsoletenotrebased = {}
        self.obsoletewithoutsuccessorindestination = set()
        self.inmemory = inmemory
        self.stateobj = statemod.cmdstate(repo, b'rebasestate')
210
210
211 @property
211 @property
212 def repo(self):
212 def repo(self):
213 if self.prepared:
213 if self.prepared:
214 return self._repo.unfiltered()
214 return self._repo.unfiltered()
215 else:
215 else:
216 return self._repo
216 return self._repo
217
217
218 def storestatus(self, tr=None):
218 def storestatus(self, tr=None):
219 """Store the current status to allow recovery"""
219 """Store the current status to allow recovery"""
220 if tr:
220 if tr:
221 tr.addfilegenerator(
221 tr.addfilegenerator(
222 b'rebasestate',
222 b'rebasestate',
223 (b'rebasestate',),
223 (b'rebasestate',),
224 self._writestatus,
224 self._writestatus,
225 location=b'plain',
225 location=b'plain',
226 )
226 )
227 else:
227 else:
228 with self.repo.vfs(b"rebasestate", b"w") as f:
228 with self.repo.vfs(b"rebasestate", b"w") as f:
229 self._writestatus(f)
229 self._writestatus(f)
230
230
    def _writestatus(self, f):
        """Serialize rebase state to the file object ``f``.

        Line format: originalwd hex, an empty line (legacy "dest" slot),
        external hex, collapse flag, keep flag, keepbranches flag, active
        bookmark name (possibly empty), then one "oldrev:newrev:destnode"
        entry per item in self.state. Mirrors the parser in _read.
        """
        repo = self.repo
        # State is hash-based, so it must be written from the unfiltered repo.
        assert repo.filtername is None
        f.write(repo[self.originalwd].hex() + b'\n')
        # was "dest". we now write dest per src root below.
        f.write(b'\n')
        f.write(repo[self.external].hex() + b'\n')
        f.write(b'%d\n' % int(self.collapsef))
        f.write(b'%d\n' % int(self.keepf))
        f.write(b'%d\n' % int(self.keepbranchesf))
        f.write(b'%s\n' % (self.activebookmark or b''))
        destmap = self.destmap
        for d, v in pycompat.iteritems(self.state):
            oldrev = repo[d].hex()
            if v >= 0:
                # Already rebased: record the new changeset's hash.
                newrev = repo[v].hex()
            else:
                # Not rebased yet: record the sentinel (e.g. revtodo) as text.
                newrev = b"%d" % v
            destnode = repo[destmap[d]].hex()
            f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
        repo.ui.debug(b'rebase status stored\n')
252
252
    def restorestatus(self):
        """Restore a previously stored status"""
        if not self.stateobj.exists():
            # No rebase in progress; point the user at the right command.
            cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))

        data = self._read()
        self.repo.ui.debug(b'rebase status resumed\n')

        # Copy parsed state (see _read) onto the runtime object.
        self.originalwd = data[b'originalwd']
        self.destmap = data[b'destmap']
        self.state = data[b'state']
        self.skipped = data[b'skipped']
        self.collapsef = data[b'collapse']
        self.keepf = data[b'keep']
        self.keepbranchesf = data[b'keepbranches']
        self.external = data[b'external']
        self.activebookmark = data[b'activebookmark']
270
270
271 def _read(self):
271 def _read(self):
272 self.prepared = True
272 self.prepared = True
273 repo = self.repo
273 repo = self.repo
274 assert repo.filtername is None
274 assert repo.filtername is None
275 data = {
275 data = {
276 b'keepbranches': None,
276 b'keepbranches': None,
277 b'collapse': None,
277 b'collapse': None,
278 b'activebookmark': None,
278 b'activebookmark': None,
279 b'external': nullrev,
279 b'external': nullrev,
280 b'keep': None,
280 b'keep': None,
281 b'originalwd': None,
281 b'originalwd': None,
282 }
282 }
283 legacydest = None
283 legacydest = None
284 state = {}
284 state = {}
285 destmap = {}
285 destmap = {}
286
286
287 if True:
287 if True:
288 f = repo.vfs(b"rebasestate")
288 f = repo.vfs(b"rebasestate")
289 for i, l in enumerate(f.read().splitlines()):
289 for i, l in enumerate(f.read().splitlines()):
290 if i == 0:
290 if i == 0:
291 data[b'originalwd'] = repo[l].rev()
291 data[b'originalwd'] = repo[l].rev()
292 elif i == 1:
292 elif i == 1:
293 # this line should be empty in newer version. but legacy
293 # this line should be empty in newer version. but legacy
294 # clients may still use it
294 # clients may still use it
295 if l:
295 if l:
296 legacydest = repo[l].rev()
296 legacydest = repo[l].rev()
297 elif i == 2:
297 elif i == 2:
298 data[b'external'] = repo[l].rev()
298 data[b'external'] = repo[l].rev()
299 elif i == 3:
299 elif i == 3:
300 data[b'collapse'] = bool(int(l))
300 data[b'collapse'] = bool(int(l))
301 elif i == 4:
301 elif i == 4:
302 data[b'keep'] = bool(int(l))
302 data[b'keep'] = bool(int(l))
303 elif i == 5:
303 elif i == 5:
304 data[b'keepbranches'] = bool(int(l))
304 data[b'keepbranches'] = bool(int(l))
305 elif i == 6 and not (len(l) == 81 and b':' in l):
305 elif i == 6 and not (len(l) == 81 and b':' in l):
306 # line 6 is a recent addition, so for backwards
306 # line 6 is a recent addition, so for backwards
307 # compatibility check that the line doesn't look like the
307 # compatibility check that the line doesn't look like the
308 # oldrev:newrev lines
308 # oldrev:newrev lines
309 data[b'activebookmark'] = l
309 data[b'activebookmark'] = l
310 else:
310 else:
311 args = l.split(b':')
311 args = l.split(b':')
312 oldrev = repo[args[0]].rev()
312 oldrev = repo[args[0]].rev()
313 newrev = args[1]
313 newrev = args[1]
314 if newrev in legacystates:
314 if newrev in legacystates:
315 continue
315 continue
316 if len(args) > 2:
316 if len(args) > 2:
317 destrev = repo[args[2]].rev()
317 destrev = repo[args[2]].rev()
318 else:
318 else:
319 destrev = legacydest
319 destrev = legacydest
320 destmap[oldrev] = destrev
320 destmap[oldrev] = destrev
321 if newrev == revtodostr:
321 if newrev == revtodostr:
322 state[oldrev] = revtodo
322 state[oldrev] = revtodo
323 # Legacy compat special case
323 # Legacy compat special case
324 else:
324 else:
325 state[oldrev] = repo[newrev].rev()
325 state[oldrev] = repo[newrev].rev()
326
326
327 if data[b'keepbranches'] is None:
327 if data[b'keepbranches'] is None:
328 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
328 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
329
329
330 data[b'destmap'] = destmap
330 data[b'destmap'] = destmap
331 data[b'state'] = state
331 data[b'state'] = state
332 skipped = set()
332 skipped = set()
333 # recompute the set of skipped revs
333 # recompute the set of skipped revs
334 if not data[b'collapse']:
334 if not data[b'collapse']:
335 seen = set(destmap.values())
335 seen = set(destmap.values())
336 for old, new in sorted(state.items()):
336 for old, new in sorted(state.items()):
337 if new != revtodo and new in seen:
337 if new != revtodo and new in seen:
338 skipped.add(old)
338 skipped.add(old)
339 seen.add(new)
339 seen.add(new)
340 data[b'skipped'] = skipped
340 data[b'skipped'] = skipped
341 repo.ui.debug(
341 repo.ui.debug(
342 b'computed skipped revs: %s\n'
342 b'computed skipped revs: %s\n'
343 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
343 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
344 )
344 )
345
345
346 return data
346 return data
347
347
    def _handleskippingobsolete(self, obsoleterevs, destmap):
        """Compute structures necessary for skipping obsolete revisions

        obsoleterevs: iterable of all obsolete revisions in rebaseset
        destmap: {srcrev: destrev} destination revisions
        """
        self.obsoletenotrebased = {}
        # Skipping obsolete revisions is gated by an experimental knob.
        if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
            return
        obsoleteset = set(obsoleterevs)
        (
            self.obsoletenotrebased,
            self.obsoletewithoutsuccessorindestination,
            obsoleteextinctsuccessors,
        ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
        # Union of every category of skipped obsolete rev, used only for the
        # sanity check below.
        skippedset = set(self.obsoletenotrebased)
        skippedset.update(self.obsoletewithoutsuccessorindestination)
        skippedset.update(obsoleteextinctsuccessors)
        _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
367
367
    def _prepareabortorcontinue(self, isabort, backup=True, suppwarns=False):
        """Restore saved state for --abort or --continue.

        isabort: True for --abort, False for --continue
        backup: whether a backup bundle may be saved when aborting (further
            gated by self.backupf, i.e. the rewrite.backup-bundle config)
        suppwarns: forwarded to _abort to suppress warnings

        Returns an exit status when aborting (0 if only broken state was
        cleared); raises error.Abort when --continue finds inconsistent
        state; otherwise returns None after restoring state.
        """
        try:
            self.restorestatus()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            # The state file references revisions the repo no longer knows.
            if isabort:
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(
                    _(
                        b'rebase aborted (no revision is removed,'
                        b' only broken state is cleared)\n'
                    )
                )
                return 0
            else:
                msg = _(b'cannot continue inconsistent rebase')
                hint = _(b'use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)

        if isabort:
            backup = backup and self.backupf
            return self._abort(backup=backup, suppwarns=suppwarns)
391
391
    def _preparenewrebase(self, destmap):
        """Validate ``destmap`` and build the initial rebase state.

        destmap: {srcrev: destrev} mapping for the new rebase.

        Returns _nothingtorebase() (1) when there is nothing to do; raises
        error.Abort on precheck failure or an invalid --collapse setup;
        returns None on success after populating originalwd/destmap/state.
        """
        if not destmap:
            return _nothingtorebase()

        rebaseset = destmap.keys()
        if not self.keepf:
            try:
                rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
            except error.Abort as e:
                # Suggest --keep when rewriting these revisions is refused.
                if e.hint is None:
                    e.hint = _(b'use --keep to keep original changesets')
                raise e

        result = buildstate(self.repo, destmap, self.collapsef)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_(b'nothing to rebase\n'))
            return _nothingtorebase()

        (self.originalwd, self.destmap, self.state) = result
        if self.collapsef:
            # --collapse folds everything onto a single destination, so it
            # needs exactly one destination and an "external" parent.
            dests = set(self.destmap.values())
            if len(dests) != 1:
                raise error.Abort(
                    _(b'--collapse does not work with multiple destinations')
                )
            destrev = next(iter(dests))
            destancestors = self.repo.changelog.ancestors(
                [destrev], inclusive=True
            )
            self.external = externalparent(self.repo, self.state, destancestors)

        for destrev in sorted(set(destmap.values())):
            dest = self.repo[destrev]
            if dest.closesbranch() and not self.keepbranchesf:
                self.ui.status(_(b'reopening closed branch head %s\n') % dest)

        # From here on self.repo resolves to the unfiltered repo (see the
        # repo property).
        self.prepared = True
431
431
432 def _assignworkingcopy(self):
432 def _assignworkingcopy(self):
433 if self.inmemory:
433 if self.inmemory:
434 from mercurial.context import overlayworkingctx
434 from mercurial.context import overlayworkingctx
435
435
436 self.wctx = overlayworkingctx(self.repo)
436 self.wctx = overlayworkingctx(self.repo)
437 self.repo.ui.debug(b"rebasing in-memory\n")
437 self.repo.ui.debug(b"rebasing in-memory\n")
438 else:
438 else:
439 self.wctx = self.repo[None]
439 self.wctx = self.repo[None]
440 self.repo.ui.debug(b"rebasing on disk\n")
440 self.repo.ui.debug(b"rebasing on disk\n")
441 self.repo.ui.log(
441 self.repo.ui.log(
442 b"rebase",
442 b"rebase",
443 b"using in-memory rebase: %r\n",
443 b"using in-memory rebase: %r\n",
444 self.inmemory,
444 self.inmemory,
445 rebase_imm_used=self.inmemory,
445 rebase_imm_used=self.inmemory,
446 )
446 )
447
447
    def _performrebase(self, tr):
        """Run the main rebase loop over every revision in self.state.

        tr: transaction to register state writes with, or None when nodes
        are committed outside a single transaction.
        """
        self._assignworkingcopy()
        repo, ui = self.repo, self.ui
        if self.keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            self.extrafns.insert(0, _savebranch)
            if self.collapsef:
                # Collapsing across named branches would lose branch info.
                branches = set()
                for rev in self.state:
                    branches.add(repo[rev].branch())
                    if len(branches) > 1:
                        raise error.Abort(
                            _(b'cannot collapse multiple named branches')
                        )

        # Calculate self.obsoletenotrebased
        obsrevs = _filterobsoleterevs(self.repo, self.state)
        self._handleskippingobsolete(obsrevs, self.destmap)

        # Keep track of the active bookmarks in order to reset them later
        self.activebookmark = self.activebookmark or repo._activebookmark
        if self.activebookmark:
            bookmarks.deactivate(repo)

        # Store the state before we begin so users can run 'hg rebase --abort'
        # if we fail before the transaction closes.
        self.storestatus()
        if tr:
            # When using single transaction, store state when transaction
            # commits.
            self.storestatus(tr)

        # Progress is measured over revisions still marked revtodo.
        cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
        p = repo.ui.makeprogress(
            _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
        )

        def progress(ctx):
            p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))

        allowdivergence = self.ui.configbool(
            b'experimental', b'evolution.allowdivergence'
        )
        for subset in sortsource(self.destmap):
            sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
            if not allowdivergence:
                # Drop descendants of obsolete revs without a successor in
                # the destination to avoid creating divergence.
                sortedrevs -= self.repo.revs(
                    b'descendants(%ld) and not %ld',
                    self.obsoletewithoutsuccessorindestination,
                    self.obsoletewithoutsuccessorindestination,
                )
            for rev in sortedrevs:
                self._rebasenode(tr, rev, allowdivergence, progress)
        p.complete()
        ui.note(_(b'rebase merging completed\n'))
505
505
    def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
        '''Commit the wd changes with parents p1 and p2.

        Reuse commit info from rev but also store useful information in extra.
        Return node of committed revision.'''
        repo = self.repo
        ctx = repo[rev]
        if commitmsg is None:
            commitmsg = ctx.description()
        # An explicit --date (self.date) overrides the original date.
        date = self.date
        if date is None:
            date = ctx.date()
        extra = {b'rebase_source': ctx.hex()}
        # Let each registered extrafn (e.g. _savegraft, _savebranch) add to
        # the commit extras.
        for c in self.extrafns:
            c(ctx, extra)
        keepbranch = self.keepbranchesf and repo[p1].branch() != ctx.branch()
        # Rebased commits are at least draft, even if the source was secret.
        destphase = max(ctx.phase(), phases.draft)
        overrides = {(b'phases', b'new-commit'): destphase}
        if keepbranch:
            # Allow an otherwise-empty commit so the branch change survives.
            overrides[(b'ui', b'allowemptycommit')] = True
        with repo.ui.configoverride(overrides, b'rebase'):
            if self.inmemory:
                newnode = commitmemorynode(
                    repo,
                    p1,
                    p2,
                    wctx=self.wctx,
                    extra=extra,
                    commitmsg=commitmsg,
                    editor=editor,
                    user=ctx.user(),
                    date=date,
                )
                mergemod.mergestate.clean(repo)
            else:
                newnode = commitnode(
                    repo,
                    p1,
                    p2,
                    extra=extra,
                    commitmsg=commitmsg,
                    editor=editor,
                    user=ctx.user(),
                    date=date,
                )

            if newnode is None:
                # If it ended up being a no-op commit, then the normal
                # merge state clean-up path doesn't happen, so do it
                # here. Fix issue5494
                mergemod.mergestate.clean(repo)
            return newnode
558
558
559 def _rebasenode(self, tr, rev, allowdivergence, progressfn):
559 def _rebasenode(self, tr, rev, allowdivergence, progressfn):
560 repo, ui, opts = self.repo, self.ui, self.opts
560 repo, ui, opts = self.repo, self.ui, self.opts
561 dest = self.destmap[rev]
561 dest = self.destmap[rev]
562 ctx = repo[rev]
562 ctx = repo[rev]
563 desc = _ctxdesc(ctx)
563 desc = _ctxdesc(ctx)
564 if self.state[rev] == rev:
564 if self.state[rev] == rev:
565 ui.status(_(b'already rebased %s\n') % desc)
565 ui.status(_(b'already rebased %s\n') % desc)
566 elif (
566 elif (
567 not allowdivergence
567 not allowdivergence
568 and rev in self.obsoletewithoutsuccessorindestination
568 and rev in self.obsoletewithoutsuccessorindestination
569 ):
569 ):
570 msg = (
570 msg = (
571 _(
571 _(
572 b'note: not rebasing %s and its descendants as '
572 b'note: not rebasing %s and its descendants as '
573 b'this would cause divergence\n'
573 b'this would cause divergence\n'
574 )
574 )
575 % desc
575 % desc
576 )
576 )
577 repo.ui.status(msg)
577 repo.ui.status(msg)
578 self.skipped.add(rev)
578 self.skipped.add(rev)
579 elif rev in self.obsoletenotrebased:
579 elif rev in self.obsoletenotrebased:
580 succ = self.obsoletenotrebased[rev]
580 succ = self.obsoletenotrebased[rev]
581 if succ is None:
581 if succ is None:
582 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
582 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
583 else:
583 else:
584 succdesc = _ctxdesc(repo[succ])
584 succdesc = _ctxdesc(repo[succ])
585 msg = _(
585 msg = _(
586 b'note: not rebasing %s, already in destination as %s\n'
586 b'note: not rebasing %s, already in destination as %s\n'
587 ) % (desc, succdesc)
587 ) % (desc, succdesc)
588 repo.ui.status(msg)
588 repo.ui.status(msg)
589 # Make clearrebased aware state[rev] is not a true successor
589 # Make clearrebased aware state[rev] is not a true successor
590 self.skipped.add(rev)
590 self.skipped.add(rev)
591 # Record rev as moved to its desired destination in self.state.
591 # Record rev as moved to its desired destination in self.state.
592 # This helps bookmark and working parent movement.
592 # This helps bookmark and working parent movement.
593 dest = max(
593 dest = max(
594 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
594 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
595 )
595 )
596 self.state[rev] = dest
596 self.state[rev] = dest
597 elif self.state[rev] == revtodo:
597 elif self.state[rev] == revtodo:
598 ui.status(_(b'rebasing %s\n') % desc)
598 ui.status(_(b'rebasing %s\n') % desc)
599 progressfn(ctx)
599 progressfn(ctx)
600 p1, p2, base = defineparents(
600 p1, p2, base = defineparents(
601 repo,
601 repo,
602 rev,
602 rev,
603 self.destmap,
603 self.destmap,
604 self.state,
604 self.state,
605 self.skipped,
605 self.skipped,
606 self.obsoletenotrebased,
606 self.obsoletenotrebased,
607 )
607 )
608 if not self.inmemory and len(repo[None].parents()) == 2:
608 if not self.inmemory and len(repo[None].parents()) == 2:
609 repo.ui.debug(b'resuming interrupted rebase\n')
609 repo.ui.debug(b'resuming interrupted rebase\n')
610 else:
610 else:
611 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
611 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
612 with ui.configoverride(overrides, b'rebase'):
612 with ui.configoverride(overrides, b'rebase'):
613 stats = rebasenode(
613 stats = rebasenode(
614 repo,
614 repo,
615 rev,
615 rev,
616 p1,
616 p1,
617 base,
617 base,
618 self.collapsef,
618 self.collapsef,
619 dest,
619 dest,
620 wctx=self.wctx,
620 wctx=self.wctx,
621 )
621 )
622 if stats.unresolvedcount > 0:
622 if stats.unresolvedcount > 0:
623 if self.inmemory:
623 if self.inmemory:
624 raise error.InMemoryMergeConflictsError()
624 raise error.InMemoryMergeConflictsError()
625 else:
625 else:
626 raise error.InterventionRequired(
626 raise error.InterventionRequired(
627 _(
627 _(
628 b'unresolved conflicts (see hg '
628 b'unresolved conflicts (see hg '
629 b'resolve, then hg rebase --continue)'
629 b'resolve, then hg rebase --continue)'
630 )
630 )
631 )
631 )
632 if not self.collapsef:
632 if not self.collapsef:
633 merging = p2 != nullrev
633 merging = p2 != nullrev
634 editform = cmdutil.mergeeditform(merging, b'rebase')
634 editform = cmdutil.mergeeditform(merging, b'rebase')
635 editor = cmdutil.getcommiteditor(
635 editor = cmdutil.getcommiteditor(
636 editform=editform, **pycompat.strkwargs(opts)
636 editform=editform, **pycompat.strkwargs(opts)
637 )
637 )
638 newnode = self._concludenode(rev, p1, p2, editor)
638 newnode = self._concludenode(rev, p1, p2, editor)
639 else:
639 else:
640 # Skip commit if we are collapsing
640 # Skip commit if we are collapsing
641 if self.inmemory:
641 if self.inmemory:
642 self.wctx.setbase(repo[p1])
642 self.wctx.setbase(repo[p1])
643 else:
643 else:
644 repo.setparents(repo[p1].node())
644 repo.setparents(repo[p1].node())
645 newnode = None
645 newnode = None
646 # Update the state
646 # Update the state
647 if newnode is not None:
647 if newnode is not None:
648 self.state[rev] = repo[newnode].rev()
648 self.state[rev] = repo[newnode].rev()
649 ui.debug(b'rebased as %s\n' % short(newnode))
649 ui.debug(b'rebased as %s\n' % short(newnode))
650 else:
650 else:
651 if not self.collapsef:
651 if not self.collapsef:
652 ui.warn(
652 ui.warn(
653 _(
653 _(
654 b'note: not rebasing %s, its destination already '
654 b'note: not rebasing %s, its destination already '
655 b'has all its changes\n'
655 b'has all its changes\n'
656 )
656 )
657 % desc
657 % desc
658 )
658 )
659 self.skipped.add(rev)
659 self.skipped.add(rev)
660 self.state[rev] = p1
660 self.state[rev] = p1
661 ui.debug(b'next revision set to %d\n' % p1)
661 ui.debug(b'next revision set to %d\n' % p1)
662 else:
662 else:
663 ui.status(
663 ui.status(
664 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
664 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
665 )
665 )
666 if not tr:
666 if not tr:
667 # When not using single transaction, store state after each
667 # When not using single transaction, store state after each
668 # commit is completely done. On InterventionRequired, we thus
668 # commit is completely done. On InterventionRequired, we thus
669 # won't store the status. Instead, we'll hit the "len(parents) == 2"
669 # won't store the status. Instead, we'll hit the "len(parents) == 2"
670 # case and realize that the commit was in progress.
670 # case and realize that the commit was in progress.
671 self.storestatus()
671 self.storestatus()
672
672
673 def _finishrebase(self):
673 def _finishrebase(self):
674 repo, ui, opts = self.repo, self.ui, self.opts
674 repo, ui, opts = self.repo, self.ui, self.opts
675 fm = ui.formatter(b'rebase', opts)
675 fm = ui.formatter(b'rebase', opts)
676 fm.startitem()
676 fm.startitem()
677 if self.collapsef:
677 if self.collapsef:
678 p1, p2, _base = defineparents(
678 p1, p2, _base = defineparents(
679 repo,
679 repo,
680 min(self.state),
680 min(self.state),
681 self.destmap,
681 self.destmap,
682 self.state,
682 self.state,
683 self.skipped,
683 self.skipped,
684 self.obsoletenotrebased,
684 self.obsoletenotrebased,
685 )
685 )
686 editopt = opts.get(b'edit')
686 editopt = opts.get(b'edit')
687 editform = b'rebase.collapse'
687 editform = b'rebase.collapse'
688 if self.collapsemsg:
688 if self.collapsemsg:
689 commitmsg = self.collapsemsg
689 commitmsg = self.collapsemsg
690 else:
690 else:
691 commitmsg = b'Collapsed revision'
691 commitmsg = b'Collapsed revision'
692 for rebased in sorted(self.state):
692 for rebased in sorted(self.state):
693 if rebased not in self.skipped:
693 if rebased not in self.skipped:
694 commitmsg += b'\n* %s' % repo[rebased].description()
694 commitmsg += b'\n* %s' % repo[rebased].description()
695 editopt = True
695 editopt = True
696 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
696 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
697 revtoreuse = max(self.state)
697 revtoreuse = max(self.state)
698
698
699 newnode = self._concludenode(
699 newnode = self._concludenode(
700 revtoreuse, p1, self.external, editor, commitmsg=commitmsg
700 revtoreuse, p1, self.external, editor, commitmsg=commitmsg
701 )
701 )
702
702
703 if newnode is not None:
703 if newnode is not None:
704 newrev = repo[newnode].rev()
704 newrev = repo[newnode].rev()
705 for oldrev in self.state:
705 for oldrev in self.state:
706 self.state[oldrev] = newrev
706 self.state[oldrev] = newrev
707
707
708 if b'qtip' in repo.tags():
708 if b'qtip' in repo.tags():
709 updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
709 updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
710
710
711 # restore original working directory
711 # restore original working directory
712 # (we do this before stripping)
712 # (we do this before stripping)
713 newwd = self.state.get(self.originalwd, self.originalwd)
713 newwd = self.state.get(self.originalwd, self.originalwd)
714 if newwd < 0:
714 if newwd < 0:
715 # original directory is a parent of rebase set root or ignored
715 # original directory is a parent of rebase set root or ignored
716 newwd = self.originalwd
716 newwd = self.originalwd
717 if newwd not in [c.rev() for c in repo[None].parents()]:
717 if newwd not in [c.rev() for c in repo[None].parents()]:
718 ui.note(_(b"update back to initial working directory parent\n"))
718 ui.note(_(b"update back to initial working directory parent\n"))
719 hg.updaterepo(repo, newwd, overwrite=False)
719 hg.updaterepo(repo, newwd, overwrite=False)
720
720
721 collapsedas = None
721 collapsedas = None
722 if self.collapsef and not self.keepf:
722 if self.collapsef and not self.keepf:
723 collapsedas = newnode
723 collapsedas = newnode
724 clearrebased(
724 clearrebased(
725 ui,
725 ui,
726 repo,
726 repo,
727 self.destmap,
727 self.destmap,
728 self.state,
728 self.state,
729 self.skipped,
729 self.skipped,
730 collapsedas,
730 collapsedas,
731 self.keepf,
731 self.keepf,
732 fm=fm,
732 fm=fm,
733 backup=self.backupf,
733 backup=self.backupf,
734 )
734 )
735
735
736 clearstatus(repo)
736 clearstatus(repo)
737 clearcollapsemsg(repo)
737 clearcollapsemsg(repo)
738
738
739 ui.note(_(b"rebase completed\n"))
739 ui.note(_(b"rebase completed\n"))
740 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
740 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
741 if self.skipped:
741 if self.skipped:
742 skippedlen = len(self.skipped)
742 skippedlen = len(self.skipped)
743 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
743 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
744 fm.end()
744 fm.end()
745
745
746 if (
746 if (
747 self.activebookmark
747 self.activebookmark
748 and self.activebookmark in repo._bookmarks
748 and self.activebookmark in repo._bookmarks
749 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
749 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
750 ):
750 ):
751 bookmarks.activate(repo, self.activebookmark)
751 bookmarks.activate(repo, self.activebookmark)
752
752
753 def _abort(self, backup=True, suppwarns=False):
753 def _abort(self, backup=True, suppwarns=False):
754 '''Restore the repository to its original state.'''
754 '''Restore the repository to its original state.'''
755
755
756 repo = self.repo
756 repo = self.repo
757 try:
757 try:
758 # If the first commits in the rebased set get skipped during the
758 # If the first commits in the rebased set get skipped during the
759 # rebase, their values within the state mapping will be the dest
759 # rebase, their values within the state mapping will be the dest
760 # rev id. The rebased list must must not contain the dest rev
760 # rev id. The rebased list must must not contain the dest rev
761 # (issue4896)
761 # (issue4896)
762 rebased = [
762 rebased = [
763 s
763 s
764 for r, s in self.state.items()
764 for r, s in self.state.items()
765 if s >= 0 and s != r and s != self.destmap[r]
765 if s >= 0 and s != r and s != self.destmap[r]
766 ]
766 ]
767 immutable = [d for d in rebased if not repo[d].mutable()]
767 immutable = [d for d in rebased if not repo[d].mutable()]
768 cleanup = True
768 cleanup = True
769 if immutable:
769 if immutable:
770 repo.ui.warn(
770 repo.ui.warn(
771 _(b"warning: can't clean up public changesets %s\n")
771 _(b"warning: can't clean up public changesets %s\n")
772 % b', '.join(bytes(repo[r]) for r in immutable),
772 % b', '.join(bytes(repo[r]) for r in immutable),
773 hint=_(b"see 'hg help phases' for details"),
773 hint=_(b"see 'hg help phases' for details"),
774 )
774 )
775 cleanup = False
775 cleanup = False
776
776
777 descendants = set()
777 descendants = set()
778 if rebased:
778 if rebased:
779 descendants = set(repo.changelog.descendants(rebased))
779 descendants = set(repo.changelog.descendants(rebased))
780 if descendants - set(rebased):
780 if descendants - set(rebased):
781 repo.ui.warn(
781 repo.ui.warn(
782 _(
782 _(
783 b"warning: new changesets detected on "
783 b"warning: new changesets detected on "
784 b"destination branch, can't strip\n"
784 b"destination branch, can't strip\n"
785 )
785 )
786 )
786 )
787 cleanup = False
787 cleanup = False
788
788
789 if cleanup:
789 if cleanup:
790 if rebased:
790 if rebased:
791 strippoints = [
791 strippoints = [
792 c.node() for c in repo.set(b'roots(%ld)', rebased)
792 c.node() for c in repo.set(b'roots(%ld)', rebased)
793 ]
793 ]
794
794
795 updateifonnodes = set(rebased)
795 updateifonnodes = set(rebased)
796 updateifonnodes.update(self.destmap.values())
796 updateifonnodes.update(self.destmap.values())
797 updateifonnodes.add(self.originalwd)
797 updateifonnodes.add(self.originalwd)
798 shouldupdate = repo[b'.'].rev() in updateifonnodes
798 shouldupdate = repo[b'.'].rev() in updateifonnodes
799
799
800 # Update away from the rebase if necessary
800 # Update away from the rebase if necessary
801 if shouldupdate:
801 if shouldupdate:
802 mergemod.update(
802 mergemod.update(
803 repo, self.originalwd, branchmerge=False, force=True
803 repo, self.originalwd, branchmerge=False, force=True
804 )
804 )
805
805
806 # Strip from the first rebased revision
806 # Strip from the first rebased revision
807 if rebased:
807 if rebased:
808 repair.strip(repo.ui, repo, strippoints, backup=backup)
808 repair.strip(repo.ui, repo, strippoints, backup=backup)
809
809
810 if self.activebookmark and self.activebookmark in repo._bookmarks:
810 if self.activebookmark and self.activebookmark in repo._bookmarks:
811 bookmarks.activate(repo, self.activebookmark)
811 bookmarks.activate(repo, self.activebookmark)
812
812
813 finally:
813 finally:
814 clearstatus(repo)
814 clearstatus(repo)
815 clearcollapsemsg(repo)
815 clearcollapsemsg(repo)
816 if not suppwarns:
816 if not suppwarns:
817 repo.ui.warn(_(b'rebase aborted\n'))
817 repo.ui.warn(_(b'rebase aborted\n'))
818 return 0
818 return 0
819
819
820
820
821 @command(
821 @command(
822 b'rebase',
822 b'rebase',
823 [
823 [
824 (
824 (
825 b's',
825 b's',
826 b'source',
826 b'source',
827 b'',
827 b'',
828 _(b'rebase the specified changeset and descendants'),
828 _(b'rebase the specified changeset and descendants'),
829 _(b'REV'),
829 _(b'REV'),
830 ),
830 ),
831 (
831 (
832 b'b',
832 b'b',
833 b'base',
833 b'base',
834 b'',
834 b'',
835 _(b'rebase everything from branching point of specified changeset'),
835 _(b'rebase everything from branching point of specified changeset'),
836 _(b'REV'),
836 _(b'REV'),
837 ),
837 ),
838 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
838 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
839 (
839 (
840 b'd',
840 b'd',
841 b'dest',
841 b'dest',
842 b'',
842 b'',
843 _(b'rebase onto the specified changeset'),
843 _(b'rebase onto the specified changeset'),
844 _(b'REV'),
844 _(b'REV'),
845 ),
845 ),
846 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
846 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
847 (
847 (
848 b'm',
848 b'm',
849 b'message',
849 b'message',
850 b'',
850 b'',
851 _(b'use text as collapse commit message'),
851 _(b'use text as collapse commit message'),
852 _(b'TEXT'),
852 _(b'TEXT'),
853 ),
853 ),
854 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
854 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
855 (
855 (
856 b'l',
856 b'l',
857 b'logfile',
857 b'logfile',
858 b'',
858 b'',
859 _(b'read collapse commit message from file'),
859 _(b'read collapse commit message from file'),
860 _(b'FILE'),
860 _(b'FILE'),
861 ),
861 ),
862 (b'k', b'keep', False, _(b'keep original changesets')),
862 (b'k', b'keep', False, _(b'keep original changesets')),
863 (b'', b'keepbranches', False, _(b'keep original branch names')),
863 (b'', b'keepbranches', False, _(b'keep original branch names')),
864 (b'D', b'detach', False, _(b'(DEPRECATED)')),
864 (b'D', b'detach', False, _(b'(DEPRECATED)')),
865 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
865 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
866 (b't', b'tool', b'', _(b'specify merge tool')),
866 (b't', b'tool', b'', _(b'specify merge tool')),
867 (b'', b'stop', False, _(b'stop interrupted rebase')),
867 (b'', b'stop', False, _(b'stop interrupted rebase')),
868 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
868 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
869 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
869 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
870 (
870 (
871 b'',
871 b'',
872 b'auto-orphans',
872 b'auto-orphans',
873 b'',
873 b'',
874 _(
874 _(
875 b'automatically rebase orphan revisions '
875 b'automatically rebase orphan revisions '
876 b'in the specified revset (EXPERIMENTAL)'
876 b'in the specified revset (EXPERIMENTAL)'
877 ),
877 ),
878 ),
878 ),
879 ]
879 ]
880 + cmdutil.dryrunopts
880 + cmdutil.dryrunopts
881 + cmdutil.formatteropts
881 + cmdutil.formatteropts
882 + cmdutil.confirmopts,
882 + cmdutil.confirmopts,
883 _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
883 _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
884 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
884 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
885 )
885 )
886 def rebase(ui, repo, **opts):
886 def rebase(ui, repo, **opts):
887 """move changeset (and descendants) to a different branch
887 """move changeset (and descendants) to a different branch
888
888
889 Rebase uses repeated merging to graft changesets from one part of
889 Rebase uses repeated merging to graft changesets from one part of
890 history (the source) onto another (the destination). This can be
890 history (the source) onto another (the destination). This can be
891 useful for linearizing *local* changes relative to a master
891 useful for linearizing *local* changes relative to a master
892 development tree.
892 development tree.
893
893
894 Published commits cannot be rebased (see :hg:`help phases`).
894 Published commits cannot be rebased (see :hg:`help phases`).
895 To copy commits, see :hg:`help graft`.
895 To copy commits, see :hg:`help graft`.
896
896
897 If you don't specify a destination changeset (``-d/--dest``), rebase
897 If you don't specify a destination changeset (``-d/--dest``), rebase
898 will use the same logic as :hg:`merge` to pick a destination. if
898 will use the same logic as :hg:`merge` to pick a destination. if
899 the current branch contains exactly one other head, the other head
899 the current branch contains exactly one other head, the other head
900 is merged with by default. Otherwise, an explicit revision with
900 is merged with by default. Otherwise, an explicit revision with
901 which to merge with must be provided. (destination changeset is not
901 which to merge with must be provided. (destination changeset is not
902 modified by rebasing, but new changesets are added as its
902 modified by rebasing, but new changesets are added as its
903 descendants.)
903 descendants.)
904
904
905 Here are the ways to select changesets:
905 Here are the ways to select changesets:
906
906
907 1. Explicitly select them using ``--rev``.
907 1. Explicitly select them using ``--rev``.
908
908
909 2. Use ``--source`` to select a root changeset and include all of its
909 2. Use ``--source`` to select a root changeset and include all of its
910 descendants.
910 descendants.
911
911
912 3. Use ``--base`` to select a changeset; rebase will find ancestors
912 3. Use ``--base`` to select a changeset; rebase will find ancestors
913 and their descendants which are not also ancestors of the destination.
913 and their descendants which are not also ancestors of the destination.
914
914
915 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
915 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
916 rebase will use ``--base .`` as above.
916 rebase will use ``--base .`` as above.
917
917
918 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
918 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
919 can be used in ``--dest``. Destination would be calculated per source
919 can be used in ``--dest``. Destination would be calculated per source
920 revision with ``SRC`` substituted by that single source revision and
920 revision with ``SRC`` substituted by that single source revision and
921 ``ALLSRC`` substituted by all source revisions.
921 ``ALLSRC`` substituted by all source revisions.
922
922
923 Rebase will destroy original changesets unless you use ``--keep``.
923 Rebase will destroy original changesets unless you use ``--keep``.
924 It will also move your bookmarks (even if you do).
924 It will also move your bookmarks (even if you do).
925
925
926 Some changesets may be dropped if they do not contribute changes
926 Some changesets may be dropped if they do not contribute changes
927 (e.g. merges from the destination branch).
927 (e.g. merges from the destination branch).
928
928
929 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
929 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
930 a named branch with two heads. You will need to explicitly specify source
930 a named branch with two heads. You will need to explicitly specify source
931 and/or destination.
931 and/or destination.
932
932
933 If you need to use a tool to automate merge/conflict decisions, you
933 If you need to use a tool to automate merge/conflict decisions, you
934 can specify one with ``--tool``, see :hg:`help merge-tools`.
934 can specify one with ``--tool``, see :hg:`help merge-tools`.
935 As a caveat: the tool will not be used to mediate when a file was
935 As a caveat: the tool will not be used to mediate when a file was
936 deleted, there is no hook presently available for this.
936 deleted, there is no hook presently available for this.
937
937
938 If a rebase is interrupted to manually resolve a conflict, it can be
938 If a rebase is interrupted to manually resolve a conflict, it can be
939 continued with --continue/-c, aborted with --abort/-a, or stopped with
939 continued with --continue/-c, aborted with --abort/-a, or stopped with
940 --stop.
940 --stop.
941
941
942 .. container:: verbose
942 .. container:: verbose
943
943
944 Examples:
944 Examples:
945
945
946 - move "local changes" (current commit back to branching point)
946 - move "local changes" (current commit back to branching point)
947 to the current branch tip after a pull::
947 to the current branch tip after a pull::
948
948
949 hg rebase
949 hg rebase
950
950
951 - move a single changeset to the stable branch::
951 - move a single changeset to the stable branch::
952
952
953 hg rebase -r 5f493448 -d stable
953 hg rebase -r 5f493448 -d stable
954
954
955 - splice a commit and all its descendants onto another part of history::
955 - splice a commit and all its descendants onto another part of history::
956
956
957 hg rebase --source c0c3 --dest 4cf9
957 hg rebase --source c0c3 --dest 4cf9
958
958
959 - rebase everything on a branch marked by a bookmark onto the
959 - rebase everything on a branch marked by a bookmark onto the
960 default branch::
960 default branch::
961
961
962 hg rebase --base myfeature --dest default
962 hg rebase --base myfeature --dest default
963
963
964 - collapse a sequence of changes into a single commit::
964 - collapse a sequence of changes into a single commit::
965
965
966 hg rebase --collapse -r 1520:1525 -d .
966 hg rebase --collapse -r 1520:1525 -d .
967
967
968 - move a named branch while preserving its name::
968 - move a named branch while preserving its name::
969
969
970 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
970 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
971
971
972 - stabilize orphaned changesets so history looks linear::
972 - stabilize orphaned changesets so history looks linear::
973
973
974 hg rebase -r 'orphan()-obsolete()'\
974 hg rebase -r 'orphan()-obsolete()'\
975 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
975 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
976 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
976 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
977
977
978 Configuration Options:
978 Configuration Options:
979
979
980 You can make rebase require a destination if you set the following config
980 You can make rebase require a destination if you set the following config
981 option::
981 option::
982
982
983 [commands]
983 [commands]
984 rebase.requiredest = True
984 rebase.requiredest = True
985
985
986 By default, rebase will close the transaction after each commit. For
986 By default, rebase will close the transaction after each commit. For
987 performance purposes, you can configure rebase to use a single transaction
987 performance purposes, you can configure rebase to use a single transaction
988 across the entire rebase. WARNING: This setting introduces a significant
988 across the entire rebase. WARNING: This setting introduces a significant
989 risk of losing the work you've done in a rebase if the rebase aborts
989 risk of losing the work you've done in a rebase if the rebase aborts
990 unexpectedly::
990 unexpectedly::
991
991
992 [rebase]
992 [rebase]
993 singletransaction = True
993 singletransaction = True
994
994
995 By default, rebase writes to the working copy, but you can configure it to
995 By default, rebase writes to the working copy, but you can configure it to
996 run in-memory for better performance. When the rebase is not moving the
996 run in-memory for better performance. When the rebase is not moving the
997 parent(s) of the working copy (AKA the "currently checked out changesets"),
997 parent(s) of the working copy (AKA the "currently checked out changesets"),
998 this may also allow it to run even if the working copy is dirty::
998 this may also allow it to run even if the working copy is dirty::
999
999
1000 [rebase]
1000 [rebase]
1001 experimental.inmemory = True
1001 experimental.inmemory = True
1002
1002
1003 Return Values:
1003 Return Values:
1004
1004
1005 Returns 0 on success, 1 if nothing to rebase or there are
1005 Returns 0 on success, 1 if nothing to rebase or there are
1006 unresolved conflicts.
1006 unresolved conflicts.
1007
1007
1008 """
1008 """
1009 opts = pycompat.byteskwargs(opts)
1009 opts = pycompat.byteskwargs(opts)
1010 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1010 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1011 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1011 action = cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
1012 if action:
1012 if action:
1013 cmdutil.check_incompatible_arguments(
1013 cmdutil.check_incompatible_arguments(
1014 opts, action, b'confirm', b'dry_run'
1014 opts, action, [b'confirm', b'dry_run']
1015 )
1015 )
1016 cmdutil.check_incompatible_arguments(
1016 cmdutil.check_incompatible_arguments(
1017 opts, action, b'rev', b'source', b'base', b'dest'
1017 opts, action, [b'rev', b'source', b'base', b'dest']
1018 )
1018 )
1019 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1019 cmdutil.check_at_most_one_arg(opts, b'confirm', b'dry_run')
1020 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1020 cmdutil.check_at_most_one_arg(opts, b'rev', b'source', b'base')
1021
1021
1022 if action or repo.currenttransaction() is not None:
1022 if action or repo.currenttransaction() is not None:
1023 # in-memory rebase is not compatible with resuming rebases.
1023 # in-memory rebase is not compatible with resuming rebases.
1024 # (Or if it is run within a transaction, since the restart logic can
1024 # (Or if it is run within a transaction, since the restart logic can
1025 # fail the entire transaction.)
1025 # fail the entire transaction.)
1026 inmemory = False
1026 inmemory = False
1027
1027
1028 if opts.get(b'auto_orphans'):
1028 if opts.get(b'auto_orphans'):
1029 disallowed_opts = set(opts) - {b'auto_orphans'}
1029 disallowed_opts = set(opts) - {b'auto_orphans'}
1030 cmdutil.check_incompatible_arguments(
1030 cmdutil.check_incompatible_arguments(
1031 opts, b'auto_orphans', *disallowed_opts
1031 opts, b'auto_orphans', disallowed_opts
1032 )
1032 )
1033
1033
1034 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1034 userrevs = list(repo.revs(opts.get(b'auto_orphans')))
1035 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1035 opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1036 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1036 opts[b'dest'] = b'_destautoorphanrebase(SRC)'
1037
1037
1038 if opts.get(b'dry_run') or opts.get(b'confirm'):
1038 if opts.get(b'dry_run') or opts.get(b'confirm'):
1039 return _dryrunrebase(ui, repo, action, opts)
1039 return _dryrunrebase(ui, repo, action, opts)
1040 elif action == b'stop':
1040 elif action == b'stop':
1041 rbsrt = rebaseruntime(repo, ui)
1041 rbsrt = rebaseruntime(repo, ui)
1042 with repo.wlock(), repo.lock():
1042 with repo.wlock(), repo.lock():
1043 rbsrt.restorestatus()
1043 rbsrt.restorestatus()
1044 if rbsrt.collapsef:
1044 if rbsrt.collapsef:
1045 raise error.Abort(_(b"cannot stop in --collapse session"))
1045 raise error.Abort(_(b"cannot stop in --collapse session"))
1046 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1046 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1047 if not (rbsrt.keepf or allowunstable):
1047 if not (rbsrt.keepf or allowunstable):
1048 raise error.Abort(
1048 raise error.Abort(
1049 _(
1049 _(
1050 b"cannot remove original changesets with"
1050 b"cannot remove original changesets with"
1051 b" unrebased descendants"
1051 b" unrebased descendants"
1052 ),
1052 ),
1053 hint=_(
1053 hint=_(
1054 b'either enable obsmarkers to allow unstable '
1054 b'either enable obsmarkers to allow unstable '
1055 b'revisions or use --keep to keep original '
1055 b'revisions or use --keep to keep original '
1056 b'changesets'
1056 b'changesets'
1057 ),
1057 ),
1058 )
1058 )
1059 # update to the current working revision
1059 # update to the current working revision
1060 # to clear interrupted merge
1060 # to clear interrupted merge
1061 hg.updaterepo(repo, rbsrt.originalwd, overwrite=True)
1061 hg.updaterepo(repo, rbsrt.originalwd, overwrite=True)
1062 rbsrt._finishrebase()
1062 rbsrt._finishrebase()
1063 return 0
1063 return 0
1064 elif inmemory:
1064 elif inmemory:
1065 try:
1065 try:
1066 # in-memory merge doesn't support conflicts, so if we hit any, abort
1066 # in-memory merge doesn't support conflicts, so if we hit any, abort
1067 # and re-run as an on-disk merge.
1067 # and re-run as an on-disk merge.
1068 overrides = {(b'rebase', b'singletransaction'): True}
1068 overrides = {(b'rebase', b'singletransaction'): True}
1069 with ui.configoverride(overrides, b'rebase'):
1069 with ui.configoverride(overrides, b'rebase'):
1070 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1070 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1071 except error.InMemoryMergeConflictsError:
1071 except error.InMemoryMergeConflictsError:
1072 ui.warn(
1072 ui.warn(
1073 _(
1073 _(
1074 b'hit merge conflicts; re-running rebase without in-memory'
1074 b'hit merge conflicts; re-running rebase without in-memory'
1075 b' merge\n'
1075 b' merge\n'
1076 )
1076 )
1077 )
1077 )
1078 # TODO: Make in-memory merge not use the on-disk merge state, so
1078 # TODO: Make in-memory merge not use the on-disk merge state, so
1079 # we don't have to clean it here
1079 # we don't have to clean it here
1080 mergemod.mergestate.clean(repo)
1080 mergemod.mergestate.clean(repo)
1081 clearstatus(repo)
1081 clearstatus(repo)
1082 clearcollapsemsg(repo)
1082 clearcollapsemsg(repo)
1083 return _dorebase(ui, repo, action, opts, inmemory=False)
1083 return _dorebase(ui, repo, action, opts, inmemory=False)
1084 else:
1084 else:
1085 return _dorebase(ui, repo, action, opts)
1085 return _dorebase(ui, repo, action, opts)
1086
1086
1087
1087
def _dryrunrebase(ui, repo, action, opts):
    """Perform the rebase purely in memory and report (or confirm) the result.

    With ``--confirm`` the user is prompted whether to keep the in-memory
    result; with ``--dry-run`` the rebase is always rolled back.  Returns 0
    on success and 1 when a merge conflict was hit.
    """
    rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
    confirm = opts.get(b'confirm')
    if confirm:
        ui.status(_(b'starting in-memory rebase\n'))
    else:
        ui.status(
            _(b'starting dry-run rebase; repository will not be changed\n')
        )
    with repo.wlock(), repo.lock():
        # Assume we must roll back until proven otherwise.
        needsabort = True
        try:
            overrides = {(b'rebase', b'singletransaction'): True}
            with ui.configoverride(overrides, b'rebase'):
                _origrebase(
                    ui,
                    repo,
                    action,
                    opts,
                    rbsrt,
                    inmemory=True,
                    leaveunfinished=True,
                )
        except error.InMemoryMergeConflictsError:
            ui.status(_(b'hit a merge conflict\n'))
            return 1
        except error.Abort:
            # The runtime already cleaned up after itself on Abort.
            needsabort = False
            raise
        else:
            if confirm:
                ui.status(_(b'rebase completed successfully\n'))
                if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
                    # finish unfinished rebase
                    rbsrt._finishrebase()
                else:
                    rbsrt._prepareabortorcontinue(
                        isabort=True, backup=False, suppwarns=True
                    )
                needsabort = False
            else:
                ui.status(
                    _(
                        b'dry-run rebase completed successfully; run without'
                        b' -n/--dry-run to perform this rebase\n'
                    )
                )
            return 0
        finally:
            if needsabort:
                # no need to store backup in case of dryrun
                rbsrt._prepareabortorcontinue(
                    isabort=True, backup=False, suppwarns=True
                )
1142
1142
1143
1143
def _dorebase(ui, repo, action, opts, inmemory=False):
    """Build the rebase runtime state and delegate to ``_origrebase``."""
    runtime = rebaseruntime(repo, ui, inmemory, opts)
    return _origrebase(ui, repo, action, opts, runtime, inmemory=inmemory)
1147
1147
1148
1148
def _origrebase(
    ui, repo, action, opts, rbsrt, inmemory=False, leaveunfinished=False
):
    """Run a rebase: either resume/abort an existing one or start a new one.

    ``action`` is ``b'continue'``/``b'abort'`` for in-progress rebases, or
    falsy to compute a fresh destination map and start rebasing.  ``b'stop'``
    is handled by the caller and must not reach this function.
    """
    assert action != b'stop'
    with repo.wlock(), repo.lock():
        # Interactive editing lives in histedit, not here; point the user at it.
        if opts.get(b'interactive'):
            try:
                if extensions.find(b'histedit'):
                    enablehistedit = b''
            except KeyError:
                enablehistedit = b" --config extensions.histedit="
            help = b"hg%s help -e histedit" % enablehistedit
            msg = (
                _(
                    b"interactive history editing is supported by the "
                    b"'histedit' extension (see \"%s\")"
                )
                % help
            )
            raise error.Abort(msg)

        if rbsrt.collapsemsg and not rbsrt.collapsef:
            raise error.Abort(_(b'message can only be specified with collapse'))

        if action:
            if rbsrt.collapsef:
                raise error.Abort(
                    _(b'cannot use collapse with continue or abort')
                )
            if action == b'abort' and opts.get(b'tool', False):
                ui.warn(_(b'tool option will be ignored\n'))
            if action == b'continue':
                ms = mergemod.mergestate.read(repo)
                mergeutil.checkunresolved(ms)

            retcode = rbsrt._prepareabortorcontinue(
                isabort=(action == b'abort')
            )
            if retcode is not None:
                return retcode
        else:
            # search default destination in this space
            # used in the 'hg pull --rebase' case, see issue 5214.
            destspace = opts.get(b'_destspace')
            destmap = _definedestmap(
                ui,
                repo,
                inmemory,
                opts.get(b'dest', None),
                opts.get(b'source', None),
                opts.get(b'base', None),
                opts.get(b'rev', []),
                destspace=destspace,
            )
            retcode = rbsrt._preparenewrebase(destmap)
            if retcode is not None:
                return retcode
            storecollapsemsg(repo, rbsrt.collapsemsg)

        tr = None

        singletr = ui.configbool(b'rebase', b'singletransaction')
        if singletr:
            tr = repo.transaction(b'rebase')

        # If `rebase.singletransaction` is enabled, wrap the entire operation in
        # one transaction here. Otherwise, transactions are obtained when
        # committing each node, which is slower but allows partial success.
        with util.acceptintervention(tr):
            # Same logic for the dirstate guard, except we don't create one when
            # rebasing in-memory (it's not needed).
            dsguard = None
            if singletr and not inmemory:
                dsguard = dirstateguard.dirstateguard(repo, b'rebase')
            with util.acceptintervention(dsguard):
                rbsrt._performrebase(tr)
                if not leaveunfinished:
                    rbsrt._finishrebase()
1227
1227
1228
1228
def _definedestmap(
    ui,
    repo,
    inmemory,
    destf=None,
    srcf=None,
    basef=None,
    revf=None,
    destspace=None,
):
    """Use the revisions arguments to define destmap {srcrev: destrev}.

    Returns ``None`` (after printing a "nothing to rebase" style message)
    when the selection is empty, otherwise a dict mapping each source
    revision to its rebase destination.
    """
    if revf is None:
        revf = []

    # destspace is here to work around issues with `hg pull --rebase` see
    # issue5214 for details

    cmdutil.checkunfinished(repo)
    if not inmemory:
        cmdutil.bailifchanged(repo)

    if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
        raise error.Abort(
            _(b'you must specify a destination'),
            hint=_(b'use: hg rebase -d REV'),
        )

    dest = None

    if revf:
        # --rev: rebase exactly the given set
        rebaseset = scmutil.revrange(repo, revf)
        if not rebaseset:
            ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
            return None
    elif srcf:
        # --source: the given revs and all their descendants
        src = scmutil.revrange(repo, [srcf])
        if not src:
            ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
            return None
        rebaseset = repo.revs(b'(%ld)::', src)
        assert rebaseset
    else:
        # --base (or default '.'): compute roots from branching points
        base = scmutil.revrange(repo, [basef or b'.'])
        if not base:
            ui.status(
                _(b'empty "base" revision set - ' b"can't compute rebase set\n")
            )
            return None
        if destf:
            # --base does not support multiple destinations
            dest = scmutil.revsingle(repo, destf)
        else:
            dest = repo[_destrebase(repo, base, destspace=destspace)]
            destf = bytes(dest)

        roots = []  # selected children of branching points
        bpbase = {}  # {branchingpoint: [origbase]}
        for b in base:  # group bases by branching points
            bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
            bpbase[bp] = bpbase.get(bp, []) + [b]
        if None in bpbase:
            # emulate the old behavior, showing "nothing to rebase" (a better
            # behavior may be abort with "cannot find branching point" error)
            bpbase.clear()
        for bp, bs in pycompat.iteritems(bpbase):  # calculate roots
            roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))

        rebaseset = repo.revs(b'%ld::', roots)

        if not rebaseset:
            # transform to list because smartsets are not comparable to
            # lists. This should be improved to honor laziness of
            # smartset.
            if list(base) == [dest.rev()]:
                if basef:
                    ui.status(
                        _(
                            b'nothing to rebase - %s is both "base"'
                            b' and destination\n'
                        )
                        % dest
                    )
                else:
                    ui.status(
                        _(
                            b'nothing to rebase - working directory '
                            b'parent is also destination\n'
                        )
                    )
            elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
                if basef:
                    ui.status(
                        _(
                            b'nothing to rebase - "base" %s is '
                            b'already an ancestor of destination '
                            b'%s\n'
                        )
                        % (b'+'.join(bytes(repo[r]) for r in base), dest)
                    )
                else:
                    ui.status(
                        _(
                            b'nothing to rebase - working '
                            b'directory parent is already an '
                            b'ancestor of destination %s\n'
                        )
                        % dest
                    )
            else:  # can it happen?
                ui.status(
                    _(b'nothing to rebase from %s to %s\n')
                    % (b'+'.join(bytes(repo[r]) for r in base), dest)
                )
            return None

    rebasingwcp = repo[b'.'].rev() in rebaseset
    ui.log(
        b"rebase",
        b"rebasing working copy parent: %r\n",
        rebasingwcp,
        rebase_rebasing_wcp=rebasingwcp,
    )
    if inmemory and rebasingwcp:
        # Check these since we did not before.
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    if not destf:
        dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
        destf = bytes(dest)

    allsrc = revsetlang.formatspec(b'%ld', rebaseset)
    alias = {b'ALLSRC': allsrc}

    if dest is None:
        try:
            # fast path: try to resolve dest without SRC alias
            dest = scmutil.revsingle(repo, destf, localalias=alias)
        except error.RepoLookupError:
            # multi-dest path: resolve dest for each SRC separately
            destmap = {}
            for r in rebaseset:
                alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
                # use repo.anyrevs instead of scmutil.revsingle because we
                # don't want to abort if destset is empty.
                destset = repo.anyrevs([destf], user=True, localalias=alias)
                size = len(destset)
                if size == 1:
                    destmap[r] = destset.first()
                elif size == 0:
                    ui.note(_(b'skipping %s - empty destination\n') % repo[r])
                else:
                    raise error.Abort(
                        _(b'rebase destination for %s is not unique') % repo[r]
                    )

    if dest is not None:
        # single-dest case: assign dest to each rev in rebaseset
        destrev = dest.rev()
        destmap = {r: destrev for r in rebaseset}  # {srcrev: destrev}

    if not destmap:
        ui.status(_(b'nothing to rebase - empty destination\n'))
        return None

    return destmap
1395
1395
1396
1396
def externalparent(repo, state, destancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state is collapsed on top of destancestors.
    Abort if there is more than one parent.
    """
    source = min(state)
    # Every parent of a rebased rev (other than the first source) that lies
    # outside both the rebased set and the destination's ancestors.
    parents = {
        p.rev()
        for rev in state
        if rev != source
        for p in repo[rev].parents()
        if p.rev() not in state and p.rev() not in destancestors
    }
    if not parents:
        return nullrev
    if len(parents) == 1:
        return parents.pop()
    raise error.Abort(
        _(
            b'unable to collapse on top of %d, there is more '
            b'than one external parent: %s'
        )
        % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
    )
1421
1421
1422
1422
def commitmemorynode(repo, p1, p2, wctx, editor, extra, user, date, commitmsg):
    '''Commit the memory changes with parents p1 and p2.
    Return node of committed revision.'''
    # Replicates the empty check in ``repo.commit``.
    if wctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
        return None

    # By convention, ``extra['branch']`` (set by extrafn) clobbers
    # ``branch`` (used when passing ``--keepbranches``).
    branch = extra.get(b'branch')

    wctx.setparents(repo[p1].node(), repo[p2].node())
    memctx = wctx.tomemctx(
        commitmsg,
        date=date,
        extra=extra,
        user=user,
        branch=branch,
        editor=editor,
    )
    commitres = repo.commitctx(memctx)
    wctx.clean()  # Might be reused
    return commitres
1448
1448
1449
1449
def commitnode(repo, p1, p2, editor, extra, user, date, commitmsg):
    '''Commit the wd changes with parents p1 and p2.
    Return node of committed revision.'''
    # Only guard the dirstate per-commit when there is no single outer
    # transaction protecting the whole rebase.
    dsguard = util.nullcontextmanager()
    if not repo.ui.configbool(b'rebase', b'singletransaction'):
        dsguard = dirstateguard.dirstateguard(repo, b'rebase')
    with dsguard:
        repo.setparents(repo[p1].node(), repo[p2].node())

        # Commit might fail if unresolved files exist
        newnode = repo.commit(
            text=commitmsg, user=user, date=date, extra=extra, editor=editor
        )

        repo.dirstate.setbranch(repo[newnode].branch())
        return newnode
1466
1466
1467
1467
def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
    """Rebase a single revision rev on top of p1 using base as merge ancestor"""
    # Merge phase
    # Update to destination and merge it with local
    p1ctx = repo[p1]
    if wctx.isinmemory():
        wctx.setbase(p1ctx)
    else:
        if repo[b'.'].rev() != p1:
            repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
            mergemod.update(repo, p1, branchmerge=False, force=True)
        else:
            repo.ui.debug(b" already in destination\n")
        # This is, alas, necessary to invalidate workingctx's manifest cache,
        # as well as other data we litter on it in other places.
        wctx = repo[None]
        repo.dirstate.write(repo.currenttransaction())
    ctx = repo[rev]
    repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
    if base is not None:
        repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))

    # See explanation in merge.graft()
    mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
    stats = mergemod.update(
        repo,
        rev,
        branchmerge=True,
        force=True,
        ancestor=base,
        mergeancestor=mergeancestor,
        labels=[b'dest', b'source'],
        wc=wctx,
    )
    if collapse:
        copies.graftcopies(wctx, ctx, repo[dest])
    else:
        # If we're not using --collapse, we need to
        # duplicate copies between the revision we're
        # rebasing and its first parent.
        copies.graftcopies(wctx, ctx, ctx.p1())
    return stats
1510
1510
1511
1511
def adjustdest(repo, rev, destmap, state, skipped):
    r"""adjust rebase destination given the current rebase state

    rev is what is being rebased. Return a list of two revs, which are the
    adjusted destinations for rev's p1 and p2, respectively. If a parent is
    nullrev, return dest without adjustment for it.

    For example, when doing rebasing B+E to F, C to G, rebase will first move B
    to B1, and E's destination will be adjusted from F to B1.

        B1 <- written during rebasing B
        |
        F <- original destination of B, E
        |
        | E <- rev, which is being rebased
        | |
        | D <- prev, one parent of rev being checked
        | |
        | x <- skipped, ex. no successor or successor in (::dest)
        | |
        | C <- rebased as C', different destination
        | |
        | B <- rebased as B1     C'
        |/                       |
        A                        G <- destination of C, different

    Another example about merge changeset, rebase -r C+G+H -d K, rebase will
    first move C to C1, G to G1, and when it's checking H, the adjusted
    destinations will be [C1, G1].

        H       C1 G1
       /|       | /
      F G       |/
    K | |  ->   K
    | C D       |
    | |/        |
    | B         | ...
    |/          |/
    A           A

    Besides, adjust dest according to existing rebase information. For example,

      B C D    B needs to be rebased on top of C, C needs to be rebased on top
       \|/     of D. We will rebase C first.
        A

          C'   After rebasing C, when considering B's destination, use C'
          |    instead of the original C.
      B   D
       \ /
        A
    """
    # pick already rebased revs with same dest from state as interesting source
    dest = destmap[rev]
    source = [
        s
        for s, d in state.items()
        if d > 0 and destmap[s] == dest and s not in skipped
    ]

    result = []
    for prev in repo.changelog.parentrevs(rev):
        adjusted = dest
        if prev != nullrev:
            # the nearest already-rebased ancestor of this parent, if any,
            # determines the adjusted destination
            candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
            if candidate is not None:
                adjusted = state[candidate]
        if adjusted == dest and dest in state:
            adjusted = state[dest]
            if adjusted == revtodo:
                # sortsource should produce an order that makes this impossible
                raise error.ProgrammingError(
                    b'rev %d should be rebased already at this time' % dest
                )
        result.append(adjusted)
    return result
1588
1588
1589
1589
1590 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1590 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1591 """
1591 """
1592 Abort if rebase will create divergence or rebase is noop because of markers
1592 Abort if rebase will create divergence or rebase is noop because of markers
1593
1593
1594 `rebaseobsrevs`: set of obsolete revision in source
1594 `rebaseobsrevs`: set of obsolete revision in source
1595 `rebaseobsskipped`: set of revisions from source skipped because they have
1595 `rebaseobsskipped`: set of revisions from source skipped because they have
1596 successors in destination or no non-obsolete successor.
1596 successors in destination or no non-obsolete successor.
1597 """
1597 """
1598 # Obsolete node with successors not in dest leads to divergence
1598 # Obsolete node with successors not in dest leads to divergence
1599 divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
1599 divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
1600 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1600 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1601
1601
1602 if divergencebasecandidates and not divergenceok:
1602 if divergencebasecandidates and not divergenceok:
1603 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1603 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1604 msg = _(b"this rebase will cause divergences from: %s")
1604 msg = _(b"this rebase will cause divergences from: %s")
1605 h = _(
1605 h = _(
1606 b"to force the rebase please set "
1606 b"to force the rebase please set "
1607 b"experimental.evolution.allowdivergence=True"
1607 b"experimental.evolution.allowdivergence=True"
1608 )
1608 )
1609 raise error.Abort(msg % (b",".join(divhashes),), hint=h)
1609 raise error.Abort(msg % (b",".join(divhashes),), hint=h)
1610
1610
1611
1611
def successorrevs(unfi, rev):
    """Yield the revision numbers of all known successors of ``rev``.

    ``unfi`` must be an unfiltered repository (successors may be hidden
    in a filtered view); successors whose nodes are not present in the
    local changelog are silently skipped.
    """
    assert unfi.filtername is None
    node_to_rev = unfi.changelog.index.get_rev
    allsuccs = obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()])
    for succnode in allsuccs:
        succrev = node_to_rev(succnode)
        if succrev is not None:
            yield succrev
1620
1620
1621
1621
def defineparents(repo, rev, destmap, state, skipped, obsskipped):
    """Return new parents and optionally a merge base for rev being rebased

    The destination specified by "dest" cannot always be used directly because
    previously rebase result could affect destination. For example,

          D E    rebase -r C+D+E -d B
          |/     C will be rebased to C'
        B C      D's new destination will be C' instead of B
        |/       E's new destination will be C' instead of B
        A

    The new parents of a merge is slightly more complicated. See the comment
    block below.

    Returns a ``(p1, p2, base)`` tuple: the new first and second parent
    revisions (``nullrev`` when absent) and the merge base revision, or
    None when no special merge base is needed.
    """
    # use unfiltered changelog since successorrevs may return filtered nodes
    assert repo.filtername is None
    cl = repo.changelog
    isancestor = cl.isancestorrev

    dest = destmap[rev]
    oldps = repo.changelog.parentrevs(rev)  # old parents
    newps = [nullrev, nullrev]  # new parents
    dests = adjustdest(repo, rev, destmap, state, skipped)
    bases = list(oldps)  # merge base candidates, initially just old parents

    if all(r == nullrev for r in oldps[1:]):
        # For non-merge changeset, just move p to adjusted dest as requested.
        newps[0] = dests[0]
    else:
        # For merge changeset, if we move p to dests[i] unconditionally, both
        # parents may change and the end result looks like "the merge loses a
        # parent", which is a surprise. This is a limit because "--dest" only
        # accepts one dest per src.
        #
        # Therefore, only move p with reasonable conditions (in this order):
        #   1. use dest, if dest is a descendent of (p or one of p's successors)
        #   2. use p's rebased result, if p is rebased (state[p] > 0)
        #
        # Comparing with adjustdest, the logic here does some additional work:
        #   1. decide which parents will not be moved towards dest
        #   2. if the above decision is "no", should a parent still be moved
        #      because it was rebased?
        #
        # For example:
        #
        #     C    # "rebase -r C -d D" is an error since none of the parents
        #    /|    # can be moved. "rebase -r B+C -d D" will move C's parent
        #   A B D  # B (using rule "2."), since B will be rebased.
        #
        # The loop tries to be not rely on the fact that a Mercurial node has
        # at most 2 parents.
        for i, p in enumerate(oldps):
            np = p  # new parent
            if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
                np = dests[i]
            elif p in state and state[p] > 0:
                np = state[p]

            # "bases" only record "special" merge bases that cannot be
            # calculated from changelog DAG (i.e. isancestor(p, np) is False).
            # For example:
            #
            #   B'   # rebase -s B -d D, when B was rebased to B'. dest for C
            #   | C  # is B', but merge base for C is B, instead of
            #   D |  # changelog.ancestor(C, B') == A. If changelog DAG and
            #   | B  # "state" edges are merged (so there will be an edge from
            #   |/   # B to B'), the merge base is still ancestor(C, B') in
            #   A    # the merged graph.
            #
            # Also see https://bz.mercurial-scm.org/show_bug.cgi?id=1950#c8
            # which uses "virtual null merge" to explain this situation.
            if isancestor(p, np):
                bases[i] = nullrev

            # If one parent becomes an ancestor of the other, drop the ancestor
            for j, x in enumerate(newps[:i]):
                if x == nullrev:
                    continue
                if isancestor(np, x):  # CASE-1
                    np = nullrev
                elif isancestor(x, np):  # CASE-2
                    newps[j] = np
                    np = nullrev
                    # New parents forming an ancestor relationship does not
                    # mean the old parents have a similar relationship. Do not
                    # set bases[x] to nullrev.
                    bases[j], bases[i] = bases[i], bases[j]

            newps[i] = np

        # "rebasenode" updates to new p1, and the old p1 will be used as merge
        # base. If only p2 changes, merging using unchanged p1 as merge base is
        # suboptimal. Therefore swap parents to make the merge sane.
        if newps[1] != nullrev and oldps[0] == newps[0]:
            assert len(newps) == 2 and len(oldps) == 2
            newps.reverse()
            bases.reverse()

        # No parent change might be an error because we fail to make rev a
        # descendent of requested dest. This can happen, for example:
        #
        #     C    # rebase -r C -d D
        #    /|    # None of A and B will be changed to D and rebase fails.
        #   A B D
        if set(newps) == set(oldps) and dest not in newps:
            raise error.Abort(
                _(
                    b'cannot rebase %d:%s without '
                    b'moving at least one of its parents'
                )
                % (rev, repo[rev])
            )

    # Source should not be ancestor of dest. The check here guarantees it's
    # impossible. With multi-dest, the initial check does not cover complex
    # cases since we don't have abstractions to dry-run rebase cheaply.
    if any(p != nullrev and isancestor(rev, p) for p in newps):
        raise error.Abort(_(b'source is ancestor of destination'))

    # "rebasenode" updates to new p1, use the corresponding merge base.
    if bases[0] != nullrev:
        base = bases[0]
    else:
        base = None

    # Check if the merge will contain unwanted changes. That may happen if
    # there are multiple special (non-changelog ancestor) merge bases, which
    # cannot be handled well by the 3-way merge algorithm. For example:
    #
    #     F
    #    /|
    #   D E  # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
    #   | |  # as merge base, the difference between D and F will include
    #   B C  # C, so the rebased F will contain C surprisingly. If "E" was
    #   |/   # chosen, the rebased F will contain B.
    #   A Z
    #
    # But our merge base candidates (D and E in above case) could still be
    # better than the default (ancestor(F, Z) == null). Therefore still
    # pick one (so choose p1 above).
    if sum(1 for b in set(bases) if b != nullrev) > 1:
        unwanted = [None, None]  # unwanted[i]: unwanted revs if choose bases[i]
        for i, base in enumerate(bases):
            if base == nullrev:
                continue
            # Revisions in the side (not chosen as merge base) branch that
            # might contain "surprising" contents
            other_bases = set(bases) - {base}
            siderevs = list(
                repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
            )

            # If those revisions are covered by rebaseset, the result is good.
            # A merge in rebaseset would be considered to cover its ancestors.
            if siderevs:
                rebaseset = [
                    r for r, d in state.items() if d > 0 and r not in obsskipped
                ]
                merges = [
                    r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
                ]
                unwanted[i] = list(
                    repo.revs(
                        b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
                    )
                )

        # Choose a merge base that has a minimal number of unwanted revs.
        l, i = min(
            (len(revs), i)
            for i, revs in enumerate(unwanted)
            if revs is not None
        )

        # The merge will include unwanted revisions. Abort now. Revisit this if
        # we have a more advanced merge algorithm that handles multiple bases.
        if l > 0:
            unwanteddesc = _(b' or ').join(
                (
                    b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
                    for revs in unwanted
                    if revs is not None
                )
            )
            raise error.Abort(
                _(b'rebasing %d:%s will include unwanted changes from %s')
                % (rev, repo[rev], unwanteddesc)
            )

        base = bases[i]

        # newps[0] should match merge base if possible. Currently, if newps[i]
        # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
        # the other's ancestor. In that case, it's fine to not swap newps here.
        # (see CASE-1 and CASE-2 above)
        if i != 0 and newps[i] != nullrev:
            newps[0], newps[i] = newps[i], newps[0]

    repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))

    return newps[0], newps[1], base
1824
1824
1825
1825
def isagitpatch(repo, patchname):
    """Return true if the given patch is in git format

    The mq patch file is scanned line by line for a ``diff --git``
    header.  The file handle is closed in all cases — the original
    implementation leaked it, particularly on the early ``return True``.
    """
    mqpatch = os.path.join(repo.mq.path, patchname)
    # context manager guarantees the handle is released even when we
    # return from inside the loop or linereader raises
    with open(mqpatch, b'rb') as fp:
        for line in patch.linereader(fp):
            if line.startswith(b'diff --git'):
                return True
    return False
1833
1833
1834
1834
def updatemq(repo, state, skipped, **opts):
    """Update rebased mq patches - finalize and then import them

    ``state`` maps old revision numbers to their rebased revision numbers
    and ``skipped`` is the set of source revisions that were not recreated.
    Applied mq patches whose revisions were rebased are finalized and then
    re-imported on top of the rebased revisions (newest first); patches
    that were skipped, or applied but not rebased, are dropped from the
    series file while preserving its order and guards.
    ``**opts`` is accepted for interface compatibility and is unused here.
    """
    mqrebase = {}  # old rev -> (patch name, is git-format patch)
    mq = repo.mq
    original_series = mq.fullseries[:]
    skippedpatches = set()

    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug(
                b'revision %d is an mq patch (%s), finalize it.\n'
                % (rev, p.name)
            )
            # remember git-ness now: after mq.finish() the patch file is gone
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev not in skipped:
                name, isgit = mqrebase[rev]
                repo.ui.note(
                    _(b'updating mq patch %s to %d:%s\n')
                    % (name, state[rev], repo[state[rev]])
                )
                mq.qimport(
                    repo,
                    (),
                    patchname=name,
                    git=isgit,
                    rev=[b"%d" % state[rev]],
                )
            else:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [
            s
            for s in original_series
            if mq.guard_re.split(s, 1)[0] not in skippedpatches
        ]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
1887
1887
1888
1888
def storecollapsemsg(repo, collapsemsg):
    """Store the collapse message to allow recovery

    The message (an empty bytestring when ``collapsemsg`` is falsy) is
    written, newline-terminated, to ``.hg/last-message.txt`` so an
    interrupted ``rebase --collapse`` can restore it later.
    """
    collapsemsg = collapsemsg or b''
    # context manager closes the file even if write() raises; the
    # original could leak the handle on a write error
    with repo.vfs(b"last-message.txt", b"w") as f:
        f.write(b"%s\n" % collapsemsg)
1895
1895
1896
1896
def clearcollapsemsg(repo):
    """Delete the stored collapse message file, if any exists."""
    # ignoremissing: a plain (non-collapse) rebase never wrote the file
    vfs = repo.vfs
    vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1900
1900
1901
1901
def restorecollapsemsg(repo, isabort):
    """Restore previously stored collapse message

    Returns the stripped first line of ``.hg/last-message.txt``.  When the
    file is missing, returns ``b''`` if ``isabort`` is true (aborting does
    not need the message), otherwise raises ``error.Abort`` because a
    resuming collapse cannot proceed without it.  Other IOErrors are
    re-raised unchanged.
    """
    try:
        # context manager closes the file even if readline() raises; the
        # original could leak the handle on a read error
        with repo.vfs(b"last-message.txt") as f:
            collapsemsg = f.readline().strip()
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        if isabort:
            # Oh well, just abort like normal
            collapsemsg = b''
        else:
            raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
    return collapsemsg
1917
1917
1918
1918
def clearstatus(repo):
    """Delete the on-disk rebase state file.

    An active transaction may have registered a file generator that would
    rewrite the state file when it closes, so that generator is dropped
    before the file itself is unlinked.
    """
    transaction = repo.currenttransaction()
    if transaction:
        transaction.removefilegenerator(b'rebasestate')
    repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1926
1926
1927
1927
def sortsource(destmap):
    """Yield batches of source revisions in a safe rebase order.

    ``destmap`` maps each source revision to its destination.  A revision
    whose destination is itself still waiting to be rebased must come in a
    later batch, so each yielded (sorted) list contains only revisions
    whose destination lies outside the remaining set.  For example, when
    rebasing A onto B and B onto C, [B] is yielded before [A].

    Raises error.Abort when the destination map forms a cycle, which would
    make the rebase impossible.
    """
    pending = set(destmap)
    while pending:
        batch = sorted(r for r in pending if destmap[r] not in pending)
        if not batch:
            raise error.Abort(_(b'source and destination form a cycle'))
        pending.difference_update(batch)
        yield batch
1952
1952
1953
1953
def buildstate(repo, destmap, collapse):
    '''Define which revisions are going to be rebased and where

    repo: repo
    destmap: {srcrev: destrev}
    collapse: whether the rebase collapses the rebased revisions

    Returns ``(originalwd, destmap, state)`` where ``state`` maps each
    source revision to ``revtodo``, or to itself when it is already in
    place; returns None when there is nothing to rebase.
    '''
    rebaseset = destmap.keys()
    originalwd = repo[b'.'].rev()

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if b'qtip' in repo.tags():
        mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
        if set(destmap.values()) & mqapplied:
            raise error.Abort(_(b'cannot rebase onto an applied mq patch'))

    # Get "cycle" error early by exhausting the generator.
    sortedsrc = list(sortsource(destmap))  # a list of sorted revs
    if not sortedsrc:
        raise error.Abort(_(b'no matching revisions'))

    # Only check the first batch of revisions to rebase not depending on other
    # rebaseset. This means "source is ancestor of destination" for the second
    # (and following) batches of revisions are not checked here. We rely on
    # "defineparents" to do that check.
    roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
    if not roots:
        raise error.Abort(_(b'no matching revisions'))

    def revof(r):
        # sort key: context -> revision number
        return r.rev()

    roots = sorted(roots, key=revof)
    state = dict.fromkeys(rebaseset, revtodo)
    emptyrebase = len(sortedsrc) == 1
    for root in roots:
        dest = repo[destmap[root.rev()]]
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise error.Abort(_(b'source is ancestor of destination'))
        if commonbase == dest:
            wctx = repo[None]
            if dest == wctx.p1():
                # when rebasing to '.', it will use the current wd branch name
                samebranch = root.branch() == wctx.branch()
            else:
                samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and dest in root.parents():
                # mark the revision as done by setting its new revision
                # equal to its old (current) revisions
                state[root.rev()] = root.rev()
                repo.ui.debug(b'source is a child of destination\n')
                continue

        emptyrebase = False
        repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
    if emptyrebase:
        return None
    for rev in sorted(state):
        parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
        # if all parents of this revision are done, then so is this revision
        if parents and all((state.get(p) == p for p in parents)):
            state[rev] = rev
    return originalwd, destmap, state
2019
2019
2020
2020
def clearrebased(
    ui,
    repo,
    destmap,
    state,
    skipped,
    collapsedas=None,
    keepf=False,
    fm=None,
    backup=True,
):
    """dispose of rebased revision at the end of the rebase

    If `collapsedas` is not None, the rebase was a collapse whose result is
    the `collapsedas` node.

    If `keepf` is True, the rebase has --keep set and no nodes should be
    removed (but bookmarks still need to be moved).

    If `backup` is False, no backup will be stored when stripping rebased
    revisions.
    """
    tonode = repo.changelog.node
    replacements = {}
    moves = {}
    # Without obsolescence markers, rebased revisions are removed by
    # stripping, so skipped revisions need explicit cleanup too.
    stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)

    collapsednodes = []
    for rev, newrev in sorted(state.items()):
        if newrev >= 0 and newrev != rev:
            oldnode = tonode(rev)
            newnode = collapsedas or tonode(newrev)
            moves[oldnode] = newnode
            succs = None
            if rev in skipped:
                if stripcleanup or not repo[rev].obsolete():
                    # prune the skipped revision: it has no successor
                    succs = ()
            elif collapsedas:
                # all collapsed revisions share one successor, recorded below
                collapsednodes.append(oldnode)
            else:
                succs = (newnode,)
            if succs is not None:
                replacements[(oldnode,)] = succs
    if collapsednodes:
        replacements[tuple(collapsednodes)] = (collapsedas,)
    if fm:
        hf = fm.hexfunc
        fl = fm.formatlist
        fd = fm.formatdict
        changes = {}
        for oldns, newn in pycompat.iteritems(replacements):
            for oldn in oldns:
                changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
        nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
        fm.data(nodechanges=nodechanges)
    if keepf:
        # --keep: leave the original revisions in place, but still let
        # cleanupnodes move bookmarks via ``moves``.
        replacements = {}
    scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2079
2079
2080
2080
def pullrebase(orig, ui, repo, *args, **opts):
    """Call rebase after pull if the latter has been invoked with --rebase

    This wraps the pull command. Without --rebase, it delegates to the
    original pull (rejecting --tool, which only makes sense with --rebase).
    """
    if opts.get('rebase'):
        if ui.configbool(b'commands', b'rebase.requiredest'):
            msg = _(b'rebase destination required by configuration')
            hint = _(b'use hg pull followed by hg rebase -d DEST')
            raise error.Abort(msg, hint=hint)

        with repo.wlock(), repo.lock():
            if opts.get('update'):
                del opts['update']
                ui.debug(
                    b'--update and --rebase are not compatible, ignoring '
                    b'the update flag\n'
                )

            cmdutil.checkunfinished(repo, skipmerge=True)
            cmdutil.bailifchanged(
                repo,
                hint=_(
                    b'cannot pull with rebase: '
                    b'please commit or shelve your changes first'
                ),
            )

            revsprepull = len(repo)
            origpostincoming = commands.postincoming

            def _dummy(*args, **kwargs):
                pass

            # Temporarily disable postincoming so that pull does not touch
            # the working directory before we had a chance to rebase.
            commands.postincoming = _dummy
            try:
                ret = orig(ui, repo, *args, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                # --rev option from pull conflict with rebase own --rev
                # dropping it
                if 'rev' in opts:
                    del opts['rev']
                # positional argument from pull conflicts with rebase's own
                # --source.
                if 'source' in opts:
                    del opts['source']
                # revsprepull is the len of the repo, not revnum of tip.
                destspace = list(repo.changelog.revs(start=revsprepull))
                opts['_destspace'] = destspace
                try:
                    rebase(ui, repo, **opts)
                except error.NoMergeDestAbort:
                    # we can maybe update instead
                    rev, _a, _b = destutil.destupdate(repo)
                    if rev == repo[b'.'].rev():
                        ui.status(_(b'nothing to rebase\n'))
                    else:
                        ui.status(_(b'nothing to rebase - updating instead\n'))
                        # not passing argument to get the bare update behavior
                        # with warning and trumpets
                        commands.update(ui, repo)
    else:
        if opts.get('tool'):
            raise error.Abort(_(b'--tool can only be used with --rebase'))
        ret = orig(ui, repo, *args, **opts)

    return ret
2148
2148
2149
2149
2150 def _filterobsoleterevs(repo, revs):
2150 def _filterobsoleterevs(repo, revs):
2151 """returns a set of the obsolete revisions in revs"""
2151 """returns a set of the obsolete revisions in revs"""
2152 return set(r for r in revs if repo[r].obsolete())
2152 return set(r for r in revs if repo[r].obsolete())
2153
2153
2154
2154
def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
    """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination,
    obsoleteextinctsuccessors).

    `obsoletenotrebased` is a mapping mapping obsolete => successor for all
    obsolete nodes to be rebased given in `rebaseobsrevs`.

    `obsoletewithoutsuccessorindestination` is a set with obsolete revisions
    without a successor in destination.

    `obsoleteextinctsuccessors` is a set of obsolete revisions with only
    obsolete successors.
    """
    obsoletenotrebased = {}
    obsoletewithoutsuccessorindestination = set()
    obsoleteextinctsuccessors = set()

    # successor lookups below need to see every revision, so an
    # unfiltered repository is required
    assert repo.filtername is None
    cl = repo.changelog
    get_rev = cl.index.get_rev
    extinctrevs = set(repo.revs(b'extinct()'))
    for srcrev in rebaseobsrevs:
        srcnode = cl.node(srcrev)
        # XXX: more advanced APIs are required to handle split correctly
        successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
        # obsutil.allsuccessors includes node itself
        successors.remove(srcnode)
        # successors unknown locally map to None; drop those entries
        succrevs = {get_rev(s) for s in successors}
        succrevs.discard(None)
        if succrevs.issubset(extinctrevs):
            # all successors are extinct
            obsoleteextinctsuccessors.add(srcrev)
        if not successors:
            # no successor
            obsoletenotrebased[srcrev] = None
        else:
            dstrev = destmap[srcrev]
            for succrev in succrevs:
                if cl.isancestorrev(succrev, dstrev):
                    obsoletenotrebased[srcrev] = succrev
                    break
            else:
                # If 'srcrev' has a successor in rebase set but none in
                # destination (which would be caught above), we shall skip it
                # and its descendants to avoid divergence.
                if srcrev in extinctrevs or any(s in destmap for s in succrevs):
                    obsoletewithoutsuccessorindestination.add(srcrev)

    return (
        obsoletenotrebased,
        obsoletewithoutsuccessorindestination,
        obsoleteextinctsuccessors,
    )
2207
2207
2208
2208
def abortrebase(ui, repo):
    """Abort an interrupted rebase, restoring the pre-rebase state."""
    with repo.wlock(), repo.lock():
        runtime = rebaseruntime(repo, ui)
        runtime._prepareabortorcontinue(isabort=True)
2213
2213
2214
2214
def continuerebase(ui, repo):
    """Resume an interrupted rebase once merge conflicts are resolved."""
    with repo.wlock(), repo.lock():
        runtime = rebaseruntime(repo, ui)
        # refuse to continue while unresolved conflicts remain
        mergestate = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(mergestate)
        retcode = runtime._prepareabortorcontinue(isabort=False)
        if retcode is not None:
            return retcode
        runtime._performrebase(None)
        runtime._finishrebase()
2225
2225
2226
2226
def summaryhook(ui, repo):
    """Contribute a "rebase: ..." line to :hg:`summary` output."""
    if not repo.vfs.exists(b'rebasestate'):
        return
    try:
        rbsrt = rebaseruntime(repo, ui, {})
        rbsrt.restorestatus()
        state = rbsrt.state
    except error.RepoLookupError:
        # i18n: column positioning for "hg summary"
        msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
        ui.write(msg)
        return
    # entries with a revision >= 0 have already been rebased
    rebased = sum(1 for v in pycompat.itervalues(state) if v >= 0)
    remaining = len(state) - rebased
    # i18n: column positioning for "hg summary"
    ui.write(
        _(b'rebase: %s, %s (rebase --continue)\n')
        % (
            ui.label(_(b'%d rebased'), b'rebase.rebased') % rebased,
            ui.label(_(b'%d remaining'), b'rebase.remaining') % remaining,
        )
    )
2249
2249
2250
2250
def uisetup(ui):
    """Wire rebase support into pull, summary and the unfinished-state table."""
    # Replace pull with a decorator to provide --rebase option
    entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
    pullopts = entry[1]
    pullopts.append(
        (b'', b'rebase', None, _(b"rebase working directory to branch head"))
    )
    pullopts.append(
        (b't', b'tool', b'', _(b"specify merge tool for rebase"))
    )
    cmdutil.summaryhooks.add(b'rebase', summaryhook)
    statemod.addunfinished(
        b'rebase',
        fname=b'rebasestate',
        stopflag=True,
        continueflag=True,
        abortfunc=abortrebase,
        continuefunc=continuerebase,
    )
@@ -1,711 +1,711 b''
1 # Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
1 # Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """generate release notes from commit messages (EXPERIMENTAL)
6 """generate release notes from commit messages (EXPERIMENTAL)
7
7
8 It is common to maintain files detailing changes in a project between
8 It is common to maintain files detailing changes in a project between
9 releases. Maintaining these files can be difficult and time consuming.
9 releases. Maintaining these files can be difficult and time consuming.
10 The :hg:`releasenotes` command provided by this extension makes the
10 The :hg:`releasenotes` command provided by this extension makes the
11 process simpler by automating it.
11 process simpler by automating it.
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import difflib
16 import difflib
17 import errno
17 import errno
18 import re
18 import re
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial.pycompat import open
21 from mercurial.pycompat import open
22 from mercurial import (
22 from mercurial import (
23 cmdutil,
23 cmdutil,
24 config,
24 config,
25 error,
25 error,
26 minirst,
26 minirst,
27 node,
27 node,
28 pycompat,
28 pycompat,
29 registrar,
29 registrar,
30 scmutil,
30 scmutil,
31 util,
31 util,
32 )
32 )
33 from mercurial.utils import stringutil
33 from mercurial.utils import stringutil
34
34
35 cmdtable = {}
35 cmdtable = {}
36 command = registrar.command(cmdtable)
36 command = registrar.command(cmdtable)
37
37
38 try:
38 try:
39 import fuzzywuzzy.fuzz as fuzz
39 import fuzzywuzzy.fuzz as fuzz
40
40
41 fuzz.token_set_ratio
41 fuzz.token_set_ratio
42 except ImportError:
42 except ImportError:
43 fuzz = None
43 fuzz = None
44
44
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
47 # be specifying the version(s) of Mercurial they are tested with, or
47 # be specifying the version(s) of Mercurial they are tested with, or
48 # leave the attribute unspecified.
48 # leave the attribute unspecified.
49 testedwith = b'ships-with-hg-core'
49 testedwith = b'ships-with-hg-core'
50
50
51 DEFAULT_SECTIONS = [
51 DEFAULT_SECTIONS = [
52 (b'feature', _(b'New Features')),
52 (b'feature', _(b'New Features')),
53 (b'bc', _(b'Backwards Compatibility Changes')),
53 (b'bc', _(b'Backwards Compatibility Changes')),
54 (b'fix', _(b'Bug Fixes')),
54 (b'fix', _(b'Bug Fixes')),
55 (b'perf', _(b'Performance Improvements')),
55 (b'perf', _(b'Performance Improvements')),
56 (b'api', _(b'API Changes')),
56 (b'api', _(b'API Changes')),
57 ]
57 ]
58
58
59 RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
59 RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
60 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
60 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
61
61
62 BULLET_SECTION = _(b'Other Changes')
62 BULLET_SECTION = _(b'Other Changes')
63
63
64
64
class parsedreleasenotes(object):
    """In-memory representation of parsed release notes.

    Notes are grouped by section name; each section separately tracks
    titled sub-sections and non-titled (bulleted) entries.
    """

    def __init__(self):
        # maps section name -> ([(title, paragraphs), ...], [paragraphs, ...])
        self.sections = {}

    def __contains__(self, section):
        return section in self.sections

    def __iter__(self):
        # iterate section names in sorted order for stable output
        return iter(sorted(self.sections))

    def addtitleditem(self, section, title, paragraphs):
        """Add a titled release note entry."""
        self.sections.setdefault(section, ([], []))
        self.sections[section][0].append((title, paragraphs))

    def addnontitleditem(self, section, paragraphs):
        """Adds a non-titled release note entry.

        Will be rendered as a bullet point.
        """
        self.sections.setdefault(section, ([], []))
        self.sections[section][1].append(paragraphs)

    def titledforsection(self, section):
        """Returns titled entries in a section.

        Returns a list of (title, paragraphs) tuples describing sub-sections.
        """
        return self.sections.get(section, ([], []))[0]

    def nontitledforsection(self, section):
        """Returns non-titled, bulleted paragraphs in a section."""
        return self.sections.get(section, ([], []))[1]

    def hastitledinsection(self, section, title):
        """Return True if ``section`` already has a titled entry ``title``."""
        return any(t[0] == title for t in self.titledforsection(section))

    def merge(self, ui, other):
        """Merge another instance into this one.

        This is used to combine multiple sources of release notes together.
        Entries duplicating existing ones (same title, same issue number,
        or fuzzy-similar text) are skipped.
        """
        if not fuzz:
            ui.warn(
                _(
                    b"module 'fuzzywuzzy' not found, merging of similar "
                    b"releasenotes is disabled\n"
                )
            )

        for section in other:
            # flattened strings of everything already in this section,
            # used by the duplicate-detection helpers below
            existingnotes = converttitled(
                self.titledforsection(section)
            ) + convertnontitled(self.nontitledforsection(section))
            for title, paragraphs in other.titledforsection(section):
                if self.hastitledinsection(section, title):
                    # TODO prompt for resolution if different and running in
                    # interactive mode.
                    ui.write(
                        _(b'%s already exists in %s section; ignoring\n')
                        % (title, section)
                    )
                    continue

                incoming_str = converttitled([(title, paragraphs)])[0]
                if section == b'fix':
                    issue = getissuenum(incoming_str)
                    if issue:
                        if findissue(ui, existingnotes, issue):
                            continue

                if similar(ui, existingnotes, incoming_str):
                    continue

                self.addtitleditem(section, title, paragraphs)

            for paragraphs in other.nontitledforsection(section):
                if paragraphs in self.nontitledforsection(section):
                    continue

                incoming_str = convertnontitled([paragraphs])[0]
                if section == b'fix':
                    issue = getissuenum(incoming_str)
                    if issue:
                        if findissue(ui, existingnotes, issue):
                            continue

                if similar(ui, existingnotes, incoming_str):
                    continue

                self.addnontitleditem(section, paragraphs)
156
156
157
157
class releasenotessections(object):
    """Ordered collection of (name, title) release note sections."""

    def __init__(self, ui, repo=None):
        if repo:
            # start from the built-in defaults and let a repository's
            # .hgreleasenotes file override or extend them
            merged = util.sortdict(DEFAULT_SECTIONS)
            custom = getcustomadmonitions(repo)
            if custom:
                merged.update(custom)
            self._sections = list(pycompat.iteritems(merged))
        else:
            self._sections = list(DEFAULT_SECTIONS)

    def __iter__(self):
        for section in self._sections:
            yield section

    def names(self):
        """Return the list of section names (admonition directives)."""
        return [name for name, _title in self._sections]

    def sectionfromtitle(self, title):
        """Map a rendered section title back to its name, or None."""
        return next(
            (name for name, value in self._sections if value == title), None
        )
181
181
182
182
def converttitled(titledparagraphs):
    """
    Convert titled paragraphs to strings

    Each entry's paragraphs are flattened and joined with single spaces,
    producing one byte string per (title, paragraphs) pair.
    """
    return [
        b' '.join(line for para in paragraphs for line in para)
        for title, paragraphs in titledparagraphs
    ]
194
194
195
195
def convertnontitled(nontitledparagraphs):
    """
    Convert non-titled bullets to strings

    Each bullet's paragraphs are flattened and joined with single spaces,
    producing one byte string per bullet entry.
    """
    return [
        b' '.join(line for para in paragraphs for line in para)
        for paragraphs in nontitledparagraphs
    ]
207
207
208
208
def getissuenum(incoming_str):
    """
    Returns issue number from the incoming string if it exists

    Matching is case-insensitive; returns None when no issue reference
    is present.
    """
    match = re.search(RE_ISSUE, incoming_str, re.IGNORECASE)
    return match.group() if match else None
217
217
218
218
def findissue(ui, existing, issue):
    """
    Returns true if issue number already exists in notes.

    Notifies the user when a duplicate is found.
    """
    for note in existing:
        if issue in note:
            ui.write(_(b'"%s" already exists in notes; ignoring\n') % issue)
            return True
    return False
228
228
229
229
def similar(ui, existing, incoming_str):
    """
    Returns true if similar note found in existing notes.

    Fragments of ten words or fewer are always treated as distinct and
    never reported as similar.
    """
    if len(incoming_str.split()) <= 10:
        return False
    if similaritycheck(incoming_str, existing):
        return False
    ui.write(
        _(b'"%s" already exists in notes file; ignoring\n') % incoming_str
    )
    return True
246
246
247
247
def similaritycheck(incoming_str, existingnotes):
    """
    Returns false when note fragment can be merged to existing notes.

    Without fuzzywuzzy available every fragment is considered new.
    A token-set similarity score above 75 against any existing note
    marks the fragment as a duplicate.
    """
    # fuzzywuzzy not present
    if not fuzz:
        return True
    return all(
        fuzz.token_set_ratio(incoming_str, bullet) <= 75
        for bullet in existingnotes
    )
263
263
264
264
def getcustomadmonitions(repo):
    """Return custom admonition sections defined in ``.hgreleasenotes``.

    The file is looked up in the working directory parent revision and
    parsed as a Mercurial config; only the ``sections`` section of the
    parsed config is returned.
    """
    ctx = repo[b'.']
    p = config.config()

    def read(f, sections=None, remap=None):
        # also passed to p.parse() so %include directives resolve
        # recursively against the same changeset
        if f in ctx:
            data = ctx[f].data()
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(
                _(b".hgreleasenotes file \'%s\' not found") % repo.pathto(f)
            )

    if b'.hgreleasenotes' in ctx:
        read(b'.hgreleasenotes')
    return p[b'sections']
281
281
282
282
def checkadmonitions(ui, repo, directives, revs):
    """
    Checks the commit messages for admonitions and their validity.

    .. abcd::

       First paragraph under this admonition

    For this commit message, using `hg releasenotes -r . --check`
    returns: Invalid admonition 'abcd' present in changeset 3ea92981e103

    As admonition 'abcd' is neither present in default nor custom admonitions
    """
    for rev in revs:
        ctx = repo[rev]
        match = re.search(RE_DIRECTIVE, ctx.description())
        if not match:
            continue
        name = match.group(1)
        if name in directives:
            continue
        ui.write(
            _(b"Invalid admonition '%s' present in changeset %s\n")
            % (name, ctx.hex()[:12])
        )

        def ratio(candidate):
            # closeness of a known directive to the unknown one
            return difflib.SequenceMatcher(None, name, candidate).ratio()

        suggestions = [s for s in directives if ratio(s) > 0.6]
        if len(suggestions) == 1:
            ui.write(_(b"(did you mean %s?)\n") % suggestions[0])
        elif suggestions:
            ss = b", ".join(sorted(suggestions))
            ui.write(_(b"(did you mean one of %s?)\n") % ss)
317
317
318
318
319 def _getadmonitionlist(ui, sections):
319 def _getadmonitionlist(ui, sections):
320 for section in sections:
320 for section in sections:
321 ui.write(b"%s: %s\n" % (section[0], section[1]))
321 ui.write(b"%s: %s\n" % (section[0], section[1]))
322
322
323
323
def parsenotesfromrevisions(repo, directives, revs):
    """Extract release notes from the commit messages of ``revs``.

    Each commit message is parsed as ReStructuredText; admonition blocks
    whose name appears in ``directives`` are collected into a
    ``parsedreleasenotes`` instance, which is returned.

    Raises ``error.Abort`` when a directive has no content at all.
    """
    notes = parsedreleasenotes()

    for rev in revs:
        ctx = repo[rev]

        # Only admonitions named in ``directives`` are recognized by the
        # minirst parser here.
        blocks, pruned = minirst.parse(
            ctx.description(), admonitions=directives
        )

        for i, block in enumerate(blocks):
            if block[b'type'] != b'admonition':
                continue

            directive = block[b'admonitiontitle']
            # Text on the directive line itself marks a long-form
            # (titled) note; short-form notes have no title.
            title = block[b'lines'][0].strip() if block[b'lines'] else None

            # A directive that is the last block cannot have any content
            # following it.
            if i + 1 == len(blocks):
                raise error.Abort(
                    _(
                        b'changeset %s: release notes directive %s '
                        b'lacks content'
                    )
                    % (ctx, directive)
                )

            # Now search ahead and find all paragraphs attached to this
            # admonition.
            paragraphs = []
            for j in range(i + 1, len(blocks)):
                pblock = blocks[j]

                # Margin blocks may appear between paragraphs. Ignore them.
                if pblock[b'type'] == b'margin':
                    continue

                # The next admonition ends this one's content.
                if pblock[b'type'] == b'admonition':
                    break

                if pblock[b'type'] != b'paragraph':
                    repo.ui.warn(
                        _(
                            b'changeset %s: unexpected block in release '
                            b'notes directive %s\n'
                        )
                        % (ctx, directive)
                    )

                # Indented paragraphs belong to the admonition; the first
                # unindented block terminates it.
                if pblock[b'indent'] > 0:
                    paragraphs.append(pblock[b'lines'])
                else:
                    break

            # TODO consider using title as paragraph for more concise notes.
            if not paragraphs:
                repo.ui.warn(
                    _(b"error parsing releasenotes for revision: '%s'\n")
                    % node.hex(ctx.node())
                )
            if title:
                notes.addtitleditem(directive, title, paragraphs)
            else:
                notes.addnontitleditem(directive, paragraphs)

    return notes
389
389
390
390
def parsereleasenotesfile(sections, text):
    """Parse text content containing generated release notes."""
    notes = parsedreleasenotes()

    blocks = minirst.parse(text)[0]

    def gatherparagraphsbullets(offset, title=False):
        # Collect the content following the section heading at ``offset``.
        # With ``title=True`` we are inside a titled sub-section and
        # gather its paragraphs; otherwise we gather bullet-list items.
        notefragment = []

        for i in range(offset + 1, len(blocks)):
            block = blocks[i]

            if block[b'type'] == b'margin':
                continue
            elif block[b'type'] == b'section':
                # Next heading ends this section's content.
                break
            elif block[b'type'] == b'bullet':
                if block[b'indent'] != 0:
                    raise error.Abort(_(b'indented bullet lists not supported'))
                if title:
                    # Strip the leading bullet marker character.
                    lines = [l[1:].strip() for l in block[b'lines']]
                    notefragment.append(lines)
                    continue
                else:
                    lines = [[l[1:].strip() for l in block[b'lines']]]

                    # Paragraphs directly following a bullet continue that
                    # bullet item, until the next bullet or section.
                    for block in blocks[i + 1 :]:
                        if block[b'type'] in (b'bullet', b'section'):
                            break
                        if block[b'type'] == b'paragraph':
                            lines.append(block[b'lines'])
                    notefragment.append(lines)
                    continue
            elif block[b'type'] != b'paragraph':
                raise error.Abort(
                    _(b'unexpected block type in release notes: %s')
                    % block[b'type']
                )
            if title:
                notefragment.append(block[b'lines'])

        return notefragment

    currentsection = None
    for i, block in enumerate(blocks):
        if block[b'type'] != b'section':
            continue

        title = block[b'lines'][0]

        # TODO the parsing around paragraphs and bullet points needs some
        # work.
        if block[b'underline'] == b'=':  # main section
            name = sections.sectionfromtitle(title)
            if not name:
                raise error.Abort(
                    _(b'unknown release notes section: %s') % title
                )

            currentsection = name
            bullet_points = gatherparagraphsbullets(i)
            if bullet_points:
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)

        elif block[b'underline'] == b'-':  # sub-section
            if title == BULLET_SECTION:
                # Synthetic sub-section holding untitled bullet items.
                bullet_points = gatherparagraphsbullets(i)
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)
            else:
                paragraphs = gatherparagraphsbullets(i, True)
                notes.addtitleditem(currentsection, title, paragraphs)
        else:
            raise error.Abort(_(b'unsupported section type for %s') % title)

    return notes
468
468
469
469
def serializenotes(sections, notes):
    """Serialize release notes from parsed fragments and notes.

    This function essentially takes the output of ``parsenotesfromrevisions()``
    and ``parserelnotesfile()`` and produces output combining the 2.
    """
    out = []

    def heading(text, underline):
        # Emit an RST heading plus a trailing blank line.
        out.append(text)
        out.append(underline * len(text))
        out.append(b'')

    def wrapped(text, **kwargs):
        # Wrap a joined paragraph to 78 columns and split into lines.
        return stringutil.wrap(text, width=78, **kwargs).splitlines()

    for sectionname, sectiontitle in sections:
        if sectionname not in notes:
            continue

        heading(sectiontitle, b'=')

        # First pass to emit sub-sections.
        for title, paragraphs in notes.titledforsection(sectionname):
            heading(title, b'-')

            for idx, para in enumerate(paragraphs):
                if idx:
                    out.append(b'')
                out.extend(wrapped(b' '.join(para)))

            out.append(b'')

        # Second pass to emit bullet list items.

        # If the section has titled and non-titled items, we can't
        # simply emit the bullet list because it would appear to come
        # from the last title/section. So, we emit a new sub-section
        # for the non-titled items.
        nontitled = notes.nontitledforsection(sectionname)
        if notes.titledforsection(sectionname) and nontitled:
            # TODO make configurable.
            heading(BULLET_SECTION, b'-')

        for paragraphs in nontitled:
            out.extend(
                wrapped(
                    b' '.join(paragraphs[0]),
                    initindent=b'* ',
                    hangindent=b'  ',
                )
            )

            for para in paragraphs[1:]:
                out.append(b'')
                out.extend(
                    wrapped(
                        b' '.join(para),
                        initindent=b'  ',
                        hangindent=b'  ',
                    )
                )

            out.append(b'')

    # Terminate the document with a single trailing newline.
    if out and out[-1]:
        out.append(b'')

    return b'\n'.join(out)
541
541
542
542
@command(
    b'releasenotes',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'revisions to process for release notes'),
            _(b'REV'),
        ),
        (
            b'c',
            b'check',
            False,
            _(b'checks for validity of admonitions (if any)'),
            _(b'REV'),
        ),
        (
            b'l',
            b'list',
            False,
            _(b'list the available admonitions with their title'),
            None,
        ),
    ],
    _(b'hg releasenotes [-r REV] [-c] FILE'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def releasenotes(ui, repo, file_=None, **opts):
    """parse release notes from commit messages into an output file

    Given an output file and set of revisions, this command will parse commit
    messages for release notes then add them to the output file.

    Release notes are defined in commit messages as ReStructuredText
    directives. These have the form::

       .. directive:: title

          content

    Each ``directive`` maps to an output section in a generated release notes
    file, which itself is ReStructuredText. For example, the ``.. feature::``
    directive would map to a ``New Features`` section.

    Release note directives can be either short-form or long-form. In short-
    form, ``title`` is omitted and the release note is rendered as a bullet
    list. In long form, a sub-section with the title ``title`` is added to the
    section.

    The ``FILE`` argument controls the output file to write gathered release
    notes to. The format of the file is::

       Section 1
       =========

       ...

       Section 2
       =========

       ...

    Only sections with defined release notes are emitted.

    If a section only has short-form notes, it will consist of bullet list::

       Section
       =======

       * Release note 1
       * Release note 2

    If a section has long-form notes, sub-sections will be emitted::

       Section
       =======

       Note 1 Title
       ------------

       Description of the first long-form note.

       Note 2 Title
       ------------

       Description of the second long-form note.

    If the ``FILE`` argument points to an existing file, that file will be
    parsed for release notes having the format that would be generated by this
    command. The notes from the processed commit messages will be *merged*
    into this parsed set.

    During release notes merging:

    * Duplicate items are automatically ignored
    * Items that are different are automatically ignored if the similarity is
      greater than a threshold.

    This means that the release notes file can be updated independently from
    this command and changes should not be lost when running this command on
    that file. A particular use case for this is to tweak the wording of a
    release note after it has been added to the release notes file.

    The -c/--check option checks the commit message for invalid admonitions.

    The -l/--list option, presents the user with a list of existing available
    admonitions along with their title. This also includes the custom
    admonitions (if any).
    """

    opts = pycompat.byteskwargs(opts)
    sections = releasenotessections(ui, repo)

    # --list cannot be combined with --rev or --check.
    cmdutil.check_incompatible_arguments(opts, b'list', [b'rev', b'check'])

    if opts.get(b'list'):
        return _getadmonitionlist(ui, sections)

    rev = opts.get(b'rev')
    # Default to all non-public changesets when no revset is given.
    revs = scmutil.revrange(repo, [rev or b'not public()'])
    if opts.get(b'check'):
        return checkadmonitions(ui, repo, sections.names(), revs)

    incoming = parsenotesfromrevisions(repo, sections.names(), revs)

    if file_ is None:
        # No output file: render the gathered notes to the pager.
        ui.pager(b'releasenotes')
        return ui.write(serializenotes(sections, incoming))

    try:
        with open(file_, b'rb') as fh:
            notes = parsereleasenotesfile(sections, fh.read())
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise

        # A missing output file simply means we start from scratch.
        notes = parsedreleasenotes()

    notes.merge(ui, incoming)

    with open(file_, b'wb') as fh:
        fh.write(serializenotes(sections, notes))
686
686
687
687
@command(b'debugparsereleasenotes', norepo=True)
def debugparsereleasenotes(ui, path, repo=None):
    """parse release notes and print resulting data structure"""
    # Read the notes either from stdin ('-') or from the named file.
    if path == b'-':
        data = pycompat.stdin.read()
    else:
        with open(path, b'rb') as src:
            data = src.read()

    sections = releasenotessections(ui, repo)

    notes = parsereleasenotesfile(sections, data)

    # Dump the parsed structure, one nesting level per indent step.
    for section in notes:
        ui.write(_(b'section: %s\n') % section)
        for title, paragraphs in notes.titledforsection(section):
            ui.write(_(b'  subsection: %s\n') % title)
            for para in paragraphs:
                ui.write(_(b'    paragraph: %s\n') % b' '.join(para))

        for paragraphs in notes.nontitledforsection(section):
            ui.write(_(b'  bullet point:\n'))
            for para in paragraphs:
                ui.write(_(b'    paragraph: %s\n') % b' '.join(para))
@@ -1,929 +1,929 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant changes to another parent revision,
10 This extension allows you to transplant changes to another parent revision,
11 possibly in another repository. The transplant is done using 'diff' patches.
11 possibly in another repository. The transplant is done using 'diff' patches.
12
12
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 map from a changeset hash to its hash in the source repository.
14 map from a changeset hash to its hash in the source repository.
15 '''
15 '''
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import os
18 import os
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial.pycompat import open
21 from mercurial.pycompat import open
22 from mercurial import (
22 from mercurial import (
23 bundlerepo,
23 bundlerepo,
24 cmdutil,
24 cmdutil,
25 error,
25 error,
26 exchange,
26 exchange,
27 hg,
27 hg,
28 logcmdutil,
28 logcmdutil,
29 match,
29 match,
30 merge,
30 merge,
31 node as nodemod,
31 node as nodemod,
32 patch,
32 patch,
33 pycompat,
33 pycompat,
34 registrar,
34 registrar,
35 revlog,
35 revlog,
36 revset,
36 revset,
37 scmutil,
37 scmutil,
38 smartset,
38 smartset,
39 state as statemod,
39 state as statemod,
40 util,
40 util,
41 vfs as vfsmod,
41 vfs as vfsmod,
42 )
42 )
43 from mercurial.utils import (
43 from mercurial.utils import (
44 procutil,
44 procutil,
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48
48
class TransplantError(error.Abort):
    """Abort subclass raised for transplant-specific failures."""
51
51
52
52
# Command registration table for this extension; populated by @command.
cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Declare the extension's config items; a default of None means "unset".
configtable = {}
configitem = registrar.configitem(configtable)

configitem(
    b'transplant', b'filter', default=None,
)
configitem(
    b'transplant', b'log', default=None,
)
70
70
71
71
class transplantentry(object):
    """Pair of node ids recording a single transplant.

    ``lnode`` is the local changeset hash; ``rnode`` is the corresponding
    hash in the source repository.
    """

    def __init__(self, lnode, rnode):
        self.lnode = lnode
        self.rnode = rnode
76
76
77
77
class transplants(object):
    """Persistent store of transplant records.

    Maps a source-repository node (``rnode``) to the list of
    ``transplantentry`` objects recording where it was transplanted
    locally.  The mapping is stored one ``lnode:rnode`` hex pair per line
    in ``transplantfile`` under ``path``.

    Fix applied: the local variables previously named ``list`` shadowed
    the builtin; they are renamed to ``entries`` (no behavior change).
    """

    def __init__(self, path=None, transplantfile=None, opener=None):
        self.path = path
        self.transplantfile = transplantfile
        self.opener = opener

        if not opener:
            self.opener = vfsmod.vfs(self.path)
        # rnode -> [transplantentry, ...]
        self.transplants = {}
        # True when the in-memory state differs from the on-disk file.
        self.dirty = False
        self.read()

    def read(self):
        """Load transplant records from disk, if the file exists."""
        abspath = os.path.join(self.path, self.transplantfile)
        if self.transplantfile and os.path.exists(abspath):
            for line in self.opener.read(self.transplantfile).splitlines():
                lnode, rnode = map(revlog.bin, line.split(b':'))
                entries = self.transplants.setdefault(rnode, [])
                entries.append(transplantentry(lnode, rnode))

    def write(self):
        """Write the records back to disk when dirty; clear the flag."""
        if self.dirty and self.transplantfile:
            if not os.path.isdir(self.path):
                os.mkdir(self.path)
            fp = self.opener(self.transplantfile, b'w')
            for entries in pycompat.itervalues(self.transplants):
                for t in entries:
                    l, r = map(nodemod.hex, (t.lnode, t.rnode))
                    fp.write(l + b':' + r + b'\n')
            fp.close()
        self.dirty = False

    def get(self, rnode):
        """Return the entries recorded for ``rnode`` (may be empty)."""
        return self.transplants.get(rnode) or []

    def set(self, lnode, rnode):
        """Record that ``rnode`` was transplanted locally as ``lnode``."""
        entries = self.transplants.setdefault(rnode, [])
        entries.append(transplantentry(lnode, rnode))
        self.dirty = True

    def remove(self, transplant):
        """Forget a previously recorded transplant entry."""
        entries = self.transplants.get(transplant.rnode)
        if entries:
            del entries[entries.index(transplant)]
            self.dirty = True
124
124
125 class transplanter(object):
125 class transplanter(object):
126 def __init__(self, ui, repo, opts):
126 def __init__(self, ui, repo, opts):
127 self.ui = ui
127 self.ui = ui
128 self.path = repo.vfs.join(b'transplant')
128 self.path = repo.vfs.join(b'transplant')
129 self.opener = vfsmod.vfs(self.path)
129 self.opener = vfsmod.vfs(self.path)
130 self.transplants = transplants(
130 self.transplants = transplants(
131 self.path, b'transplants', opener=self.opener
131 self.path, b'transplants', opener=self.opener
132 )
132 )
133
133
134 def getcommiteditor():
134 def getcommiteditor():
135 editform = cmdutil.mergeeditform(repo[None], b'transplant')
135 editform = cmdutil.mergeeditform(repo[None], b'transplant')
136 return cmdutil.getcommiteditor(
136 return cmdutil.getcommiteditor(
137 editform=editform, **pycompat.strkwargs(opts)
137 editform=editform, **pycompat.strkwargs(opts)
138 )
138 )
139
139
140 self.getcommiteditor = getcommiteditor
140 self.getcommiteditor = getcommiteditor
141
141
142 def applied(self, repo, node, parent):
142 def applied(self, repo, node, parent):
143 '''returns True if a node is already an ancestor of parent
143 '''returns True if a node is already an ancestor of parent
144 or is parent or has already been transplanted'''
144 or is parent or has already been transplanted'''
145 if hasnode(repo, parent):
145 if hasnode(repo, parent):
146 parentrev = repo.changelog.rev(parent)
146 parentrev = repo.changelog.rev(parent)
147 if hasnode(repo, node):
147 if hasnode(repo, node):
148 rev = repo.changelog.rev(node)
148 rev = repo.changelog.rev(node)
149 reachable = repo.changelog.ancestors(
149 reachable = repo.changelog.ancestors(
150 [parentrev], rev, inclusive=True
150 [parentrev], rev, inclusive=True
151 )
151 )
152 if rev in reachable:
152 if rev in reachable:
153 return True
153 return True
154 for t in self.transplants.get(node):
154 for t in self.transplants.get(node):
155 # it might have been stripped
155 # it might have been stripped
156 if not hasnode(repo, t.lnode):
156 if not hasnode(repo, t.lnode):
157 self.transplants.remove(t)
157 self.transplants.remove(t)
158 return False
158 return False
159 lnoderev = repo.changelog.rev(t.lnode)
159 lnoderev = repo.changelog.rev(t.lnode)
160 if lnoderev in repo.changelog.ancestors(
160 if lnoderev in repo.changelog.ancestors(
161 [parentrev], lnoderev, inclusive=True
161 [parentrev], lnoderev, inclusive=True
162 ):
162 ):
163 return True
163 return True
164 return False
164 return False
165
165
    def apply(self, repo, source, revmap, merges, opts=None):
        '''apply the revisions in revmap one by one in revision order

        revmap maps source revision numbers to source nodes; merges is a
        collection of nodes that should be merged instead of transplanted.
        Changesets already applied are skipped; plain descendants of the
        working directory parent are batched up and pulled directly when
        no --filter/--log rewriting is requested.
        '''
        if opts is None:
            opts = {}
        revs = sorted(revmap)
        p1 = repo.dirstate.p1()
        pulls = []
        diffopts = patch.difffeatureopts(self.ui, opts)
        diffopts.git = True

        lock = tr = None
        try:
            lock = repo.lock()
            tr = repo.transaction(b'transplant')
            for rev in revs:
                node = revmap[rev]
                revstr = b'%d:%s' % (rev, nodemod.short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(
                        _(b'skipping already applied revision %s\n') % revstr
                    )
                    continue

                parents = source.changelog.parents(node)
                if not (opts.get(b'filter') or opts.get(b'log')):
                    # If the changeset parent is the same as the
                    # wdir's parent, just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        # flush the queued straight-line pulls before we
                        # fall back to patch-based transplanting
                        if source != repo:
                            exchange.pull(repo, source.peer(), heads=pulls)
                        merge.update(
                            repo, pulls[-1], branchmerge=False, force=False
                        )
                        p1 = repo.dirstate.p1()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we
                    # couldn't transplant after the latest even if
                    # transplants before them fail.
                    domerge = True
                    if not hasnode(repo, node):
                        exchange.pull(repo, source.peer(), heads=[node])

                skipmerge = False
                if parents[1] != revlog.nullid:
                    # merge changesets need an explicit --parent to pick
                    # which side to diff against; otherwise skip them
                    if not opts.get(b'parent'):
                        self.ui.note(
                            _(b'skipping merge changeset %d:%s\n')
                            % (rev, nodemod.short(node))
                        )
                        skipmerge = True
                    else:
                        parent = source.lookup(opts[b'parent'])
                        if parent not in parents:
                            raise error.Abort(
                                _(b'%s is not a parent of %s')
                                % (nodemod.short(parent), nodemod.short(node))
                            )
                else:
                    parent = parents[0]

                if skipmerge:
                    patchfile = None
                else:
                    # write the changeset's diff to a temporary patch file
                    fd, patchfile = pycompat.mkstemp(prefix=b'hg-transplant-')
                    fp = os.fdopen(fd, 'wb')
                    gen = patch.diff(source, parent, node, opts=diffopts)
                    for chunk in gen:
                        fp.write(chunk)
                    fp.close()

                # remove from revmap first so a failure journal written by
                # applyone() records only the still-pending changesets
                del revmap[rev]
                if patchfile or domerge:
                    try:
                        try:
                            n = self.applyone(
                                repo,
                                node,
                                source.changelog.read(node),
                                patchfile,
                                merge=domerge,
                                log=opts.get(b'log'),
                                filter=opts.get(b'filter'),
                            )
                        except TransplantError:
                            # Do not rollback, it is up to the user to
                            # fix the merge or cancel everything
                            tr.close()
                            raise
                        if n and domerge:
                            self.ui.status(
                                _(b'%s merged at %s\n')
                                % (revstr, nodemod.short(n))
                            )
                        elif n:
                            self.ui.status(
                                _(b'%s transplanted to %s\n')
                                % (nodemod.short(node), nodemod.short(n))
                            )
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            tr.close()
            if pulls:
                # flush any trailing straight-line pulls
                exchange.pull(repo, source.peer(), heads=pulls)
                merge.update(repo, pulls[-1], branchmerge=False, force=False)
        finally:
            # persist the remaining series and transplant markers even on
            # failure so the operation can be resumed
            self.saveseries(revmap, merges)
            self.transplants.write()
            if tr:
                tr.release()
            if lock:
                lock.release()
286
286
    def filter(self, filter, node, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it

        Runs the external *filter* command with a temporary header file
        (user/date/message) as $1 and *patchfile* as $2, then re-reads the
        possibly edited header. Returns the (user, date, msg) triple to use
        for the commit.
        '''

        self.ui.status(_(b'filtering %s\n') % patchfile)
        # changelog entry layout: index 1 = user, 2 = (time, tz), 4 = message
        user, date, msg = (changelog[1], changelog[2], changelog[4])
        fd, headerfile = pycompat.mkstemp(prefix=b'hg-transplant-')
        fp = os.fdopen(fd, 'wb')
        fp.write(b"# HG changeset patch\n")
        fp.write(b"# User %s\n" % user)
        fp.write(b"# Date %d %d\n" % date)
        fp.write(msg + b'\n')
        fp.close()

        try:
            self.ui.system(
                b'%s %s %s'
                % (
                    filter,
                    procutil.shellquote(headerfile),
                    procutil.shellquote(patchfile),
                ),
                environ={
                    b'HGUSER': changelog[1],
                    b'HGREVISION': nodemod.hex(node),
                },
                # abort with the prefixed message if the filter exits non-zero
                onerr=error.Abort,
                errprefix=_(b'filter failed'),
                blockedtag=b'transplant_filter',
            )
            # pick up any edits the filter made to the header file
            user, date, msg = self.parselog(open(headerfile, b'rb'))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)
321
321
    def applyone(
        self, repo, node, cl, patchfile, merge=False, log=False, filter=None
    ):
        '''apply the patch in patchfile to the repository as a transplant

        *cl* is the source changelog entry for *node*. Returns the new
        node, or None when the result was an empty changeset. On patch
        failure, journals the metadata so the user can fix up and run
        :hg:`transplant --continue`, then raises TransplantError.
        '''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = b"%d %d" % (time, timezone)
        # mark the new commit so applied() can recognize it later
        extra = {b'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += b'\n(transplanted from %s)' % nodemod.hex(node)

        self.ui.status(_(b'applying %s\n') % nodemod.short(node))
        self.ui.note(b'%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise error.Abort(_(b'can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                files = list(files)
            except Exception as inst:
                # drop the stale series file; the journal written below is
                # what --continue will recover from
                seriespath = os.path.join(self.path, b'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(stringutil.forcebytestr(inst) + b'\n')
                raise TransplantError(
                    _(
                        b'fix up the working directory and run '
                        b'hg transplant --continue'
                    )
                )
        else:
            files = None
        if merge:
            # record the transplanted node as second parent of the merge
            p1 = repo.dirstate.p1()
            repo.setparents(p1, node)
            m = match.always()
        else:
            m = match.exact(files)

        n = repo.commit(
            message,
            user,
            date,
            extra=extra,
            match=m,
            editor=self.getcommiteditor(),
        )
        if not n:
            self.ui.warn(
                _(b'skipping emptied changeset %s\n') % nodemod.short(node)
            )
            return None
        if not merge:
            # remember the mapping new node -> source node
            self.transplants.set(n, node)

        return n
386
386
387 def canresume(self):
387 def canresume(self):
388 return os.path.exists(os.path.join(self.path, b'journal'))
388 return os.path.exists(os.path.join(self.path, b'journal'))
389
389
    def resume(self, repo, source, opts):
        '''recover last transaction and apply remaining changesets'''
        # first finish (or skip) the changeset that was interrupted
        if os.path.exists(os.path.join(self.path, b'journal')):
            n, node = self.recover(repo, source, opts)
            if n:
                self.ui.status(
                    _(b'%s transplanted as %s\n')
                    % (nodemod.short(node), nodemod.short(n))
                )
            else:
                self.ui.status(
                    _(b'%s skipped due to empty diff\n')
                    % (nodemod.short(node),)
                )
        # with no series file there is nothing left to apply; just flush
        # the transplant markers
        seriespath = os.path.join(self.path, b'series')
        if not os.path.exists(seriespath):
            self.transplants.write()
            return
        nodes, merges = self.readseries()
        revmap = {}
        for n in nodes:
            revmap[source.changelog.rev(n)] = n
        # apply() rewrites the series itself, so remove the stale file
        os.unlink(seriespath)

        self.apply(repo, source, revmap, merges, opts)
415
415
    def recover(self, repo, source, opts):
        '''commit working directory using journal metadata

        Returns (newnode, sourcenode); newnode is None when the working
        directory held no changes to commit. Aborts if the journal is
        corrupt or the working directory is not at the recorded parent.
        '''
        node, user, date, message, parents = self.readlog()
        merge = False

        if not user or not date or not message or not parents[0]:
            raise error.Abort(_(b'transplant log file is corrupt'))

        parent = parents[0]
        if len(parents) > 1:
            # two recorded parents: either honor an explicit --parent or
            # finish the transplant as a merge
            if opts.get(b'parent'):
                parent = source.lookup(opts[b'parent'])
                if parent not in parents:
                    raise error.Abort(
                        _(b'%s is not a parent of %s')
                        % (nodemod.short(parent), nodemod.short(node))
                    )
            else:
                merge = True

        extra = {b'transplant_source': node}
        try:
            p1 = repo.dirstate.p1()
            if p1 != parent:
                raise error.Abort(
                    _(b'working directory not at transplant parent %s')
                    % nodemod.hex(parent)
                )
            if merge:
                repo.setparents(p1, parents[1])
            st = repo.status()
            modified, added, removed, deleted = (
                st.modified,
                st.added,
                st.removed,
                st.deleted,
            )
            # only commit when there is something to record (a merge
            # always is, even with no file changes)
            if merge or modified or added or removed or deleted:
                n = repo.commit(
                    message,
                    user,
                    date,
                    extra=extra,
                    editor=self.getcommiteditor(),
                )
                if not n:
                    raise error.Abort(_(b'commit failed'))
                if not merge:
                    self.transplants.set(n, node)
            else:
                n = None
            # the journal is consumed; remove it
            self.unlog()

            return n, node
        finally:
            # TODO: get rid of this meaningless try/finally enclosing.
            # this is kept only to reduce changes in a patch.
            pass
474
474
475 def stop(self, ui, repo):
475 def stop(self, ui, repo):
476 """logic to stop an interrupted transplant"""
476 """logic to stop an interrupted transplant"""
477 if self.canresume():
477 if self.canresume():
478 startctx = repo[b'.']
478 startctx = repo[b'.']
479 hg.updaterepo(repo, startctx.node(), overwrite=True)
479 hg.updaterepo(repo, startctx.node(), overwrite=True)
480 ui.status(_(b"stopped the interrupted transplant\n"))
480 ui.status(_(b"stopped the interrupted transplant\n"))
481 ui.status(
481 ui.status(
482 _(b"working directory is now at %s\n") % startctx.hex()[:12]
482 _(b"working directory is now at %s\n") % startctx.hex()[:12]
483 )
483 )
484 self.unlog()
484 self.unlog()
485 return 0
485 return 0
486
486
487 def readseries(self):
487 def readseries(self):
488 nodes = []
488 nodes = []
489 merges = []
489 merges = []
490 cur = nodes
490 cur = nodes
491 for line in self.opener.read(b'series').splitlines():
491 for line in self.opener.read(b'series').splitlines():
492 if line.startswith(b'# Merges'):
492 if line.startswith(b'# Merges'):
493 cur = merges
493 cur = merges
494 continue
494 continue
495 cur.append(revlog.bin(line))
495 cur.append(revlog.bin(line))
496
496
497 return (nodes, merges)
497 return (nodes, merges)
498
498
499 def saveseries(self, revmap, merges):
499 def saveseries(self, revmap, merges):
500 if not revmap:
500 if not revmap:
501 return
501 return
502
502
503 if not os.path.isdir(self.path):
503 if not os.path.isdir(self.path):
504 os.mkdir(self.path)
504 os.mkdir(self.path)
505 series = self.opener(b'series', b'w')
505 series = self.opener(b'series', b'w')
506 for rev in sorted(revmap):
506 for rev in sorted(revmap):
507 series.write(nodemod.hex(revmap[rev]) + b'\n')
507 series.write(nodemod.hex(revmap[rev]) + b'\n')
508 if merges:
508 if merges:
509 series.write(b'# Merges\n')
509 series.write(b'# Merges\n')
510 for m in merges:
510 for m in merges:
511 series.write(nodemod.hex(m) + b'\n')
511 series.write(nodemod.hex(m) + b'\n')
512 series.close()
512 series.close()
513
513
514 def parselog(self, fp):
514 def parselog(self, fp):
515 parents = []
515 parents = []
516 message = []
516 message = []
517 node = revlog.nullid
517 node = revlog.nullid
518 inmsg = False
518 inmsg = False
519 user = None
519 user = None
520 date = None
520 date = None
521 for line in fp.read().splitlines():
521 for line in fp.read().splitlines():
522 if inmsg:
522 if inmsg:
523 message.append(line)
523 message.append(line)
524 elif line.startswith(b'# User '):
524 elif line.startswith(b'# User '):
525 user = line[7:]
525 user = line[7:]
526 elif line.startswith(b'# Date '):
526 elif line.startswith(b'# Date '):
527 date = line[7:]
527 date = line[7:]
528 elif line.startswith(b'# Node ID '):
528 elif line.startswith(b'# Node ID '):
529 node = revlog.bin(line[10:])
529 node = revlog.bin(line[10:])
530 elif line.startswith(b'# Parent '):
530 elif line.startswith(b'# Parent '):
531 parents.append(revlog.bin(line[9:]))
531 parents.append(revlog.bin(line[9:]))
532 elif not line.startswith(b'# '):
532 elif not line.startswith(b'# '):
533 inmsg = True
533 inmsg = True
534 message.append(line)
534 message.append(line)
535 if None in (user, date):
535 if None in (user, date):
536 raise error.Abort(
536 raise error.Abort(
537 _(b"filter corrupted changeset (no user or date)")
537 _(b"filter corrupted changeset (no user or date)")
538 )
538 )
539 return (node, user, date, b'\n'.join(message), parents)
539 return (node, user, date, b'\n'.join(message), parents)
540
540
541 def log(self, user, date, message, p1, p2, merge=False):
541 def log(self, user, date, message, p1, p2, merge=False):
542 '''journal changelog metadata for later recover'''
542 '''journal changelog metadata for later recover'''
543
543
544 if not os.path.isdir(self.path):
544 if not os.path.isdir(self.path):
545 os.mkdir(self.path)
545 os.mkdir(self.path)
546 fp = self.opener(b'journal', b'w')
546 fp = self.opener(b'journal', b'w')
547 fp.write(b'# User %s\n' % user)
547 fp.write(b'# User %s\n' % user)
548 fp.write(b'# Date %s\n' % date)
548 fp.write(b'# Date %s\n' % date)
549 fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
549 fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
550 fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
550 fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
551 if merge:
551 if merge:
552 fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
552 fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
553 fp.write(message.rstrip() + b'\n')
553 fp.write(message.rstrip() + b'\n')
554 fp.close()
554 fp.close()
555
555
556 def readlog(self):
556 def readlog(self):
557 return self.parselog(self.opener(b'journal'))
557 return self.parselog(self.opener(b'journal'))
558
558
559 def unlog(self):
559 def unlog(self):
560 '''remove changelog journal'''
560 '''remove changelog journal'''
561 absdst = os.path.join(self.path, b'journal')
561 absdst = os.path.join(self.path, b'journal')
562 if os.path.exists(absdst):
562 if os.path.exists(absdst):
563 os.unlink(absdst)
563 os.unlink(absdst)
564
564
565 def transplantfilter(self, repo, source, root):
565 def transplantfilter(self, repo, source, root):
566 def matchfn(node):
566 def matchfn(node):
567 if self.applied(repo, node, root):
567 if self.applied(repo, node, root):
568 return False
568 return False
569 if source.changelog.parents(node)[1] != revlog.nullid:
569 if source.changelog.parents(node)[1] != revlog.nullid:
570 return False
570 return False
571 extra = source.changelog.read(node)[5]
571 extra = source.changelog.read(node)[5]
572 cnode = extra.get(b'transplant_source')
572 cnode = extra.get(b'transplant_source')
573 if cnode and self.applied(repo, cnode, root):
573 if cnode and self.applied(repo, cnode, root):
574 return False
574 return False
575 return True
575 return True
576
576
577 return matchfn
577 return matchfn
578
578
579
579
def hasnode(repo, node):
    """Return True when repo's changelog knows *node*; False on lookup errors."""
    try:
        rev = repo.changelog.rev(node)
    except error.StorageError:
        # missing or unreadable revision simply means "not present"
        return False
    return rev is not None
585
585
586
586
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets

    Shows each candidate node and prompts for an action. Returns a
    (transplants, merges) pair of selected nodes; both are empty tuples
    when the user quits.
    '''
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    transplants = []
    merges = []
    prompt = _(
        b'apply changeset? [ynmpcq?]:'
        b'$$ &yes, transplant this changeset'
        b'$$ &no, skip this changeset'
        b'$$ &merge at this changeset'
        b'$$ show &patch'
        b'$$ &commit selected changesets'
        b'$$ &quit and cancel transplant'
        b'$$ &? (show this help)'
    )
    for node in nodes:
        displayer.show(repo[node])
        action = None
        # '?' and 'p' are informational: they reset action to None so the
        # prompt is shown again for the same changeset
        while not action:
            choice = ui.promptchoice(prompt)
            action = b'ynmpcq?'[choice : choice + 1]
            if action == b'?':
                for c, t in ui.extractchoices(prompt)[1]:
                    ui.write(b'%s: %s\n' % (c, t))
                action = None
            elif action == b'p':
                parent = repo.changelog.parents(node)[0]
                for chunk in patch.diff(repo, parent, node):
                    ui.write(chunk)
                action = None
        if action == b'y':
            transplants.append(node)
        elif action == b'm':
            merges.append(node)
        elif action == b'c':
            # commit what was selected so far
            break
        elif action == b'q':
            # cancel everything selected so far
            transplants = ()
            merges = ()
            break
    displayer.close()
    return (transplants, merges)
629
629
630
630
@command(
    b'transplant',
    [
        (
            b's',
            b'source',
            b'',
            _(b'transplant changesets from REPO'),
            _(b'REPO'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'use this source changeset as head'),
            _(b'REV'),
        ),
        (
            b'a',
            b'all',
            None,
            _(b'pull all changesets up to the --branch revisions'),
        ),
        (b'p', b'prune', [], _(b'skip over REV'), _(b'REV')),
        (b'm', b'merge', [], _(b'merge at REV'), _(b'REV')),
        (
            b'',
            b'parent',
            b'',
            _(b'parent to choose when transplanting merge'),
            _(b'REV'),
        ),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (b'', b'log', None, _(b'append transplant info to log message')),
        (b'', b'stop', False, _(b'stop interrupted transplant')),
        (
            b'c',
            b'continue',
            None,
            _(b'continue last transplant session after fixing conflicts'),
        ),
        (
            b'',
            b'filter',
            b'',
            _(b'filter changesets through command'),
            _(b'CMD'),
        ),
    ],
    _(
        b'hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
        b'[-m REV] [REV]...'
    ),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. The changesets
    are copied and will thus appear twice in the history with different
    identities.

    Consider using the graft command if everything is inside the same
    repository - it will use merges and will usually give a better result.
    Use the rebase extension if the changesets are unpublished and you want
    to move them instead of copying them.

    If --log is specified, log messages will have a comment appended
    of the form::

      (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message as
    $1 and the patch as $2.

    --source/-s specifies another repository to use for selecting changesets,
    just as if it temporarily had been pulled.
    If --branch/-b is specified, these revisions will be used as
    heads when deciding which changesets to transplant, just as if only
    these revisions had been pulled.
    If --all/-a is specified, all the revisions up to the heads specified
    with --branch will be transplanted.

    Example:

    - transplant all changes up to REV on top of your current revision::

        hg transplant --branch REV --all

    You can optionally mark selected transplanted changesets as merge
    changesets. You will not be prompted to transplant any ancestors
    of a merged transplant, and you can merge descendants of them
    normally instead of transplanting them.

    Merge changesets may be transplanted directly by specifying the
    proper parent changeset by calling :hg:`transplant --parent`.

    If no merges or revisions are provided, :hg:`transplant` will
    start an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand
    and then resume where you left off by calling :hg:`transplant
    --continue/-c`.
    '''
    # hold the wlock for the whole operation so nothing else mutates the
    # working copy mid-transplant
    with repo.wlock():
        return _dotransplant(ui, repo, *revs, **opts)
739
739
740
740
def _dotransplant(ui, repo, *revs, **opts):
    """Worker for the transplant command (called with the wlock held).

    Validates option combinations, resolves the transplant source (a remote
    repository or the local one), builds the rev -> node map of changesets
    to transplant, and hands everything to the transplanter.
    """

    def incwalk(repo, csets, match=util.always):
        # Yield incoming changesets (from a remote source) passing the filter.
        for node in csets:
            if match(node):
                yield node

    def transplantwalk(repo, dest, heads, match=util.always):
        '''Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used.'''
        if not heads:
            heads = repo.heads()
        ancestors = []
        ctx = repo[dest]
        for head in heads:
            ancestors.append(ctx.ancestor(repo[head]).node())
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # --continue and --stop are each exclusive with the listed flags;
        # check_incompatible_arguments() takes the others as one iterable.
        if opts.get(b'continue'):
            cmdutil.check_incompatible_arguments(
                opts, b'continue', [b'branch', b'all', b'merge']
            )
            return
        if opts.get(b'stop'):
            cmdutil.check_incompatible_arguments(
                opts, b'stop', [b'branch', b'all', b'merge']
            )
            return
        if not (
            opts.get(b'source')
            or revs
            or opts.get(b'merge')
            or opts.get(b'branch')
        ):
            raise error.Abort(
                _(
                    b'no source URL, branch revision, or revision '
                    b'list provided'
                )
            )
        if opts.get(b'all'):
            if not opts.get(b'branch'):
                raise error.Abort(_(b'--all requires a branch revision'))
            if revs:
                raise error.Abort(
                    _(b'--all is incompatible with a revision list')
                )

    opts = pycompat.byteskwargs(opts)
    checkopts(opts, revs)

    if not opts.get(b'log'):
        # deprecated config: transplant.log
        opts[b'log'] = ui.config(b'transplant', b'log')
    if not opts.get(b'filter'):
        # deprecated config: transplant.filter
        opts[b'filter'] = ui.config(b'transplant', b'filter')

    tp = transplanter(ui, repo, opts)

    p1 = repo.dirstate.p1()
    if len(repo) > 0 and p1 == revlog.nullid:
        raise error.Abort(_(b'no revision checked out'))
    if opts.get(b'continue'):
        if not tp.canresume():
            raise error.Abort(_(b'no transplant to continue'))
    elif opts.get(b'stop'):
        if not tp.canresume():
            raise error.Abort(_(b'no interrupted transplant found'))
        return tp.stop(ui, repo)
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

    sourcerepo = opts.get(b'source')
    if sourcerepo:
        # Pull the candidate changesets from the remote into a bundle repo.
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
        target = set(heads)
        for r in revs:
            try:
                target.add(peer.lookup(r))
            except error.RepoError:
                # revision may only exist locally; resolved later
                pass
        source, csets, cleanupfn = bundlerepo.getremotechanges(
            ui, repo, peer, onlyheads=sorted(target), force=True
        )
    else:
        source = repo
        heads = pycompat.maplist(source.lookup, opts.get(b'branch', ()))
        cleanupfn = None

    try:
        if opts.get(b'continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get(b'prune'):
            prune = set(
                source[r].node()
                for r in scmutil.revrange(source, opts.get(b'prune'))
            )
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = pycompat.maplist(source.lookup, opts.get(b'merge', ()))
        revmap = {}
        if revs:
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source[r].node()
        elif opts.get(b'all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(
                    source, p1, heads, match=matchfn
                )
            if opts.get(b'all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
            for r in revs:
                revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        # drop the temporary bundle repo, if one was created
        if cleanupfn:
            cleanupfn()
875 cleanupfn()
876
876
877
877
def continuecmd(ui, repo):
    """logic to resume an interrupted transplant using
    'hg continue'"""
    with repo.wlock():
        tp = transplanter(ui, repo, {})
        return tp.resume(repo, repo, {})
883 return tp.resume(repo, repo, {})
884
884
885
885
revsetpredicate = registrar.revsetpredicate()


@revsetpredicate(b'transplanted([set])')
def revsettransplanted(repo, subset, x):
    """Transplanted changesets in set, or all transplanted changesets.
    """
    if x:
        s = revset.getset(repo, subset, x)
    else:
        s = subset
    # a changeset is "transplanted" iff it records a transplant_source extra
    return smartset.baseset(
        [r for r in s if repo[r].extra().get(b'transplant_source')]
    )
899 )
900
900
901
901
templatekeyword = registrar.templatekeyword()


@templatekeyword(b'transplanted', requires={b'ctx'})
def kwtransplanted(context, mapping):
    """String. The node identifier of the transplanted
    changeset if any."""
    ctx = context.resource(mapping, b'ctx')
    n = ctx.extra().get(b'transplant_source')
    # empty bytes when the changeset was not transplanted
    return n and nodemod.hex(n) or b''
911 return n and nodemod.hex(n) or b''
912
912
913
913
def extsetup(ui):
    # Register the transplant journal as an "unfinished" state so generic
    # commands (hg continue / hg abort / status hints) know about it.
    statemod.addunfinished(
        b'transplant',
        fname=b'transplant/journal',
        clearable=True,
        continuefunc=continuecmd,
        statushint=_(
            b'To continue: hg transplant --continue\n'
            b'To stop: hg transplant --stop'
        ),
        cmdhint=_(b"use 'hg transplant --continue' or 'hg transplant --stop'"),
    )


# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsettransplanted, kwtransplanted]
929 i18nfunctions = [revsettransplanted, kwtransplanted]
@@ -1,4072 +1,4072 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from .pycompat import (
22 from .pycompat import (
23 getattr,
23 getattr,
24 open,
24 open,
25 setattr,
25 setattr,
26 )
26 )
27 from .thirdparty import attr
27 from .thirdparty import attr
28
28
29 from . import (
29 from . import (
30 bookmarks,
30 bookmarks,
31 changelog,
31 changelog,
32 copies,
32 copies,
33 crecord as crecordmod,
33 crecord as crecordmod,
34 dirstateguard,
34 dirstateguard,
35 encoding,
35 encoding,
36 error,
36 error,
37 formatter,
37 formatter,
38 logcmdutil,
38 logcmdutil,
39 match as matchmod,
39 match as matchmod,
40 merge as mergemod,
40 merge as mergemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 pathutil,
44 pathutil,
45 phases,
45 phases,
46 pycompat,
46 pycompat,
47 repair,
47 repair,
48 revlog,
48 revlog,
49 rewriteutil,
49 rewriteutil,
50 scmutil,
50 scmutil,
51 smartset,
51 smartset,
52 state as statemod,
52 state as statemod,
53 subrepoutil,
53 subrepoutil,
54 templatekw,
54 templatekw,
55 templater,
55 templater,
56 util,
56 util,
57 vfs as vfsmod,
57 vfs as vfsmod,
58 )
58 )
59
59
60 from .utils import (
60 from .utils import (
61 dateutil,
61 dateutil,
62 stringutil,
62 stringutil,
63 )
63 )
64
64
if pycompat.TYPE_CHECKING:
    from typing import (
        Any,
        Dict,
    )

    # keep pyflakes from flagging the typing names as unused
    for t in (Any, Dict):
        assert t

stringio = util.stringio

# templates of common command options

dryrunopts = [
    (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
]

confirmopts = [
    (b'', b'confirm', None, _(b'ask before applying actions')),
]

remoteopts = [
    (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
    (
        b'',
        b'remotecmd',
        b'',
        _(b'specify hg command to run on the remote side'),
        _(b'CMD'),
    ),
    (
        b'',
        b'insecure',
        None,
        _(b'do not verify server certificate (ignoring web.cacerts config)'),
    ),
]

walkopts = [
    (
        b'I',
        b'include',
        [],
        _(b'include names matching the given patterns'),
        _(b'PATTERN'),
    ),
    (
        b'X',
        b'exclude',
        [],
        _(b'exclude names matching the given patterns'),
        _(b'PATTERN'),
    ),
]

commitopts = [
    (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
]

commitopts2 = [
    (
        b'd',
        b'date',
        b'',
        _(b'record the specified date as commit date'),
        _(b'DATE'),
    ),
    (
        b'u',
        b'user',
        b'',
        _(b'record the specified user as committer'),
        _(b'USER'),
    ),
]

commitopts3 = [
    (b'D', b'currentdate', None, _(b'record the current date as commit date')),
    (b'U', b'currentuser', None, _(b'record the current user as committer')),
]

formatteropts = [
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

templateopts = [
    (
        b'',
        b'style',
        b'',
        _(b'display using template map file (DEPRECATED)'),
        _(b'STYLE'),
    ),
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

logopts = [
    (b'p', b'patch', None, _(b'show patch')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
    (b'M', b'no-merges', None, _(b'do not show merges')),
    (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
    (b'G', b'graph', None, _(b"show the revision DAG")),
] + templateopts

diffopts = [
    (b'a', b'text', None, _(b'treat all files as text')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
    (b'', b'nodates', None, _(b'omit dates from diff headers')),
]

diffwsopts = [
    (
        b'w',
        b'ignore-all-space',
        None,
        _(b'ignore white space when comparing lines'),
    ),
    (
        b'b',
        b'ignore-space-change',
        None,
        _(b'ignore changes in the amount of white space'),
    ),
    (
        b'B',
        b'ignore-blank-lines',
        None,
        _(b'ignore changes whose lines are all blank'),
    ),
    (
        b'Z',
        b'ignore-space-at-eol',
        None,
        _(b'ignore changes in whitespace at EOL'),
    ),
]

diffopts2 = (
    [
        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
        (
            b'p',
            b'show-function',
            None,
            _(b'show which function each change is in'),
        ),
        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
    ]
    + diffwsopts
    + [
        (
            b'U',
            b'unified',
            b'',
            _(b'number of lines of context to show'),
            _(b'NUM'),
        ),
        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
        (
            b'',
            b'root',
            b'',
            _(b'produce diffs relative to subdirectory'),
            _(b'DIR'),
        ),
    ]
)

mergetoolopts = [
    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
]

similarityopts = [
    (
        b's',
        b'similarity',
        b'',
        _(b'guess renamed files by similarity (0<=s<=100)'),
        _(b'SIMILARITY'),
    )
]

subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]

debugrevlogopts = [
    (b'c', b'changelog', False, _(b'open changelog')),
    (b'm', b'manifest', False, _(b'open manifest')),
    (b'', b'dir', b'', _(b'open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = b"^HG: ------------------------ >8 ------------------------$"
261
261
262
262
def check_at_most_one_arg(opts, *args):
    """abort if more than one of the arguments are in opts

    Returns the unique argument or None if none of them were specified.
    """

    def to_display(name):
        # option keys use '_' internally but are shown to the user with '-'
        return pycompat.sysbytes(name).replace(b'_', b'-')

    previous = None
    for x in args:
        if opts.get(x):
            if previous:
                raise error.Abort(
                    _(b'cannot specify both --%s and --%s')
                    % (to_display(previous), to_display(x))
                )
            previous = x
    return previous
282
282
283
283
def check_incompatible_arguments(opts, first, others):
    """abort if the first argument is given along with any of the others

    Unlike check_at_most_one_arg(), `others` are not mutually exclusive
    among themselves, and they're passed as a single collection.
    """
    for other in others:
        check_at_most_one_arg(opts, first, other)
292
292
293
293
def resolvecommitoptions(ui, opts):
    """modify commit options dict to handle related options

    The return value indicates that ``rewrite.update-timestamp`` is the reason
    the ``date`` option is set.
    """
    check_at_most_one_arg(opts, b'date', b'currentdate')
    check_at_most_one_arg(opts, b'user', b'currentuser')

    datemaydiffer = False  # date-only change should be ignored?

    if opts.get(b'currentdate'):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
    elif (
        not opts.get(b'date')
        and ui.configbool(b'rewrite', b'update-timestamp')
        and opts.get(b'currentdate') is None
    ):
        # date comes from config rather than an explicit user request, so a
        # date-only difference should not count as a change
        opts[b'date'] = b'%d %d' % dateutil.makedate()
        datemaydiffer = True

    if opts.get(b'currentuser'):
        opts[b'user'] = ui.username()

    return datemaydiffer
319
319
320
320
def checknotesize(ui, opts):
    """ make sure note is of valid format """

    note = opts.get(b'note')
    if not note:
        return

    if len(note) > 255:
        raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
    if b'\n' in note:
        raise error.Abort(_(b"note cannot contain a newline"))
332
332
333
333
def ishunk(x):
    """Return True if `x` is a patch hunk (curses or plain record flavor)."""
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)
337
337
338
338
def newandmodified(chunks, originalchunks):
    """Split out files newly added *and* modified during an interactive
    selection.

    Returns a pair of sets: the filenames of hunks for new files that were
    not in the original chunk list, and the companion files (e.g. rename
    sources) that must also be restored with them.
    """
    newlyaddedandmodifiedfiles = set()
    alsorestore = set()
    for chunk in chunks:
        if (
            ishunk(chunk)
            and chunk.header.isnewfile()
            and chunk not in originalchunks
        ):
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
            alsorestore.update(
                set(chunk.header.files()) - {chunk.header.filename()}
            )
    return newlyaddedandmodifiedfiles, alsorestore
353
353
354
354
def parsealiases(cmd):
    """Split a command declaration like ``b'commit|ci'`` into its aliases."""
    return cmd.split(b"|")
357
357
358
358
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it afterwards.
    """

    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', b'')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write

    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)

    setattr(ui, 'write', wrap)
    return oldwrite
373
373
374
374
def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    """Let the user filter hunks, via curses when available.

    Falls back to the text-mode patch filter when curses is disabled or
    reports a fallback error.
    """
    try:
        if usecurses:
            if testfile:
                # test mode: drive the chunk selector from a script file
                recordfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                recordfn = crecordmod.chunkselector

            return crecordmod.filterpatch(
                ui, originalhunks, recordfn, operation
            )
    except crecordmod.fallbackerror as e:
        ui.warn(b'%s\n' % e)
        ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)
393
393
394
394
def recordfilter(ui, originalhunks, match, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used for to build ui messages to indicate the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config(b'experimental', b'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(
            ui, originalhunks, usecurses, testfile, match, operation
        )
    finally:
        # always restore the unwrapped ui.write, even on error
        ui.write = oldwrite
    return newchunks, newopts
412
412
413
413
414 def dorecord(
414 def dorecord(
415 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
415 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
416 ):
416 ):
417 opts = pycompat.byteskwargs(opts)
417 opts = pycompat.byteskwargs(opts)
418 if not ui.interactive():
418 if not ui.interactive():
419 if cmdsuggest:
419 if cmdsuggest:
420 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
420 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
421 else:
421 else:
422 msg = _(b'running non-interactively')
422 msg = _(b'running non-interactively')
423 raise error.Abort(msg)
423 raise error.Abort(msg)
424
424
425 # make sure username is set before going interactive
425 # make sure username is set before going interactive
426 if not opts.get(b'user'):
426 if not opts.get(b'user'):
427 ui.username() # raise exception, username not provided
427 ui.username() # raise exception, username not provided
428
428
    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.

        NOTE(review): this closure reads ``pats``, ``filterfn``,
        ``backupall``, ``commitfunc`` and friends from the enclosing
        scope (not visible in this hunk) — confirm against the enclosing
        function's signature before changing them.
        """
        # Refuse to start on top of another unfinished operation, except
        # when we are the interactive half of `hg unshelve -i`.
        if not opts.get(b'interactive-unshelve'):
            checkunfinished(repo, commit=True)
        wctx = repo[None]
        # A merge in progress cannot be partially committed.
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(
                _(
                    b'cannot partially commit a merge '
                    b'(use "hg commit" instead)'
                )
            )

        def fail(f, msg):
            # Abort on bad patterns/files unless --force is given.
            raise error.Abort(b'%s: %s' % (f, msg))

        force = opts.get(b'force')
        if not force:
            match = matchmod.badmatch(match, fail)

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status, so work on a
            # deep-ish copy and keep the original intact.
            tmpstatus = scmutil.status(
                copymod.copy(status.modified),
                copymod.copy(status.added),
                copymod.copy(status.removed),
                copymod.copy(status.deleted),
                copymod.copy(status.unknown),
                copymod.copy(status.ignored),
                copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
            )

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True
            )
            # Dirty subrepos cannot be recorded interactively.
            for s in subs:
                if s in commitsubs:
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, match, status, fail)
        # Build a git-style diff with function context; dates would only
        # add noise to the hunks shown to the user.
        diffopts = patch.difffeatureopts(
            ui,
            opts=opts,
            whitespace=True,
            section=b'commands',
            configprefix=b'commit.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks, match)
        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir. We also will need to restore
        # files that were the sources of renames so that the patch application
        # works.
        newlyaddedandmodifiedfiles, alsorestore = newandmodified(
            chunks, originalchunks
        )
        # Files actually touched by the selected hunks; headers without a
        # files() method are skipped.
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_(b'no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [
                f
                for f in newfiles
                if f in modified or f in newlyaddedandmodifiedfiles
            ]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join(b'record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # A leftover backup dir from a previous run is fine.
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(
                    prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
                )
                os.close(fd)
                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # Serialize the kept hunks (only for backed-up files) into an
            # in-memory patch.
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get(b'review', False):
                patchtext = (
                    crecordmod.diffhelptext
                    + crecordmod.patchhelptext
                    + fp.read()
                )
                reviewedpatch = ui.edit(
                    patchtext, b"", action=b"diff", repopath=repo.path
                )
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # Newly added+modified files must be removed so the filtered
            # patch can re-create them from scratch.
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
                mergemod.update(
                    repo,
                    repo.dirstate.p1(),
                    branchmerge=False,
                    force=True,
                    matcher=m,
                )

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(b'applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in pycompat.iteritems(backups):
                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == b'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice
                    # the file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # Best-effort cleanup: never let restore errors mask the
                # outcome of the commit itself.
                pass
638
638
    def recordinwlock(ui, repo, message, match, opts):
        # Run the interactive record session while holding the working
        # directory lock, so no other process can mutate the working copy
        # between recordfunc()'s backup, patch and restore steps.
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)
642
642
643 return commit(ui, repo, recordinwlock, pats, opts)
643 return commit(ui, repo, recordinwlock, pats, opts)
644
644
645
645
class dirnode(object):
    """A directory of the user's working copy, as seen by status tersing.

    Attributes:
      path     -- path to this directory, without a trailing '/'
      statuses -- set of status characters observed anywhere at or below
                  this directory (direct files and all subdirectories)
      files    -- list of (name, status) pairs for direct children
      subdirs  -- mapping of child directory name -> dirnode
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record *filename* as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Record *filename* (relative to this directory) with *status*.

        When the name contains a path separator, recurse into the matching
        subdirectory (creating its dirnode on demand); otherwise store the
        file right here.  Either way, remember the status character.
        """
        if b'/' not in filename:
            self._addfileindir(filename, status)
        else:
            subdir, remainder = filename.split(b'/', 1)
            node = self.subdirs.get(subdir)
            if node is None:
                node = dirnode(pathutil.join(self.path, subdir))
                self.subdirs[subdir] = node
            node.addfile(remainder, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) pairs obtained by tersing this dirnode.

        terseargs is the string of status characters the user passed to
        the `--terse` flag.

        If every file at or below this directory shares a single status
        and that status was requested for tersing, a single
        (status, 'dirpath/') pair is produced.  Otherwise the direct
        files are emitted, followed by a recursive walk of each
        subdirectory.
        """
        if len(self.statuses) == 1:
            # Note: pop() deliberately empties the (one-element) set.
            onlyst = self.statuses.pop()
            if onlyst in terseargs:
                yield onlyst, self.path + b'/'
                return

        # Not tersable here: direct files first ...
        for entry in self.iterfilepaths():
            yield entry

        # ... then everything below, tersed where possible.
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
745
745
746
746
def tersedir(statuslist, terseargs):
    """Collapse per-file statuses into per-directory ones where possible.

    statuslist is a scmutil.status() object holding one list of files per
    status; terseargs is the string of status characters the user passed
    to the `--terse` flag.

    A tree of dirnode objects is built for the whole working copy; each
    node records enough information to decide whether the directory can
    be reported as a single tersed entry.  Returns a new scmutil.status()
    object with the tersed paths.
    """
    # the order matters here as that is used to produce final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # reject any terse character we do not know about
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.Abort(_(b"'%s' not recognized") % ch)

    # dirnode for the repository root; files feed into it below
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        tersedict[statuschar] = []
        for path in getattr(statuslist, attrname):
            rootobj.addfile(path, statuschar)

    # the root dir itself is never tersed, so emit its direct files
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk every subdirectory, tersing wherever possible
    for child in rootobj.subdirs.values():
        for st, fpath in child.tersewalk(terseargs):
            tersedict[st].append(fpath)

    # assemble the per-status lists in canonical (allst) order
    tersedlist = [sorted(tersedict[st]) for st in allst]
    return scmutil.status(*tersedlist)
802
802
803
803
804 def _commentlines(raw):
804 def _commentlines(raw):
805 '''Surround lineswith a comment char and a new line'''
805 '''Surround lineswith a comment char and a new line'''
806 lines = raw.splitlines()
806 lines = raw.splitlines()
807 commentedlines = [b'# %s' % line for line in lines]
807 commentedlines = [b'# %s' % line for line in lines]
808 return b'\n'.join(commentedlines) + b'\n'
808 return b'\n'.join(commentedlines) + b'\n'
809
809
810
810
@attr.s(frozen=True)
class morestatus(object):
    """Extra information shown by `hg status` for an unfinished state.

    Holds what is needed to report an interrupted operation (rebase,
    graft, ...) and/or an active merge with unresolved files, and knows
    how to render that through a formatter.
    """

    # absolute path of the repository root (used to print relative paths)
    reporoot = attr.ib()
    # name of the unfinished operation, or None
    unfinishedop = attr.ib()
    # human-readable hint on how to continue/abort, or None
    unfinishedmsg = attr.ib()
    # whether a merge is currently in progress
    activemerge = attr.ib()
    # sorted paths with unresolved merge conflicts (when activemerge)
    unresolvedpaths = attr.ib()
    # Paths already rendered via formatfile(), so _formatconflicts() can
    # skip them.  This must be a per-instance factory: the previous
    # `default=set()` evaluated the set once at class-definition time,
    # making every morestatus instance share (and accumulate into) the
    # same set — wrong in long-lived processes such as the command server.
    _formattedpaths = attr.ib(init=False, default=attr.Factory(set))
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Record that *path* was listed; flag it if it is unresolved."""
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Append the unfinished-state / conflict summary to formatter fm."""
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

        if self.unfinishedop:
            fm.data(unfinished=self.unfinishedop)
            statemsg = (
                _(b'The repository is in an unfinished *%s* state.')
                % self.unfinishedop
            )
            fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
        if self.unfinishedmsg:
            fm.data(unfinishedmsg=self.unfinishedmsg)

        # May also start new data items.
        self._formatconflicts(fm)

        if self.unfinishedmsg:
            fm.plain(
                b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
            )

    def _formatconflicts(self, fm):
        """Describe unresolved merge conflicts, if a merge is active."""
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b' %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            msg = (
                _(
                    '''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                # We can't claim to know the status of the file - it may just
                # have been in one of the states that were not requested for
                # display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
888
888
889
889
def readmorestatus(repo):
    """Return a morestatus object if the repo has unfinished state, else None."""
    statetuple = statemod.getrepostate(repo)
    mergestate = mergemod.mergestate.read(repo)
    activemerge = mergestate.active()
    if not (statetuple or activemerge):
        return None

    if statetuple:
        unfinishedop, unfinishedmsg = statetuple
    else:
        unfinishedop = unfinishedmsg = None
    unresolved = sorted(mergestate.unresolved()) if activemerge else None
    return morestatus(
        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
    )
906
906
907
907
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    # An exact table hit short-circuits the scan: the "log" alias must
    # beat "log|history".
    keys = [cmd] if cmd in table else table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept an unambiguous prefix of any alias
            found = next((a for a in aliases if a.startswith(cmd)), None)
        else:
            found = None

        if found is not None:
            isdebug = aliases[0].startswith(b"debug") or found.startswith(
                b"debug"
            )
            bucket = debugchoice if isdebug else choice
            bucket[found] = (aliases, table[entry])

    # fall back to debug commands only when nothing normal matched
    return (choice or debugchoice), allcmds
945
945
946
946
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins outright
    try:
        return choice[cmd]
    except KeyError:
        pass

    # several prefix matches: the command is ambiguous
    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    # exactly one prefix match
    for entry in choice.values():
        return entry

    raise error.UnknownCommand(cmd, allcmds)
962
962
963
963
964 def changebranch(ui, repo, revs, label):
964 def changebranch(ui, repo, revs, label):
965 """ Change the branch name of given revs to label """
965 """ Change the branch name of given revs to label """
966
966
967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
968 # abort in case of uncommitted merge or dirty wdir
968 # abort in case of uncommitted merge or dirty wdir
969 bailifchanged(repo)
969 bailifchanged(repo)
970 revs = scmutil.revrange(repo, revs)
970 revs = scmutil.revrange(repo, revs)
971 if not revs:
971 if not revs:
972 raise error.Abort(b"empty revision set")
972 raise error.Abort(b"empty revision set")
973 roots = repo.revs(b'roots(%ld)', revs)
973 roots = repo.revs(b'roots(%ld)', revs)
974 if len(roots) > 1:
974 if len(roots) > 1:
975 raise error.Abort(
975 raise error.Abort(
976 _(b"cannot change branch of non-linear revisions")
976 _(b"cannot change branch of non-linear revisions")
977 )
977 )
978 rewriteutil.precheck(repo, revs, b'change branch of')
978 rewriteutil.precheck(repo, revs, b'change branch of')
979
979
980 root = repo[roots.first()]
980 root = repo[roots.first()]
981 rpb = {parent.branch() for parent in root.parents()}
981 rpb = {parent.branch() for parent in root.parents()}
982 if label not in rpb and label in repo.branchmap():
982 if label not in rpb and label in repo.branchmap():
983 raise error.Abort(_(b"a branch of the same name already exists"))
983 raise error.Abort(_(b"a branch of the same name already exists"))
984
984
985 if repo.revs(b'obsolete() and %ld', revs):
985 if repo.revs(b'obsolete() and %ld', revs):
986 raise error.Abort(
986 raise error.Abort(
987 _(b"cannot change branch of a obsolete changeset")
987 _(b"cannot change branch of a obsolete changeset")
988 )
988 )
989
989
990 # make sure only topological heads
990 # make sure only topological heads
991 if repo.revs(b'heads(%ld) - head()', revs):
991 if repo.revs(b'heads(%ld) - head()', revs):
992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
993
993
994 replacements = {}
994 replacements = {}
995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
996 # mercurial.subrepo -> mercurial.cmdutil
996 # mercurial.subrepo -> mercurial.cmdutil
997 from . import context
997 from . import context
998
998
999 for rev in revs:
999 for rev in revs:
1000 ctx = repo[rev]
1000 ctx = repo[rev]
1001 oldbranch = ctx.branch()
1001 oldbranch = ctx.branch()
1002 # check if ctx has same branch
1002 # check if ctx has same branch
1003 if oldbranch == label:
1003 if oldbranch == label:
1004 continue
1004 continue
1005
1005
1006 def filectxfn(repo, newctx, path):
1006 def filectxfn(repo, newctx, path):
1007 try:
1007 try:
1008 return ctx[path]
1008 return ctx[path]
1009 except error.ManifestLookupError:
1009 except error.ManifestLookupError:
1010 return None
1010 return None
1011
1011
1012 ui.debug(
1012 ui.debug(
1013 b"changing branch of '%s' from '%s' to '%s'\n"
1013 b"changing branch of '%s' from '%s' to '%s'\n"
1014 % (hex(ctx.node()), oldbranch, label)
1014 % (hex(ctx.node()), oldbranch, label)
1015 )
1015 )
1016 extra = ctx.extra()
1016 extra = ctx.extra()
1017 extra[b'branch_change'] = hex(ctx.node())
1017 extra[b'branch_change'] = hex(ctx.node())
1018 # While changing branch of set of linear commits, make sure that
1018 # While changing branch of set of linear commits, make sure that
1019 # we base our commits on new parent rather than old parent which
1019 # we base our commits on new parent rather than old parent which
1020 # was obsoleted while changing the branch
1020 # was obsoleted while changing the branch
1021 p1 = ctx.p1().node()
1021 p1 = ctx.p1().node()
1022 p2 = ctx.p2().node()
1022 p2 = ctx.p2().node()
1023 if p1 in replacements:
1023 if p1 in replacements:
1024 p1 = replacements[p1][0]
1024 p1 = replacements[p1][0]
1025 if p2 in replacements:
1025 if p2 in replacements:
1026 p2 = replacements[p2][0]
1026 p2 = replacements[p2][0]
1027
1027
1028 mc = context.memctx(
1028 mc = context.memctx(
1029 repo,
1029 repo,
1030 (p1, p2),
1030 (p1, p2),
1031 ctx.description(),
1031 ctx.description(),
1032 ctx.files(),
1032 ctx.files(),
1033 filectxfn,
1033 filectxfn,
1034 user=ctx.user(),
1034 user=ctx.user(),
1035 date=ctx.date(),
1035 date=ctx.date(),
1036 extra=extra,
1036 extra=extra,
1037 branch=label,
1037 branch=label,
1038 )
1038 )
1039
1039
1040 newnode = repo.commitctx(mc)
1040 newnode = repo.commitctx(mc)
1041 replacements[ctx.node()] = (newnode,)
1041 replacements[ctx.node()] = (newnode,)
1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1043
1043
1044 # create obsmarkers and move bookmarks
1044 # create obsmarkers and move bookmarks
1045 scmutil.cleanupnodes(
1045 scmutil.cleanupnodes(
1046 repo, replacements, b'branch-change', fixphase=True
1046 repo, replacements, b'branch-change', fixphase=True
1047 )
1047 )
1048
1048
1049 # move the working copy too
1049 # move the working copy too
1050 wctx = repo[None]
1050 wctx = repo[None]
1051 # in-progress merge is a bit too complex for now.
1051 # in-progress merge is a bit too complex for now.
1052 if len(wctx.parents()) == 1:
1052 if len(wctx.parents()) == 1:
1053 newid = replacements.get(wctx.p1().node())
1053 newid = replacements.get(wctx.p1().node())
1054 if newid is not None:
1054 if newid is not None:
1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1056 # mercurial.cmdutil
1056 # mercurial.cmdutil
1057 from . import hg
1057 from . import hg
1058
1058
1059 hg.update(repo, newid[0], quietempty=True)
1059 hg.update(repo, newid[0], quietempty=True)
1060
1060
1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1062
1062
1063
1063
def findrepo(p):
    """Walk upward from path 'p' and return the nearest ancestor directory
    (possibly 'p' itself) that contains a '.hg' directory, or None if the
    filesystem root is reached without finding one."""
    while not os.path.isdir(os.path.join(p, b".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() no longer shrinks the path: we hit the root
            return None
        p = parent

    return p
1071
1071
1072
1072
def bailifchanged(repo, merge=True, hint=None):
    """Abort unless the working directory (and its subrepos) is clean.

    When 'merge' is False, an in-progress uncommitted merge is tolerated
    (as during 'update --check').  'hint' is forwarded to any Abort
    exception raised.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
    status = repo.status()
    if any([status.modified, status.added, status.removed, status.deleted]):
        raise error.Abort(_(b'uncommitted changes'), hint=hint)
    # recurse into subrepositories in deterministic order
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
1090
1090
1091
1091
def logmessage(ui, opts):
    """Return the commit message from the -m/--message or -l/--logfile
    option (mutually exclusive); a logfile of '-' reads from stdin."""
    check_at_most_one_arg(opts, b'message', b'logfile')

    message = opts.get(b'message')
    logfile = opts.get(b'logfile')

    if message or not logfile:
        return message
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading the file
        return b'\n'.join(util.readfile(logfile).splitlines())
    except IOError as err:
        raise error.Abort(
            _(b"can't read commit message '%s': %s")
            % (logfile, encoding.strtolocal(err.strerror))
        )
1112
1112
1113
1113
def mergeeditform(ctxorbool, baseformname):
    """Return the appropriate editform name (referencing a committemplate).

    'ctxorbool' is either a ctx about to be committed, or a bool saying
    whether a merge is being committed.  The result is 'baseformname' with
    '.merge' appended for merges and '.normal' otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (b".merge" if ismerge else b".normal")
1130
1130
1131
1131
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """Pick the commit message editor implied by the '--edit' option.

    'finishdesc' is called with the edited commit message (the new
    changeset's description) right after editing, before the emptiness
    check; it returns the text actually stored, allowing the description
    to be rewritten before recording.

    'extramsg' replaces the 'Leave message empty to abort commit' line in
    the editor; the 'HG: ' prefix and EOL are added automatically.

    'editform' is a dot-separated list of names distinguishing the purpose
    of the commit-text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is given, because those are MQ-specific.
    """
    if edit or finishdesc or extramsg:

        def editor(r, c, s):
            return commitforceeditor(
                r,
                c,
                s,
                finishdesc=finishdesc,
                extramsg=extramsg,
                editform=editform,
            )

        return editor
    if editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    return commiteditor
1162
1162
1163
1163
def _escapecommandtemplate(tmpl):
    """Escape backslashes in the literal (string) parts of 'tmpl' so the
    result can be rendered with '\\' kept verbatim outside template
    constructs."""
    pieces = []
    for kind, begin, stop in templater.scantemplate(tmpl, raw=True):
        chunk = tmpl[begin:stop]
        # only literal text is escaped; template fragments pass through
        pieces.append(stringutil.escapestr(chunk) if kind == b'string' else chunk)
    return b''.join(pieces)
1172
1172
1173
1173
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand the literal template 'tmpl' for use on a command line.

    Outermost '\' is not treated as an escape character because it is the
    directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        return tmpl
    templ = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return templ.renderdefault(props)
1191
1191
1192
1192
def rendertemplate(ctx, tmpl, props=None):
    """Expand the literal template byte-string 'tmpl' against changeset
    'ctx'.

    Every item in 'props' must be a stringify-able value or a callable
    returning one; bare lists and dicts must not be passed.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(
        repo.ui, tmpl, defaults=templatekw.keywords, resources=resources
    )
    mapping = {b'ctx': ctx}
    mapping.update(props or {})
    return templ.renderdefault(mapping)
1208
1208
1209
1209
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # %-escape -> template-fragment translations that are always available
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # escapes below are only enabled when the caller supplied the
    # corresponding context value
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # with both known, zero-pad %n to the width of the total count
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # already a template fragment; pass it through unchanged
            newname.append(pat[start:end])
            continue
        # literal text: copy it with backslashes escaped, expanding any
        # %-escapes found along the way
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                # no more %-escapes in this literal chunk
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # a trailing '%' with nothing after it
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            c = pat[n + 1 : n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % c
                )
    return b''.join(newname)
1279
1279
1280
1280
def makefilename(ctx, pat, **props):
    """Expand the old-style %-format (or template) filename pattern 'pat'
    against changeset 'ctx' and return the resulting name."""
    if not pat:
        return pat
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(
        ctx, _buildfntemplate(pat, **props), pycompat.byteskwargs(props)
    )
1289
1289
1290
1290
def isstdiofilename(pat):
    """Report whether 'pat' denotes stdin/stdout (empty/None or '-')."""
    if not pat:
        return True
    return pat == b'-'
1294
1294
1295
1295
1296 class _unclosablefile(object):
1296 class _unclosablefile(object):
1297 def __init__(self, fp):
1297 def __init__(self, fp):
1298 self._fp = fp
1298 self._fp = fp
1299
1299
1300 def close(self):
1300 def close(self):
1301 pass
1301 pass
1302
1302
1303 def __iter__(self):
1303 def __iter__(self):
1304 return iter(self._fp)
1304 return iter(self._fp)
1305
1305
1306 def __getattr__(self, attr):
1306 def __getattr__(self, attr):
1307 return getattr(self._fp, attr)
1307 return getattr(self._fp, attr)
1308
1308
1309 def __enter__(self):
1309 def __enter__(self):
1310 return self
1310 return self
1311
1311
1312 def __exit__(self, exc_type, exc_value, exc_tb):
1312 def __exit__(self, exc_type, exc_value, exc_tb):
1313 pass
1313 pass
1314
1314
1315
1315
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open the file named by expanding 'pat' against 'ctx' in 'mode'.

    An empty pattern or '-' yields the ui's stdin/stdout (depending on
    whether the mode is writable), wrapped so that close() is a no-op.
    """
    writable = mode not in (b'r', b'rb')

    if isstdiofilename(pat):
        ui = ctx.repo().ui
        stream = ui.fout if writable else ui.fin
        return _unclosablefile(stream)
    return open(makefilename(ctx, pat, **props), mode)
1328
1328
1329
1329
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']
    msg = None
    # --changelog/--manifest/--dir are mutually exclusive, incompatible
    # with an explicit filename, and require a repository
    if cl and mf:
        msg = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        # select the requested storage object from the repository
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            # per-directory manifests only exist with treemanifest
            if b'treemanifest' not in repo.requirements:
                raise error.Abort(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, b'_revlog'):
            r = r._revlog  # pytype: disable=attribute-error
        elif r is not None:
            raise error.Abort(_(b'%r does not appear to be a revlog') % r)

    if not r:
        # nothing found in the repo (or no repo): fall back to opening the
        # given path directly as a revlog
        if not returnrevlog:
            raise error.Abort(_(b'cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_(b"revlog '%s' not found") % file_)
        # open the '<name>.i' index relative to the cwd without path
        # auditing (debug-level escape hatch)
        r = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
        )
    return r
1397
1397
1398
1398
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    Most callers care about the main storage object rather than the revlog
    backing it, so this should only be used by code that needs to examine
    low-level revlog implementation details, e.g. debug commands.
    """
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1410
1410
1411
1411
1412 def copy(ui, repo, pats, opts, rename=False):
1412 def copy(ui, repo, pats, opts, rename=False):
1413 # called with the repo lock held
1413 # called with the repo lock held
1414 #
1414 #
1415 # hgsep => pathname that uses "/" to separate directories
1415 # hgsep => pathname that uses "/" to separate directories
1416 # ossep => pathname that uses os.sep to separate directories
1416 # ossep => pathname that uses os.sep to separate directories
1417 cwd = repo.getcwd()
1417 cwd = repo.getcwd()
1418 targets = {}
1418 targets = {}
1419 after = opts.get(b"after")
1419 after = opts.get(b"after")
1420 dryrun = opts.get(b"dry_run")
1420 dryrun = opts.get(b"dry_run")
1421 wctx = repo[None]
1421 wctx = repo[None]
1422
1422
1423 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1423 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1424
1424
1425 def walkpat(pat):
1425 def walkpat(pat):
1426 srcs = []
1426 srcs = []
1427 if after:
1427 if after:
1428 badstates = b'?'
1428 badstates = b'?'
1429 else:
1429 else:
1430 badstates = b'?r'
1430 badstates = b'?r'
1431 m = scmutil.match(wctx, [pat], opts, globbed=True)
1431 m = scmutil.match(wctx, [pat], opts, globbed=True)
1432 for abs in wctx.walk(m):
1432 for abs in wctx.walk(m):
1433 state = repo.dirstate[abs]
1433 state = repo.dirstate[abs]
1434 rel = uipathfn(abs)
1434 rel = uipathfn(abs)
1435 exact = m.exact(abs)
1435 exact = m.exact(abs)
1436 if state in badstates:
1436 if state in badstates:
1437 if exact and state == b'?':
1437 if exact and state == b'?':
1438 ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
1438 ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
1439 if exact and state == b'r':
1439 if exact and state == b'r':
1440 ui.warn(
1440 ui.warn(
1441 _(
1441 _(
1442 b'%s: not copying - file has been marked for'
1442 b'%s: not copying - file has been marked for'
1443 b' remove\n'
1443 b' remove\n'
1444 )
1444 )
1445 % rel
1445 % rel
1446 )
1446 )
1447 continue
1447 continue
1448 # abs: hgsep
1448 # abs: hgsep
1449 # rel: ossep
1449 # rel: ossep
1450 srcs.append((abs, rel, exact))
1450 srcs.append((abs, rel, exact))
1451 return srcs
1451 return srcs
1452
1452
1453 # abssrc: hgsep
1453 # abssrc: hgsep
1454 # relsrc: ossep
1454 # relsrc: ossep
1455 # otarget: ossep
1455 # otarget: ossep
1456 def copyfile(abssrc, relsrc, otarget, exact):
1456 def copyfile(abssrc, relsrc, otarget, exact):
1457 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1457 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1458 if b'/' in abstarget:
1458 if b'/' in abstarget:
1459 # We cannot normalize abstarget itself, this would prevent
1459 # We cannot normalize abstarget itself, this would prevent
1460 # case only renames, like a => A.
1460 # case only renames, like a => A.
1461 abspath, absname = abstarget.rsplit(b'/', 1)
1461 abspath, absname = abstarget.rsplit(b'/', 1)
1462 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1462 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1463 reltarget = repo.pathto(abstarget, cwd)
1463 reltarget = repo.pathto(abstarget, cwd)
1464 target = repo.wjoin(abstarget)
1464 target = repo.wjoin(abstarget)
1465 src = repo.wjoin(abssrc)
1465 src = repo.wjoin(abssrc)
1466 state = repo.dirstate[abstarget]
1466 state = repo.dirstate[abstarget]
1467
1467
1468 scmutil.checkportable(ui, abstarget)
1468 scmutil.checkportable(ui, abstarget)
1469
1469
1470 # check for collisions
1470 # check for collisions
1471 prevsrc = targets.get(abstarget)
1471 prevsrc = targets.get(abstarget)
1472 if prevsrc is not None:
1472 if prevsrc is not None:
1473 ui.warn(
1473 ui.warn(
1474 _(b'%s: not overwriting - %s collides with %s\n')
1474 _(b'%s: not overwriting - %s collides with %s\n')
1475 % (
1475 % (
1476 reltarget,
1476 reltarget,
1477 repo.pathto(abssrc, cwd),
1477 repo.pathto(abssrc, cwd),
1478 repo.pathto(prevsrc, cwd),
1478 repo.pathto(prevsrc, cwd),
1479 )
1479 )
1480 )
1480 )
1481 return True # report a failure
1481 return True # report a failure
1482
1482
1483 # check for overwrites
1483 # check for overwrites
1484 exists = os.path.lexists(target)
1484 exists = os.path.lexists(target)
1485 samefile = False
1485 samefile = False
1486 if exists and abssrc != abstarget:
1486 if exists and abssrc != abstarget:
1487 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1487 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1488 abstarget
1488 abstarget
1489 ):
1489 ):
1490 if not rename:
1490 if not rename:
1491 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1491 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1492 return True # report a failure
1492 return True # report a failure
1493 exists = False
1493 exists = False
1494 samefile = True
1494 samefile = True
1495
1495
1496 if not after and exists or after and state in b'mn':
1496 if not after and exists or after and state in b'mn':
1497 if not opts[b'force']:
1497 if not opts[b'force']:
1498 if state in b'mn':
1498 if state in b'mn':
1499 msg = _(b'%s: not overwriting - file already committed\n')
1499 msg = _(b'%s: not overwriting - file already committed\n')
1500 if after:
1500 if after:
1501 flags = b'--after --force'
1501 flags = b'--after --force'
1502 else:
1502 else:
1503 flags = b'--force'
1503 flags = b'--force'
1504 if rename:
1504 if rename:
1505 hint = (
1505 hint = (
1506 _(
1506 _(
1507 b"('hg rename %s' to replace the file by "
1507 b"('hg rename %s' to replace the file by "
1508 b'recording a rename)\n'
1508 b'recording a rename)\n'
1509 )
1509 )
1510 % flags
1510 % flags
1511 )
1511 )
1512 else:
1512 else:
1513 hint = (
1513 hint = (
1514 _(
1514 _(
1515 b"('hg copy %s' to replace the file by "
1515 b"('hg copy %s' to replace the file by "
1516 b'recording a copy)\n'
1516 b'recording a copy)\n'
1517 )
1517 )
1518 % flags
1518 % flags
1519 )
1519 )
1520 else:
1520 else:
1521 msg = _(b'%s: not overwriting - file exists\n')
1521 msg = _(b'%s: not overwriting - file exists\n')
1522 if rename:
1522 if rename:
1523 hint = _(
1523 hint = _(
1524 b"('hg rename --after' to record the rename)\n"
1524 b"('hg rename --after' to record the rename)\n"
1525 )
1525 )
1526 else:
1526 else:
1527 hint = _(b"('hg copy --after' to record the copy)\n")
1527 hint = _(b"('hg copy --after' to record the copy)\n")
1528 ui.warn(msg % reltarget)
1528 ui.warn(msg % reltarget)
1529 ui.warn(hint)
1529 ui.warn(hint)
1530 return True # report a failure
1530 return True # report a failure
1531
1531
1532 if after:
1532 if after:
1533 if not exists:
1533 if not exists:
1534 if rename:
1534 if rename:
1535 ui.warn(
1535 ui.warn(
1536 _(b'%s: not recording move - %s does not exist\n')
1536 _(b'%s: not recording move - %s does not exist\n')
1537 % (relsrc, reltarget)
1537 % (relsrc, reltarget)
1538 )
1538 )
1539 else:
1539 else:
1540 ui.warn(
1540 ui.warn(
1541 _(b'%s: not recording copy - %s does not exist\n')
1541 _(b'%s: not recording copy - %s does not exist\n')
1542 % (relsrc, reltarget)
1542 % (relsrc, reltarget)
1543 )
1543 )
1544 return True # report a failure
1544 return True # report a failure
1545 elif not dryrun:
1545 elif not dryrun:
1546 try:
1546 try:
1547 if exists:
1547 if exists:
1548 os.unlink(target)
1548 os.unlink(target)
1549 targetdir = os.path.dirname(target) or b'.'
1549 targetdir = os.path.dirname(target) or b'.'
1550 if not os.path.isdir(targetdir):
1550 if not os.path.isdir(targetdir):
1551 os.makedirs(targetdir)
1551 os.makedirs(targetdir)
1552 if samefile:
1552 if samefile:
1553 tmp = target + b"~hgrename"
1553 tmp = target + b"~hgrename"
1554 os.rename(src, tmp)
1554 os.rename(src, tmp)
1555 os.rename(tmp, target)
1555 os.rename(tmp, target)
1556 else:
1556 else:
1557 # Preserve stat info on renames, not on copies; this matches
1557 # Preserve stat info on renames, not on copies; this matches
1558 # Linux CLI behavior.
1558 # Linux CLI behavior.
1559 util.copyfile(src, target, copystat=rename)
1559 util.copyfile(src, target, copystat=rename)
1560 srcexists = True
1560 srcexists = True
1561 except IOError as inst:
1561 except IOError as inst:
1562 if inst.errno == errno.ENOENT:
1562 if inst.errno == errno.ENOENT:
1563 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1563 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1564 srcexists = False
1564 srcexists = False
1565 else:
1565 else:
1566 ui.warn(
1566 ui.warn(
1567 _(b'%s: cannot copy - %s\n')
1567 _(b'%s: cannot copy - %s\n')
1568 % (relsrc, encoding.strtolocal(inst.strerror))
1568 % (relsrc, encoding.strtolocal(inst.strerror))
1569 )
1569 )
1570 return True # report a failure
1570 return True # report a failure
1571
1571
1572 if ui.verbose or not exact:
1572 if ui.verbose or not exact:
1573 if rename:
1573 if rename:
1574 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1574 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1575 else:
1575 else:
1576 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1576 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1577
1577
1578 targets[abstarget] = abssrc
1578 targets[abstarget] = abssrc
1579
1579
1580 # fix up dirstate
1580 # fix up dirstate
1581 scmutil.dirstatecopy(
1581 scmutil.dirstatecopy(
1582 ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1582 ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1583 )
1583 )
1584 if rename and not dryrun:
1584 if rename and not dryrun:
1585 if not after and srcexists and not samefile:
1585 if not after and srcexists and not samefile:
1586 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1586 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1587 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1587 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1588 wctx.forget([abssrc])
1588 wctx.forget([abssrc])
1589
1589
1590 # pat: ossep
1590 # pat: ossep
1591 # dest ossep
1591 # dest ossep
1592 # srcs: list of (hgsep, hgsep, ossep, bool)
1592 # srcs: list of (hgsep, hgsep, ossep, bool)
1593 # return: function that takes hgsep and returns ossep
1593 # return: function that takes hgsep and returns ossep
1594 def targetpathfn(pat, dest, srcs):
1594 def targetpathfn(pat, dest, srcs):
1595 if os.path.isdir(pat):
1595 if os.path.isdir(pat):
1596 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1596 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1597 abspfx = util.localpath(abspfx)
1597 abspfx = util.localpath(abspfx)
1598 if destdirexists:
1598 if destdirexists:
1599 striplen = len(os.path.split(abspfx)[0])
1599 striplen = len(os.path.split(abspfx)[0])
1600 else:
1600 else:
1601 striplen = len(abspfx)
1601 striplen = len(abspfx)
1602 if striplen:
1602 if striplen:
1603 striplen += len(pycompat.ossep)
1603 striplen += len(pycompat.ossep)
1604 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1604 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1605 elif destdirexists:
1605 elif destdirexists:
1606 res = lambda p: os.path.join(
1606 res = lambda p: os.path.join(
1607 dest, os.path.basename(util.localpath(p))
1607 dest, os.path.basename(util.localpath(p))
1608 )
1608 )
1609 else:
1609 else:
1610 res = lambda p: dest
1610 res = lambda p: dest
1611 return res
1611 return res
1612
1612
1613 # pat: ossep
1613 # pat: ossep
1614 # dest ossep
1614 # dest ossep
1615 # srcs: list of (hgsep, hgsep, ossep, bool)
1615 # srcs: list of (hgsep, hgsep, ossep, bool)
1616 # return: function that takes hgsep and returns ossep
1616 # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        """Like targetpathfn, but for the ``--after`` case where only the
        rename/copy is recorded and the files already sit at ``dest``."""
        if matchmod.patkind(pat):
            # a mercurial pattern: map each source to dest/<basename>
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # Count how many sources already exist at the target
                    # location implied by stripping ``striplen`` characters;
                    # the higher-scoring strip length wins below.
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    # dest contains a directory named like the pattern's last
                    # component; try stripping only up to the parent instead
                    # and keep whichever strip length matches more sources.
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(
                        dest, os.path.basename(util.localpath(p))
                    )
                else:
                    res = lambda p: dest
        return res
1657
1657
1658 pats = scmutil.expandpats(pats)
1658 pats = scmutil.expandpats(pats)
1659 if not pats:
1659 if not pats:
1660 raise error.Abort(_(b'no source or destination specified'))
1660 raise error.Abort(_(b'no source or destination specified'))
1661 if len(pats) == 1:
1661 if len(pats) == 1:
1662 raise error.Abort(_(b'no destination specified'))
1662 raise error.Abort(_(b'no destination specified'))
1663 dest = pats.pop()
1663 dest = pats.pop()
1664 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1664 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1665 if not destdirexists:
1665 if not destdirexists:
1666 if len(pats) > 1 or matchmod.patkind(pats[0]):
1666 if len(pats) > 1 or matchmod.patkind(pats[0]):
1667 raise error.Abort(
1667 raise error.Abort(
1668 _(
1668 _(
1669 b'with multiple sources, destination must be an '
1669 b'with multiple sources, destination must be an '
1670 b'existing directory'
1670 b'existing directory'
1671 )
1671 )
1672 )
1672 )
1673 if util.endswithsep(dest):
1673 if util.endswithsep(dest):
1674 raise error.Abort(_(b'destination %s is not a directory') % dest)
1674 raise error.Abort(_(b'destination %s is not a directory') % dest)
1675
1675
1676 tfn = targetpathfn
1676 tfn = targetpathfn
1677 if after:
1677 if after:
1678 tfn = targetpathafterfn
1678 tfn = targetpathafterfn
1679 copylist = []
1679 copylist = []
1680 for pat in pats:
1680 for pat in pats:
1681 srcs = walkpat(pat)
1681 srcs = walkpat(pat)
1682 if not srcs:
1682 if not srcs:
1683 continue
1683 continue
1684 copylist.append((tfn(pat, dest, srcs), srcs))
1684 copylist.append((tfn(pat, dest, srcs), srcs))
1685 if not copylist:
1685 if not copylist:
1686 raise error.Abort(_(b'no files to copy'))
1686 raise error.Abort(_(b'no files to copy'))
1687
1687
1688 errors = 0
1688 errors = 0
1689 for targetpath, srcs in copylist:
1689 for targetpath, srcs in copylist:
1690 for abssrc, relsrc, exact in srcs:
1690 for abssrc, relsrc, exact in srcs:
1691 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1691 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1692 errors += 1
1692 errors += 1
1693
1693
1694 return errors != 0
1694 return errors != 0
1695
1695
1696
1696
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = []  # run before commit
extrapostimport = []  # run after commit
# mapping from identifier to actual import function
#
# 'preimport' functions are run before the commit is made and are provided
# the following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided
# the following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1717
1717
1718
1718
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple: a status message, the node of
    the created commit (or None), and whether hunks were rejected (only
    possible with ``--partial``).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # Patch metadata; command-line options override values parsed from the
    # patch itself for user and date.
    tmpname = patchdata.get(b'filename')
    message = patchdata.get(b'message')
    user = opts.get(b'user') or patchdata.get(b'user')
    date = opts.get(b'date') or patchdata.get(b'date')
    branch = patchdata.get(b'branch')
    nodeid = patchdata.get(b'nodeid')
    p1 = patchdata.get(b'p1')
    p2 = patchdata.get(b'p2')

    nocommit = opts.get(b'no_commit')
    importbranch = opts.get(b'import_branch')
    update = not opts.get(b'bypass')
    strip = opts[b"strip"]
    prefix = opts[b"prefix"]
    sim = float(opts.get(b'similarity') or 0)

    if not tmpname:
        # nothing was extracted from the patch: nothing to do
        return None, None, False

    rejects = False

    # Commit message priority: command line, then patch, then the editor.
    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug(b'message:\n%s\n' % (message or b''))

    # Determine the parents of the commit to be created.
    if len(parents) == 1:
        parents.append(repo[nullid])
    if opts.get(b'exact'):
        # --exact requires the patch to carry full Mercurial metadata.
        if not nodeid or not p1:
            raise error.Abort(_(b'not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullid]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullid]
        except error.RepoError:
            # recorded parents unknown in this repo: fall back to wdir parents
            p1, p2 = parents
        if p2.node() == nullid:
            ui.warn(
                _(
                    b"warning: import the patch as a normal revision\n"
                    b"(use --exact to import the patch as a merge)\n"
                )
            )
    else:
        p1, p2 = parents

    n = None
    if update:
        # Normal mode: apply the patch to the working directory.
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get(b'exact') or importbranch:
            repo.dirstate.setbranch(branch or b'default')

        partial = opts.get(b'partial', False)
        files = set()
        try:
            patch.patch(
                ui,
                repo,
                tmpname,
                strip=strip,
                prefix=prefix,
                files=files,
                eolmode=None,
                similarity=sim / 100.0,
            )
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            # with --partial, record the rejection instead of aborting
            # (NOTE: this branch is only reached when ``partial`` is true,
            # so the guard is redundant but harmless)
            if partial:
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get(b'exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], b'import.normal')
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
            # let extensions contribute extra commit metadata
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[(b'ui', b'allowemptycommit')] = True
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = repo.commit(
                    message, user, date, match=m, editor=editor, extra=extra
                )
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
    else:
        # --bypass mode: commit in-memory without touching the working copy.
        if opts.get(b'exact') or importbranch:
            branch = branch or b'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(
                    ui,
                    repo,
                    p1,
                    store,
                    tmpname,
                    strip,
                    prefix,
                    files,
                    eolmode=None,
                )
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=b'import.bypass')
            memctx = context.memctx(
                repo,
                (p1.node(), p2.node()),
                message,
                files=files,
                filectxfn=store,
                user=user,
                date=date,
                branch=branch,
                editor=editor,
            )
            n = memctx.commit()
        finally:
            store.close()
    if opts.get(b'exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
        # --exact guarantees bit-for-bit reproduction of the recorded node
        raise error.Abort(_(b'patch is damaged or loses information'))
    msg = _(b'applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _(b'created %s') % short(n)
    return msg, n, rejects
1915
1915
1916
1916
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1924
1924
1925
1925
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Emit one "HG changeset patch" (header lines plus diff) for ``ctx``.

    ``seqno`` is the 1-based position of this patch in the exported series
    (forwarded to the ``extraexport`` header hooks).  When ``switch_parent``
    is set and the changeset has two parents, the diff base becomes the
    second parent instead of the first.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the other parent: swap the base used below
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        # root changeset: diff against the null revision
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    # the Branch header is omitted for the default branch
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    if fm.isplain():
        # plain output: stream labeled chunks straight through
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        # structured formatter: attach the whole diff as a single blob
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
1969
1969
1970
1970
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Write the patches for ``revs`` to one destination via ``fm``.

    ``dest`` is used only for the progress note; pseudo destinations such
    as b'<unnamed>' (anything starting with b'<') are never echoed.
    """
    announce = not dest.startswith(b'<')
    for seqno, rev in enumerate(revs, 1):
        if announce:
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(
            repo, repo[rev], fm, match, switch_parent, seqno, diffopts
        )
1979
1979
1980
1980
def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets, grouping them into files named via ``fntemplate``."""
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)

    # Bucket (seqno, rev) pairs by generated filename, preserving the order
    # in which each filename first appears.
    by_dest = util.sortdict()  # filename: [(seqno, rev), ...]
    for seqno, rev in enumerate(revs, 1):
        fname = makefilename(
            repo[rev], fntemplate, total=total, seqno=seqno, revwidth=revwidth
        )
        by_dest.setdefault(fname, []).append((seqno, rev))

    for fname, entries in by_dest.items():
        with formatter.maybereopen(basefm, fname) as fm:
            repo.ui.note(b"%s\n" % fname)
            for seqno, rev in entries:
                fm.startitem()
                _exportsingle(
                    repo, repo[rev], fm, match, switch_parent, seqno, diffopts
                )
2005
2005
2006
2006
def _prefetchchangedfiles(repo, revs, match):
    """Prefetch every file touched by ``revs``.

    Only files accepted by ``match`` (or all touched files when ``match``
    is None) are included.
    """
    changed = set()
    for rev in revs:
        changed.update(
            fname for fname in repo[rev].files() if not match or match(fname)
        )
    scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, changed))
2014
2014
2015
2015
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    '''export changesets as hg patches

    Prefetches the files touched by ``revs`` and emits one "HG changeset
    patch" per revision.  With a non-empty ``fntemplate`` each revision is
    written to its own file named from the template; otherwise every
    revision goes to ``basefm``.

    Args:
        repo: The repository from which we're exporting revisions.
        revs: A list of revisions to export as revision numbers.
        basefm: A formatter to which patches should be written.
        fntemplate: An optional string to use for generating patch file names.
        switch_parent: If True, show diffs against second parent when not nullid.
            Default is false, which always shows diff against p1.
        opts: diff options to use for generating the patch.
        match: If specified, only export changes to files matching this matcher.

    Returns:
        Nothing.
    '''
    _prefetchchangedfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
    else:
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
2057
2057
2058
2058
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Write the patches for ``revs`` to the already-open stream ``fp``."""
    _prefetchchangedfiles(repo, revs, match)

    # Use the stream's name in patch output when it has one.
    destination = getattr(fp, 'name', b'<unnamed>')
    fm = formatter.formatter(repo.ui, fp, b'export', {})
    with fm:
        _exportfile(repo, revs, fm, destination, switch_parent, opts, match)
2066
2066
2067
2067
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # Optional position of this marker within the list being displayed.
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # Successor nodes are only written when the marker has some.
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # The date is rendered separately above, so drop it from the metadata
    # dict before displaying the remaining key/value pairs.
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2098
2098
2099
2099
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = dateutil.matchdate(date)
    matcher = scmutil.matchall(repo)
    matched = {}

    def record(ctx, fns):
        # Remember every changeset whose timestamp satisfies the spec.
        ctxdate = ctx.date()
        if datematch(ctxdate[0]):
            matched[ctx.rev()] = ctxdate

    # walkchangerevs() calls record() on each window in forward order but
    # yields contexts newest-first (no 'rev' opt), so the first hit below
    # is the tipmost matching changeset.
    for ctx in walkchangerevs(repo, matcher, {b'rev': None}, record):
        rev = ctx.rev()
        if rev in matched:
            ui.status(
                _(b"found revision %d from %s\n")
                % (rev, dateutil.datestr(matched[rev]))
            )
            return b'%d' % rev

    raise error.Abort(_(b"revision matching date not found"))
2121 raise error.Abort(_(b"revision matching date not found"))
2122
2122
2123
2123
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes that double until at least ``sizelimit``.

    The size doubles after each yield while it is still below
    ``sizelimit``; once it reaches or exceeds the limit, that final
    value is yielded forever (the generator never terminates).
    """
    size = windowsize
    while size < sizelimit:
        yield size
        size *= 2
    while True:
        yield size
2129
2129
2130
2130
def _walkrevs(repo, opts):
    """Resolve the revisions a log-style command should walk."""
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    following = opts.get(b'follow') or opts.get(b'follow_first')
    if opts.get(b'rev'):
        return scmutil.revrange(repo, opts[b'rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # Following from the null working-directory parent: nothing
            # to walk.
            return smartset.baseset()
        return repo.revs(b'reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2145
2145
2146
2146
class FileWalkError(Exception):
    """Raised when a file history walk cannot use filelogs alone.

    walkfilerevs() raises this so that callers (see walkchangerevs())
    can fall back to the slow changelog-scanning path.
    """

    pass
2149
2149
2150
2150
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Args:
        repo: repository to walk.
        match: matcher naming the files of interest (plain files only,
            no patterns).
        follow: if true, also walk into rename/copy sources.
        revs: candidate changeset revs; only filelog entries whose
            linkrev falls within [min(revs), max(revs)] are considered.
        fncache: dict updated in place, mapping rev -> list of matched
            filenames changed in that rev.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # (file, node) rename sources discovered while following; processed
    # as additional files by iterfiles() below.
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevs(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        # note: this local 'revs' shadows the outer 'revs' argument
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append(
                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
            )

        return reversed(revs)

    def iterfiles():
        # Yield (filename, filenode-or-None) pairs: first the explicitly
        # matched files, then any rename sources accumulated in 'copies'.
        pctx = repo[b'.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % filename
                    )
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % file_
                    )
                raise FileWalkError(b"Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevs(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2256
2256
2257
2257
class _followfilter(object):
    """Incrementally classify revisions relative to a starting revision.

    The first revision fed to match() becomes the reference point
    (startrev); subsequent calls report whether a revision is connected
    to it: ascending revisions are accepted when a parent was already
    accepted (descendants), descending revisions when they are on the
    tracked ancestor frontier.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # Set by the first match() call.
        self.startrev = nullrev
        # Frontier of revisions already known to be on the followed line.
        self.roots = set()
        # When true, only first parents are considered.
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # Parents to follow: just the first parent with onlyfirst,
            # otherwise all non-null parents.
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(
                    lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
                )

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2296
2296
2297
2297
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    allfiles = opts.get(b'all_files')
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    fncache = {}
    # Shorthand to look up a changectx by revision number.
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always() or allfiles:
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(
                _(b'can only follow copies/renames for explicit filenames')
            )

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # Each candidate rev is examined at most once; drop it
                    # from the pending set and cache a positive answer.
                    self.revs.discard(value)
                    ctx = change(value)
                    if allfiles:
                        matches = list(ctx.manifest().walk(match))
                    else:
                        matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get(b'prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in pycompat.xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): 'wanted' can be a plain set here (fast
                # filelog path), and set - list raises TypeError;
                # presumably --prune only reaches this with a
                # smartset-like 'wanted' -- confirm.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))

            def want(rev):
                return ff.match(rev) and rev in wanted

        else:

            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # Pull up to windowsize candidate revs, keeping the wanted ones.
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # Call prepare() over the window in forward (ascending) order.
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # Filenames not cached earlier: compute them lazily
                    # from the changeset itself.
                    def fns_generator():
                        if allfiles:

                            def bad(f, msg):
                                pass

                            for f in ctx.matches(matchmod.badmatch(match, bad)):
                                yield f
                        else:
                            for f in ctx.files():
                                if match(f):
                                    yield f

                    fns = fns_generator()
                prepare(ctx, fns)
            # Yield the window in the walk order (usually newest first).
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2452
2452
2453
2453
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Schedule matched unknown files (and subrepo files) for addition.

    Args:
        ui: ui object for status/warning output.
        repo: repository whose working copy is modified.
        match: matcher selecting the files to add.
        prefix: path prefix relative to the repository root.
        uipathfn: callable turning a repo path into a display path.
        explicitonly: only add files explicitly named by the matcher.
        opts: native-str keyed options; 'subrepos' and 'dry_run' are
            consulted here.

    Returns the list of files that could not be added.
    """
    bad = []

    # Record names the matcher rejects while still delegating to its
    # normal bad-file reporting.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # Check for filename collisions on case-insensitive
                # filesystems (may abort or warn per config).
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            # With --subrepos, recurse non-explicitly (add everything the
            # matcher covers); otherwise only explicitly named files.
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # Only surface rejections for files the user named explicitly.
        bad.extend(f for f in rejected if f in match.files())
    return bad
2512
2512
2513
2513
def addwebdirpath(repo, serverpath, webconf):
    """Register ``repo`` and its subrepositories in ``webconf``."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, root))

    # Recurse into every subrepo recorded in any revision's .hgsub.
    for rev in repo.revs(b'filelog("path:.hgsub")'):
        changectx = repo[rev]
        for subpath in changectx.substate:
            changectx.sub(subpath).addwebdirpath(serverpath, webconf)
2522
2522
2523
2523
def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Stop tracking the matched files without deleting them.

    Args:
        ui: ui object for prompts and status output.
        repo: repository whose working copy is modified.
        match: matcher selecting the files to forget.
        prefix: path prefix relative to the repository root.
        uipathfn: callable turning a repo path into a display path.
        explicitonly: only forget files explicitly named by the matcher.
        dryrun: report what would happen without changing the dirstate.
        interactive: prompt per file before forgetting it.

    Returns a pair (bad, forgot): files that could not be forgotten,
    and files that were (or with dryrun, would have been) forgotten.

    Raises error.Abort if both dryrun and interactive are requested.
    """
    if dryrun and interactive:
        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
    bad = []
    # Record names the matcher rejects while still delegating to its
    # normal bad-file reporting.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    # Candidates are every tracked matched file, whatever its state.
    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not explicitonly:
        # Warn about explicitly named files that are not tracked.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        # Iterate over a copy: the loop mutates 'forget' in place.
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4: # ?
                # Show the help text and re-prompt until a real choice.
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0: # yes
                continue
            elif r == 1: # no
                forget.remove(filename)
            elif r == 2: # Skip
                # Drop this file and everything after it.
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3: # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        # Only surface rejections for files the user named explicitly.
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2621
2621
2622
2622
def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    """Write the names of the files in *ctx* selected by matcher *m*.

    Each match is emitted through formatter *fm* using template *fmt*;
    when verbose (or when the formatter hints at b'size'/b'flags') the
    file size and flags are written as well.  Matching subrepositories
    are recursed into.  Returns 0 if anything was listed, 1 otherwise.
    """
    ret = 1

    # Only pay for a filectx lookup when size/flags will actually be shown.
    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    for path in ctx.matches(m):
        fm.startitem()
        fm.context(ctx=ctx)
        if needsfctx:
            fctx = ctx[path]
            fm.write(b'size flags', b'% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(path=path)
        fm.plain(fmt % uipathfn(path))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if not (subrepos or m.exact(subpath) or any(submatch.files())):
            continue
        sub = ctx.sub(subpath)
        try:
            recurse = m.exact(subpath) or subrepos
            status = sub.printfiles(
                ui, submatch, subuipathfn, fm, fmt, recurse
            )
            if status == 0:
                ret = 0
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n")
                % uipathfn(subpath)
            )

    return ret
2656
2656
2657
2657
def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    """Implementation of ``hg remove``: unschedule and/or delete files.

    Behavior by mode:
      - ``force``: remove everything matched (modified/deleted/clean/added).
      - ``after``: only record files already deleted from disk; warn about
        matched files that still exist.
      - default: remove deleted+clean files; warn (and fail) for modified
        or added files.

    ``warnings`` may be passed in by a recursive (subrepo) invocation, in
    which case warnings are accumulated for the caller to print; otherwise
    they are printed here at the end.  Returns 0 on success, 1 if any
    warning was issued.  With ``dryrun`` no filesystem/dirstate change is
    made.
    """
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    # Only the outermost call (warnings is None) prints the warnings;
    # recursive subrepo calls just append to the shared list.
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(files), unit=_(b'files')
    )
    for f in files:

        def insubrepo():
            # True if f lives inside any subrepo (handled by the loop above)
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
            # missing files will generate a warning elsewhere
            ret = 1
    progress.complete()

    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only record already-deleted files; everything else that
        # matched is skipped with a warning.
        list = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
                ret = 1
        progress.complete()
    else:
        list = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    list = sorted(list)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(list), unit=_(b'files')
    )
    for f in list:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue  # we never unlink added files on remove
                    rmdir = repo.ui.configbool(
                        b'experimental', b'removeemptydirs'
                    )
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2807
2807
2808
2808
2809 def _catfmtneedsdata(fm):
2809 def _catfmtneedsdata(fm):
2810 return not fm.datahint() or b'data' in fm.datahint()
2810 return not fm.datahint() or b'data' in fm.datahint()
2811
2811
2812
2812
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""

    if _catfmtneedsdata(fm):
        data = ctx[path].data()
        if decode:
            data = ctx.repo().wwritedata(path, data)
    else:
        # data() can be expensive to fetch (e.g. lfs), so skip it entirely
        # when the formatter didn't request it.
        data = b''
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', data)
    fm.data(path=path)
2830
2830
2831
2831
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the data of the files in *ctx* selected by *matcher*.

    Output goes through formatter *basefm*, or into per-file destinations
    when *fntemplate* is given (template expanded via ``makefilename``).
    *prefix* is prepended to paths for subrepo recursion.  Returns 0 if at
    least one file was written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        # Emit one file, reopening the formatter on a templated filename
        # when requested.
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                # Prefetch only when the formatter actually wants the data.
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
                write(file)
                return 0
        except KeyError:
            pass

    if _catfmtneedsdata(basefm):
        scmutil.prefetchfiles(repo, [ctx.rev()], matcher)

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            # sub.cat() follows the same 0-on-success convention.
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **pycompat.strkwargs(opts)
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err
2892
2892
2893
2893
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get(b'date')
    if rawdate:
        opts[b'date'] = dateutil.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get(b'addremove'):
        dsguard = dirstateguard.dirstateguard(repo, b'commit')
    else:
        dsguard = None
    with dsguard or util.nullcontextmanager():
        if dsguard is not None:
            relative = scmutil.anypats(pats, opts)
            uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
            if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
                raise error.Abort(
                    _(b"failed to mark all new/missing files as added/removed")
                )

    return commitfunc(ui, repo, message, matcher, opts)
2917
2917
2918
2918
def samefile(f, ctx1, ctx2):
    """Report whether file *f* is identical in *ctx1* and *ctx2*.

    "Identical" means same contents and same flags; a file absent from
    both contexts also counts as identical.
    """
    present1 = f in ctx1.manifest()
    present2 = f in ctx2.manifest()
    if not present1:
        # same only if it is missing on both sides
        return not present2
    if not present2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2929
2929
2930
2930
def amend(ui, repo, old, extra, pats, opts):
    """Rewrite changeset *old*, folding in matched working-copy changes.

    Commits a replacement changeset on top of ``old.p1()``, reroutes the
    working copy to it and obsoletes/strips ``old`` via
    ``scmutil.cleanupnodes``.  Returns the new node id, or ``old.node()``
    unchanged when the amend would be a no-op.  ``extra`` is copied, not
    mutated.  Raises ``error.Abort`` if --addremove fails.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username()  # raise exception if username not set

    ui.note(_(b'amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        # date-only change should be ignored?
        datemaydiffer = resolvecommitoptions(ui, opts)

        date = old.date()
        if opts.get(b'date'):
            date = dateutil.parsedate(opts.get(b'date'))
        user = opts.get(b'user') or old.user()

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            st = base.status(old)
            files = set(st.modified) | set(st.added) | set(st.removed)
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        relative = scmutil.anypats(pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
        if opts.get(b'addremove') and scmutil.addremove(
            repo, matcher, b"", uipathfn, opts
        ):
            raise error.Abort(
                _(b"failed to mark all new/missing files as added/removed")
            )

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if b'.hgsub' in wctx or b'.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher
            )
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        filestoamend = {f for f in wctx.files() if matcher(f)}

        changes = len(filestoamend) > 0
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # Fix: the previous check ``if old.p2:`` tested the truthiness
            # of the bound method object itself, which is always True, so
            # copies were recomputed against the (null) second parent even
            # for non-merge changesets. Only consult p2 when it is real.
            if old.p2().node() != nullid:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [
                f
                for f in files
                if (f not in filestoamend or not samefile(f, wctx, base))
            ]

            def filectxfn(repo, ctx_, path):
                # Supply memctx with file contents for the amended commit.
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(
                        repo,
                        ctx_,
                        fctx.path(),
                        fctx.data(),
                        islink=b'l' in flags,
                        isexec=b'x' in flags,
                        copysource=copied.get(path),
                    )
                    return mctx
                except KeyError:
                    return None

        else:
            ui.note(_(b'copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, b'commit.amend')

        if not message:
            message = old.description()
            # Default if message isn't provided and --edit is not passed is to
            # invoke editor, but allow --no-edit. If somehow we don't have any
            # description, let's always start the editor.
            doedit = not message or opts.get(b'edit') in [True, None]
        else:
            # Default if message is provided is to not invoke editor, but allow
            # --edit.
            doedit = opts.get(b'edit') is True
        editor = getcommiteditor(edit=doedit, editform=editform)

        pureextra = extra.copy()
        extra[b'amend_source'] = old.hex()

        new = context.memctx(
            repo,
            parents=[base.node(), old.p2().node()],
            text=message,
            files=files,
            filectxfn=filectxfn,
            user=user,
            date=date,
            extra=extra,
            editor=editor,
        )

        newdesc = changelog.stripdesc(new.description())
        if (
            (not changes)
            and newdesc == old.description()
            and user == old.user()
            and (date == old.date() or datemaydiffer)
            and pureextra == old.extra()
        ):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        commitphase = None
        if opts.get(b'secret'):
            commitphase = phases.secret
        newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get(b'note'):
            obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
        backup = ui.configbool(b'rewrite', b'backup-bundle')
        scmutil.cleanupnodes(
            repo,
            mapping,
            b'amend',
            metadata=obsmetadata,
            fixphase=True,
            targetphase=commitphase,
            backup=backup,
        )

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and modified in the
        # amend to "normal" in the dirstate. We need to use "normallookup" since
        # the files may have changed since the command started; using "normal"
        # would mark them as clean but with uncommitted contents.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normallookup(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
3158
3158
3159
3159
def commiteditor(repo, ctx, subs, editform=b''):
    """Return the commit message for ``ctx``, invoking an editor if empty.

    If ``ctx`` already carries a description it is returned as-is;
    otherwise the user's editor is forced open, aborting when the
    message comes back unchanged.
    """
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(
        repo, ctx, subs, editform=editform, unchangedmessagedetection=True
    )
3166
3166
3167
3167
def commitforceeditor(
    repo,
    ctx,
    subs,
    finishdesc=None,
    extramsg=None,
    editform=b'',
    unchangedmessagedetection=False,
):
    """Spawn the user's editor to obtain a commit message for ``ctx``.

    The editor buffer is seeded either from a ``[committemplate]``
    configuration entry matching ``editform`` (the most specific dotted
    prefix wins) or, failing that, from the default text produced by
    ``buildcommittext()``.

    ``finishdesc`` is an optional callable applied to the edited text
    before validation.  ``extramsg`` replaces the default
    "leave empty to abort" hint shown in the template.  When
    ``unchangedmessagedetection`` is true, the commit is aborted if the
    user saved the templated text without modification.

    Returns the cleaned commit message.  Raises ``error.Abort`` when the
    resulting message is empty or (optionally) unchanged.
    """
    if not extramsg:
        extramsg = _(b"Leave message empty to abort commit.")

    # Try committemplate entries from most specific ('changeset.a.b') to
    # least specific ('changeset'); fall back to the built-in text.
    forms = [e for e in editform.split(b'.') if e]
    forms.insert(0, b'changeset')
    templatetext = None
    while forms:
        ref = b'.'.join(forms)
        if repo.ui.config(b'committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref
            )
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = encoding.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(
        committext,
        ctx.user(),
        ctx.extra(),
        editform=editform,
        pending=pending,
        repopath=repo.path,
        action=b'commit',
    )
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[: stripbelow.start()]

    # drop the "HG:" helper lines that were injected into the template
    text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_(b"empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_(b"commit message unchanged"))

    return text
3231
3231
3232
3232
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the ``[committemplate]`` entry ``ref`` for ``ctx``.

    Every key in the ``committemplate`` config section is made available
    to the template (unquoted), so entries can reference each other.
    Returns the rendered text captured from the ui buffer.
    """
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    t = logcmdutil.changesettemplater(ui, repo, spec)
    overrides = (
        (key, templater.unquotestring(value))
        for key, value in repo.ui.configitems(b'committemplate')
    )
    t.t.cache.update(overrides)

    if not extramsg:
        extramsg = b''  # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3248
3248
3249
3249
def hgprefix(msg):
    """Prefix every non-empty line of ``msg`` with ``HG: ``.

    Empty lines are dropped entirely, matching the lines later removed
    from the edited message by ``commitforceeditor()``.
    """
    prefixed = [b"HG: %s" % line for line in msg.split(b"\n") if line]
    return b"\n".join(prefixed)
3252
3252
3253
3253
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default editor text for committing ``ctx``.

    The text starts with any existing description, followed by
    ``HG:``-prefixed helper lines describing user, branch, bookmark,
    subrepos and the files touched by the commit.  ``extramsg`` is the
    abort hint shown below the instructions.
    """
    lines = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        lines.append(ctx.description())
    lines.append(b"")
    lines.append(b"")  # Empty line between message and comments.
    lines.append(
        hgprefix(
            _(
                b"Enter commit message."
                b" Lines beginning with 'HG:' are removed."
            )
        )
    )
    lines.append(hgprefix(extramsg))
    lines.append(b"HG: --")
    lines.append(hgprefix(_(b"user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_(b"branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_(b"subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_(b"added %s") % f))
    for f in modified:
        lines.append(hgprefix(_(b"changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_(b"removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_(b"no files changed")))
    lines.append(b"")

    return b"\n".join(lines)
3287
3287
3288
3288
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report the outcome of a commit on the ui.

    Prints ``created new head`` when ``node`` constitutes an additional
    head on ``branch`` (see the decision table below), notes any closed
    branch head that the commit reopens, and echoes the committed
    changeset id at debug/verbose levels.

    ``bheads`` is the list of branch heads before the commit; ``opts``
    may carry ``amend`` and ``close_branch`` flags which suppress parts
    of the output.  Returns nothing; output only.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (
        not opts.get(b'amend')
        and bheads
        and node not in bheads
        and not [
            x for x in parents if x.node() in bheads and x.branch() == branch
        ]
    ):
        repo.ui.status(_(b'created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get(b'close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(
                    _(b'reopening closed branch head %d\n') % r.rev()
                )

    if repo.ui.debugflag:
        repo.ui.write(
            _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
        )
    elif repo.ui.verbose:
        repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3346
3346
3347
3347
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for files matching ``pats``.

    Used after a commit to see what the patterns still match in the
    (new) working context.
    """
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3350
3350
3351
3351
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files matched by ``pats``/``opts`` to their state in ``ctx``.

    ``parents`` is the (p1, p2) pair of the working directory.  The
    function classifies every matched file both against ``ctx`` and
    against the dirstate parent (modified, added, removed, deleted,
    unknown, clean, plus the dirstate-local ``ds*`` variants), selects an
    action per file from a dispatch table, optionally backs up locally
    modified files (see ``scmutil.backuppath``), and delegates the actual
    file operations to ``_performrevert()``.  Matched subrepositories are
    reverted recursively.

    With ``dry_run`` only the per-file messages are printed.  Raises
    ``error.Abort`` if a matched subrepository does not exist in ``ctx``.
    """
    opts = pycompat.byteskwargs(opts)
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        # reverting against the second parent: use p2 as the comparison base
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get(b'interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress the warning for paths already collected, for
                # subrepos, and for directories covered by collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + b'/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn(b"%s: %s\n" % (uipathfn(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(
                node1=node, match=m, unknown=True, ignored=True, clean=True
            )
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded  # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == b'r':
                dsremoved.add(src)
                names[src] = True

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != b'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == b'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {
            b'revert': ([], _(b'reverting %s\n')),
            b'add': ([], _(b'adding %s\n')),
            b'remove': ([], _(b'removing %s\n')),
            b'drop': ([], _(b'removing %s\n')),
            b'forget': ([], _(b'forgetting %s\n')),
            b'undelete': ([], _(b'undeleting %s\n')),
            b'noop': (None, _(b'no changes needed to %s\n')),
            b'unknown': (None, _(b'file not managed: %s\n')),
        }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3  # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1  # check if the existing file differs from target
        discard = 0  # never do backup
        if opts.get(b'no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions[b'remove']
        if not opts.get(b'no_backup'):
            backupanddel = actions[b'drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup
            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions[b'revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions[b'revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions[b'revert'], dsmodifiedbackup),
            # Added since target
            (added, actions[b'remove'], discard),
            # Added in working directory
            (dsadded, actions[b'forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions[b'drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions[b'add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions[b'add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions[b'undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions[b'undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions[b'noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions[b'unknown'], discard),
        )

        for abs, exact in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
                            absbakname = scmutil.backuppath(ui, repo, abs)
                            bakname = os.path.relpath(
                                absbakname, start=repo.root
                            )
                            ui.note(
                                _(b'saving current version of %s as %s\n')
                                % (uipathfn(abs), uipathfn(bakname))
                            )
                            if not opts.get(b'dry_run'):
                                if interactive:
                                    util.copyfile(target, absbakname)
                                else:
                                    util.rename(target, absbakname)
                    if opts.get(b'dry_run'):
                        if ui.verbose or not exact:
                            ui.status(msg % uipathfn(abs))
                elif exact:
                    ui.warn(msg % uipathfn(abs))
                break

        if not opts.get(b'dry_run'):
            # prefetch file contents needed by the actions that (re)create
            # files on disk before performing the revert
            needdata = (b'revert', b'add', b'undelete')
            oplist = [actions[name][0] for name in needdata]
            prefetch = scmutil.prefetchfiles
            matchfiles = scmutil.matchfiles
            prefetch(
                repo,
                [ctx.rev()],
                matchfiles(repo, [f for sublist in oplist for f in sublist]),
            )
            match = scmutil.match(repo[None], pats)
            _performrevert(
                repo,
                parents,
                ctx,
                names,
                uipathfn,
                actions,
                match,
                interactive,
                tobackup,
            )

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(
                        ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
                    )
                except KeyError:
                    raise error.Abort(
                        b"subrepository '%s' does not exist in %s!"
                        % (sub, short(ctx.node()))
                    )
3662
3662
3663
3663
def _performrevert(
    repo,
    parents,
    ctx,
    names,
    uipathfn,
    actions,
    match,
    interactive=False,
    tobackup=None,
):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    repo: local repository
    parents: (parent, p2) pair of working-directory parent nodes
    ctx: the context being reverted to
    names: mapping of file name -> "exact" flag (whether the file was named
        explicitly on the command line; non-exact files are only reported in
        verbose mode, see prntstatusmsg below)
    uipathfn: callable turning a repo-relative path into a UI-facing path
    actions: dict of action name -> ([files], message-template); [0] is the
        list of files and [1] the status message template (see prntstatusmsg)
    match: matcher used for interactive hunk selection
    interactive: prompt per file/hunk instead of reverting unconditionally
    tobackup: set of files to back up as .orig before touching them; only
        consulted in interactive mode and mutated in place (entries are
        removed as backups are written)
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user declined to touch in interactive mode; excluded from
    # later revert/undelete passes
    excluded_files = []

    def checkout(f):
        # Write f's content and flags from the target context into the
        # working directory.
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # Unlink f from disk (best effort) and mark it removed in the
        # dirstate.  Empty parent directories are pruned only when the
        # experimental.removeemptydirs config allows it.
        try:
            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # file may already be gone; removal from dirstate still applies
            pass
        repo.dirstate.remove(f)

    def prntstatusmsg(action, f):
        # Report the action taken on f; files not named explicitly are only
        # reported in verbose mode.
        exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % uipathfn(f))

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    # 'forget': added files to drop from tracking (file content untouched)
    for f in actions[b'forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'forget', f)
                repo.dirstate.drop(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'forget', f)
            repo.dirstate.drop(f)
    # 'remove': files to untrack AND delete from the working directory
    for f in actions[b'remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'remove', f)
            doremove(f)
    # 'drop': mark removed in the dirstate without touching the filesystem
    for f in actions[b'drop'][0]:
        audit_path(f)
        prntstatusmsg(b'drop', f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(
            repo.ui,
            whitespace=True,
            section=b'commands',
            configprefix=b'revert.interactive.',
        )
        # dates and git-style headers keep the prompted diff stable/parsable
        diffopts.nodates = True
        diffopts.git = True
        # operation determines the diff direction and the prompt wording:
        # 'apply' patches toward the target, 'discard'/'keep' work from it
        operation = b'apply'
        if node == parent:
            if repo.ui.configbool(
                b'experimental', b'revert.interactive.select-to-keep'
            ):
                operation = b'keep'
            else:
                operation = b'discard'

        if operation == b'apply':
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        else:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(
                repo.ui, originalchunks, match, operation=operation
            )
            if operation == b'discard':
                # selected hunks describe wdir -> target; reverse them so
                # applying to the working directory discards the changes
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)

        # FIXME: when doing an interactive revert of a copy, there's no way of
        # performing a partial revert of the added file, the only option is
        # "remove added file <name> (Yn)?", so we don't need to worry about the
        # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
            chunks, originalchunks
        )
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    if operation == b'keep':
                        # 'keep' applies reversed hunks on top of the target
                        # version, so first check the target version out
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        # non-interactive: revert every selected file wholesale
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg(b'add', f)
            checkout(f)
            repo.dirstate.add(f)

    # same clean-vs-lookup reasoning as above, for undeleted files
    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    # restore copy/rename metadata for everything we (re)created
    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3863
3863
3864
3864
# Extension hook points consumed by commands.py.

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return a tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3882
3882
3883
3883
def checkunfinished(repo, commit=False, skipmerge=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().

    commit=True skips states that allow committing while unfinished;
    skipmerge=True skips the (clearable) merge state.
    '''
    states = statemod._unfinishedstates
    # Check for non-clearable states first, so things like rebase will take
    # precedence over update.
    for state in states:
        skip = (
            state._clearable
            or (commit and state._allowcommit)
            or state._reportonly
        )
        if not skip and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    # Second pass: clearable states (e.g. merge), unless explicitly skipped.
    for state in states:
        skip = (
            not state._clearable
            or (commit and state._allowcommit)
            or (state._opname == b'merge' and skipmerge)
            or state._reportonly
        )
        if not skip and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())
3911
3911
3912
3912
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.

    Aborts if a non-clearable (and non-report-only) operation is in
    progress; otherwise unlinks the state files of in-progress clearable
    operations (merge is deliberately left alone).
    '''
    for state in statemod._unfinishedstates:
        if state._reportonly:
            continue
        if not state._clearable and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    for s in statemod._unfinishedstates:
        # Bug fix: this previously read `state._reportonly`, where `state`
        # was a stale leftover from the loop above (and would be undefined
        # if the states list were empty); the current state `s` is what
        # must be consulted.
        if s._opname == b'merge' or s._reportonly:
            continue
        if s._clearable and s.isunfinished(repo):
            util.unlink(repo.vfs.join(s._fname))
3928
3928
3929
3929
def getunfinishedstate(repo):
    '''Return the statecheck object for the unfinished operation in
    progress, or None when nothing is unfinished.'''
    return next(
        (s for s in statemod._unfinishedstates if s.isunfinished(repo)),
        None,
    )
3937
3937
3938
3938
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    statemod._unfinishedstates list is checked for an unfinished operation
    and the corresponding message to finish it is generated if a method to
    continue is supported by the operation.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _(b"continue: %s")
    for state in statemod._unfinishedstates:
        # only operations that support --continue produce a message here
        if state._continueflag and state.isunfinished(repo):
            return contmsg % state.continuemsg(), True
    wctx = repo[None]
    if wctx.dirty(missing=True, merge=False, branch=False):
        return contmsg % _(b"hg commit"), False
    return None, None
3959
3959
3960
3960
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's an unfinished operation that supports the continue flag,
    the message is emitted via repo.ui.warn; otherwise via repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter(b"%s\n" % msg)
3975
3975
3976
3976
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only the warn-level message (an actual unfinished operation) is a
    # useful hint here
    hint = msg if warning else None
    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
3991
3991
3992
3992
def abortgraft(ui, repo, graftstate):
    """abort the interrupted graft and rollbacks to the state before interrupted
    graft

    Strips the changesets created by the interrupted graft (when safe) and
    updates the working directory back to where the graft started.  Returns 0
    on success; raises error.Abort when there is no graft to abort or the
    state file is too old to carry the needed data.
    """
    if not graftstate.exists():
        raise error.Abort(_(b"no interrupted graft to abort"))
    statedata = readgraftstate(repo, graftstate)
    newnodes = statedata.get(b'newnodes')
    if newnodes is None:
        # an old graft state which does not have all the data required to abort
        # the graft
        raise error.Abort(_(b"cannot abort using an old graftstate"))

    # changeset from which graft operation was started
    if len(newnodes) > 0:
        startctx = repo[newnodes[0]].p1()
    else:
        startctx = repo[b'.']
    # whether to strip or not
    cleanup = False
    # local import; presumably avoids an import cycle at module load —
    # TODO confirm
    from . import hg

    if newnodes:
        newnodes = [repo[r].rev() for r in newnodes]
        cleanup = True
        # checking that none of the newnodes turned public or is public
        immutable = [c for c in newnodes if not repo[c].mutable()]
        if immutable:
            repo.ui.warn(
                _(b"cannot clean up public changesets %s\n")
                % b', '.join(bytes(repo[r]) for r in immutable),
                hint=_(b"see 'hg help phases' for details"),
            )
            cleanup = False

        # checking that no new nodes are created on top of grafted revs
        desc = set(repo.changelog.descendants(newnodes))
        if desc - set(newnodes):
            repo.ui.warn(
                _(
                    b"new changesets detected on destination "
                    b"branch, can't strip\n"
                )
            )
            cleanup = False

    if cleanup:
        with repo.wlock(), repo.lock():
            # move away from the nodes about to be stripped before stripping
            hg.updaterepo(repo, startctx.node(), overwrite=True)
            # stripping the new nodes created
            strippoints = [
                c.node() for c in repo.set(b"roots(%ld)", newnodes)
            ]
            repair.strip(repo.ui, repo, strippoints, backup=False)

    if not cleanup:
        # we don't update to the startnode if we can't strip
        startctx = repo[b'.']
        hg.updaterepo(repo, startctx.node(), overwrite=True)

    ui.status(_(b"graft aborted\n"))
    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
    graftstate.delete()
    return 0
4056
4056
4057
4057
def readgraftstate(repo, graftstate):
    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
    """Read the graft state file and return a dict of the data stored in it.

    Falls back to the legacy format (one node per line, no structured
    fields) when the structured read raises CorruptedState.
    """
    try:
        return graftstate.read()
    except error.CorruptedState:
        # old-style graftstate: a bare list of nodes
        content = repo.vfs.read(b'graftstate')
        return {b'nodes': content.splitlines()}
4066
4066
4067
4067
def hgabortgraft(ui, repo):
    """Abort logic for aborting graft using 'hg abort'."""
    with repo.wlock():
        # hold the wlock for the whole abort, state read included
        return abortgraft(ui, repo, statemod.cmdstate(repo, b'graftstate'))
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now