rebase: move bookmarks with --keep (issue5682)
Author: Jun Wu
Changeset r34364:2f427b57, Mercurial 4.3.3, stable branch
@@ -1,1540 +1,1538 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
17 from __future__ import absolute_import
18
18
19 import errno
19 import errno
20 import os
20 import os
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import (
23 from mercurial.node import (
24 hex,
24 hex,
25 nullid,
25 nullid,
26 nullrev,
26 nullrev,
27 short,
27 short,
28 )
28 )
29 from mercurial import (
29 from mercurial import (
30 bookmarks,
30 bookmarks,
31 cmdutil,
31 cmdutil,
32 commands,
32 commands,
33 copies,
33 copies,
34 destutil,
34 destutil,
35 dirstateguard,
35 dirstateguard,
36 error,
36 error,
37 extensions,
37 extensions,
38 hg,
38 hg,
39 lock,
39 lock,
40 merge as mergemod,
40 merge as mergemod,
41 mergeutil,
41 mergeutil,
42 obsolete,
42 obsolete,
43 obsutil,
43 obsutil,
44 patch,
44 patch,
45 phases,
45 phases,
46 registrar,
46 registrar,
47 repair,
47 repair,
48 repoview,
48 repoview,
49 revset,
49 revset,
50 scmutil,
50 scmutil,
51 smartset,
51 smartset,
52 util,
52 util,
53 )
53 )
54
54
release = lock.release
templateopts = cmdutil.templateopts

# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.

# Indicates that a revision needs to be rebased
revtodo = -1
# sentinel: revision is dropped via a null merge
# (reported as 'ignoring null merge rebase' in _performrebase)
nullmerge = -2
# sentinel: revision is skipped entirely
# (reported as 'not rebasing ignored' in _performrebase)
revignored = -3
# successor in rebase destination
revprecursor = -4
# plain prune (no successor)
revpruned = -5
# sentinels meaning "this revision will not be rebased"
revskipped = (revignored, revprecursor, revpruned)

cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
78
78
79 def _nothingtorebase():
79 def _nothingtorebase():
80 return 1
80 return 1
81
81
82 def _savegraft(ctx, extra):
82 def _savegraft(ctx, extra):
83 s = ctx.extra().get('source', None)
83 s = ctx.extra().get('source', None)
84 if s is not None:
84 if s is not None:
85 extra['source'] = s
85 extra['source'] = s
86 s = ctx.extra().get('intermediate-source', None)
86 s = ctx.extra().get('intermediate-source', None)
87 if s is not None:
87 if s is not None:
88 extra['intermediate-source'] = s
88 extra['intermediate-source'] = s
89
89
90 def _savebranch(ctx, extra):
90 def _savebranch(ctx, extra):
91 extra['branch'] = ctx.branch()
91 extra['branch'] = ctx.branch()
92
92
93 def _makeextrafn(copiers):
93 def _makeextrafn(copiers):
94 """make an extrafn out of the given copy-functions.
94 """make an extrafn out of the given copy-functions.
95
95
96 A copy function takes a context and an extra dict, and mutates the
96 A copy function takes a context and an extra dict, and mutates the
97 extra dict as needed based on the given context.
97 extra dict as needed based on the given context.
98 """
98 """
99 def extrafn(ctx, extra):
99 def extrafn(ctx, extra):
100 for c in copiers:
100 for c in copiers:
101 c(ctx, extra)
101 c(ctx, extra)
102 return extrafn
102 return extrafn
103
103
def _destrebase(repo, sourceset, destspace=None):
    """small wrapper around destmerge to pass the right extra args

    Please wrap destutil.destmerge instead."""
    return destutil.destmerge(repo,
                              action='rebase',
                              sourceset=sourceset,
                              onheadcheck=False,
                              destspace=destspace)
110
110
revsetpredicate = registrar.revsetpredicate()

@revsetpredicate('_destrebase')
def _revsetdestrebase(repo, subset, x):
    # ``_rebasedefaultdest()``

    # default destination for rebase.
    # # XXX: Currently private because I expect the signature to change.
    # # XXX: - bailing out in case of ambiguity vs returning all data.
    # i18n: "_rebasedefaultdest" is a keyword
    if x is None:
        sourceset = None
    else:
        sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
    return subset & smartset.baseset([_destrebase(repo, sourceset)])
125
125
126 class rebaseruntime(object):
126 class rebaseruntime(object):
127 """This class is a container for rebase runtime state"""
127 """This class is a container for rebase runtime state"""
    def __init__(self, repo, ui, opts=None):
        """Initialize rebase runtime state.

        repo: the repository being rebased
        ui:   ui object, used for status output and config lookups
        opts: command-line options dict (collapse, keep, keepbranches,
              date, extrafn, keepopen, ...); may be None
        """
        if opts is None:
            opts = {}

        self.repo = repo
        self.ui = ui
        self.opts = opts
        # rev of the working directory parent before the rebase started
        self.originalwd = None
        # external (non-rebased) parent used when collapsing
        self.external = nullrev
        # Mapping between the old revision id and either what is the new rebased
        # revision or what needs to be done with the old revision. The state
        # dict will be what contains most of the rebase progress state.
        self.state = {}
        # name of the bookmark active when the rebase started (restored later)
        self.activebookmark = None
        # destination revision of the rebase
        self.dest = None
        # revisions that produced no changes and were therefore skipped
        self.skipped = set()
        # ancestors of dest (inclusive); computed lazily when collapsing
        self.destancestors = set()

        self.collapsef = opts.get('collapse', False)
        self.collapsemsg = cmdutil.logmessage(ui, opts)
        self.date = opts.get('date', None)

        e = opts.get('extrafn') # internal, used by e.g. hgsubversion
        # a user-supplied extrafn replaces the default graft-extra copier
        self.extrafns = [_savegraft]
        if e:
            self.extrafns = [e]

        self.keepf = opts.get('keep', False)
        self.keepbranchesf = opts.get('keepbranches', False)
        # keepopen is not meant for use on the command line, but by
        # other extensions
        self.keepopen = opts.get('keepopen', False)
        # maps obsolete revs that are not rebased to their successor in dest
        self.obsoletenotrebased = {}
161
161
162 def storestatus(self, tr=None):
162 def storestatus(self, tr=None):
163 """Store the current status to allow recovery"""
163 """Store the current status to allow recovery"""
164 if tr:
164 if tr:
165 tr.addfilegenerator('rebasestate', ('rebasestate',),
165 tr.addfilegenerator('rebasestate', ('rebasestate',),
166 self._writestatus, location='plain')
166 self._writestatus, location='plain')
167 else:
167 else:
168 with self.repo.vfs("rebasestate", "w") as f:
168 with self.repo.vfs("rebasestate", "w") as f:
169 self._writestatus(f)
169 self._writestatus(f)
170
170
    def _writestatus(self, f):
        """Serialize rebase state to file object *f*.

        On-disk format, one item per line:
          originalwd hex, dest hex, external hex, collapse flag,
          keep flag, keepbranches flag, active bookmark name (may be empty),
          then one 'oldrevhex:newrev' pair per entry in self.state.
        restorestatus() parses this positionally; do not reorder.
        """
        # use the unfiltered repo so hidden revisions can still be resolved
        repo = self.repo.unfiltered()
        f.write(repo[self.originalwd].hex() + '\n')
        f.write(repo[self.dest].hex() + '\n')
        f.write(repo[self.external].hex() + '\n')
        f.write('%d\n' % int(self.collapsef))
        f.write('%d\n' % int(self.keepf))
        f.write('%d\n' % int(self.keepbranchesf))
        f.write('%s\n' % (self.activebookmark or ''))
        for d, v in self.state.iteritems():
            oldrev = repo[d].hex()
            if v >= 0:
                # already rebased: record the new revision's hash
                newrev = repo[v].hex()
            elif v == revtodo:
                # To maintain format compatibility, we have to use nullid.
                # Please do remove this special case when upgrading the format.
                newrev = hex(nullid)
            else:
                # negative sentinel (nullmerge/revignored/...) stored as text
                newrev = v
            f.write("%s:%s\n" % (oldrev, newrev))
        repo.ui.debug('rebase status stored\n')
192
192
    def restorestatus(self):
        """Restore a previously stored status

        Parses .hg/rebasestate (written by _writestatus) positionally and
        repopulates the runtime attributes. Raises error.Abort when the
        file is missing required fields; missing file is translated by
        cmdutil.wrongtooltocontinue into a user-facing abort.
        """
        repo = self.repo
        # keepbranches doubles as a "did we read enough lines" marker below
        keepbranches = None
        dest = None
        collapse = False
        external = nullrev
        activebookmark = None
        state = {}

        try:
            f = repo.vfs("rebasestate")
            for i, l in enumerate(f.read().splitlines()):
                if i == 0:
                    originalwd = repo[l].rev()
                elif i == 1:
                    dest = repo[l].rev()
                elif i == 2:
                    external = repo[l].rev()
                elif i == 3:
                    collapse = bool(int(l))
                elif i == 4:
                    keep = bool(int(l))
                elif i == 5:
                    keepbranches = bool(int(l))
                elif i == 6 and not (len(l) == 81 and ':' in l):
                    # line 6 is a recent addition, so for backwards
                    # compatibility check that the line doesn't look like the
                    # oldrev:newrev lines
                    activebookmark = l
                else:
                    oldrev, newrev = l.split(':')
                    if newrev in (str(nullmerge), str(revignored),
                                  str(revprecursor), str(revpruned)):
                        # negative sentinel stored as decimal text
                        state[repo[oldrev].rev()] = int(newrev)
                    elif newrev == nullid:
                        state[repo[oldrev].rev()] = revtodo
                        # Legacy compat special case
                        # NOTE(review): newrev is a hex string read from the
                        # file while nullid is a binary node; _writestatus
                        # emits hex(nullid), which instead falls through to
                        # the else branch below — confirm whether this branch
                        # is reachable at all.
                    else:
                        state[repo[oldrev].rev()] = repo[newrev].rev()

        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # no state file: tell the user the right command to continue
            cmdutil.wrongtooltocontinue(repo, _('rebase'))

        if keepbranches is None:
            raise error.Abort(_('.hg/rebasestate is incomplete'))

        skipped = set()
        # recompute the set of skipped revs
        if not collapse:
            seen = {dest}
            for old, new in sorted(state.items()):
                # a rev mapping to an already-seen target was skipped
                if new != revtodo and new in seen:
                    skipped.add(old)
                seen.add(new)
        repo.ui.debug('computed skipped revs: %s\n' %
                      (' '.join(str(r) for r in sorted(skipped)) or None))
        repo.ui.debug('rebase status resumed\n')
        # make the rebase set (plus original wd) visible despite obsolescence
        _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})

        self.originalwd = originalwd
        self.dest = dest
        self.state = state
        self.skipped = skipped
        self.collapsef = collapse
        self.keepf = keep
        self.keepbranchesf = keepbranches
        self.external = external
        self.activebookmark = activebookmark
264
264
265 def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
265 def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
266 """Compute structures necessary for skipping obsolete revisions
266 """Compute structures necessary for skipping obsolete revisions
267
267
268 rebaserevs: iterable of all revisions that are to be rebased
268 rebaserevs: iterable of all revisions that are to be rebased
269 obsoleterevs: iterable of all obsolete revisions in rebaseset
269 obsoleterevs: iterable of all obsolete revisions in rebaseset
270 dest: a destination revision for the rebase operation
270 dest: a destination revision for the rebase operation
271 """
271 """
272 self.obsoletenotrebased = {}
272 self.obsoletenotrebased = {}
273 if not self.ui.configbool('experimental', 'rebaseskipobsolete',
273 if not self.ui.configbool('experimental', 'rebaseskipobsolete',
274 default=True):
274 default=True):
275 return
275 return
276 rebaseset = set(rebaserevs)
276 rebaseset = set(rebaserevs)
277 obsoleteset = set(obsoleterevs)
277 obsoleteset = set(obsoleterevs)
278 self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
278 self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
279 obsoleteset, dest)
279 obsoleteset, dest)
280 skippedset = set(self.obsoletenotrebased)
280 skippedset = set(self.obsoletenotrebased)
281 _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
281 _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
282
282
    def _prepareabortorcontinue(self, isabort):
        """Load saved state for --abort/--continue.

        isabort: True for --abort, False for --continue.
        Returns an exit code when the operation finishes here (the abort
        path, or clearing irrecoverable state); otherwise returns None
        after the state and obsolescence info have been restored.
        """
        try:
            self.restorestatus()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            # the state file references revisions that no longer exist
            if isabort:
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(_('rebase aborted (no revision is removed,'
                                    ' only broken state is cleared)\n'))
                return 0
            else:
                msg = _('cannot continue inconsistent rebase')
                hint = _('use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)
        if isabort:
            return abort(self.repo, self.originalwd, self.dest,
                         self.state, activebookmark=self.activebookmark)

        # recompute obsolescence-skipping info for the restored state
        obsrevs = (r for r, st in self.state.items() if st == revprecursor)
        self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
304
304
    def _preparenewrebase(self, dest, rebaseset):
        """Validate a fresh rebase request and build the initial state.

        dest: destination changectx, or None when there is no destination
        rebaseset: revisions to be rebased
        Returns _nothingtorebase()'s code when there is nothing to do;
        otherwise populates originalwd/dest/state (and, when collapsing,
        destancestors/external) and returns None.
        Raises error.Abort for unrebasable configurations.
        """
        if dest is None:
            return _nothingtorebase()

        allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
        # without --keep or allowunstable, refuse to strand descendants
        if (not (self.keepf or allowunstable)
            and self.repo.revs('first(children(%ld) - %ld)',
                               rebaseset, rebaseset)):
            raise error.Abort(
                _("can't remove original changesets with"
                  " unrebased descendants"),
                hint=_('use --keep to keep original changesets'))

        obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
        self._handleskippingobsolete(rebaseset, obsrevs, dest.rev())

        result = buildstate(self.repo, dest, rebaseset, self.collapsef,
                            self.obsoletenotrebased)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_('nothing to rebase\n'))
            return _nothingtorebase()

        # public changesets cannot be rewritten (unless kept with --keep)
        for root in self.repo.set('roots(%ld)', rebaseset):
            if not self.keepf and not root.mutable():
                raise error.Abort(_("can't rebase public changeset %s")
                                  % root,
                                  hint=_("see 'hg help phases' for details"))

        (self.originalwd, self.dest, self.state) = result
        if self.collapsef:
            self.destancestors = self.repo.changelog.ancestors(
                                        [self.dest],
                                        inclusive=True)
            self.external = externalparent(self.repo, self.state,
                                           self.destancestors)

        if dest.closesbranch() and not self.keepbranchesf:
            self.ui.status(_('reopening closed branch head %s\n') % dest)
345
345
    def _performrebase(self, tr):
        """Run the rebase proper: walk the revisions in topological order
        and rebase, skip, or report each one according to self.state.

        tr: active transaction (may be None) used to persist state updates.
        Raises error.InterventionRequired on unresolved merge conflicts so
        the user can 'hg resolve' and then 'hg rebase --continue'.
        """
        repo, ui, opts = self.repo, self.ui, self.opts
        if self.keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            self.extrafns.insert(0, _savebranch)
            if self.collapsef:
                # collapsing revisions from several named branches is ambiguous
                branches = set()
                for rev in self.state:
                    branches.add(repo[rev].branch())
                if len(branches) > 1:
                    raise error.Abort(_('cannot collapse multiple named '
                        'branches'))

        # Rebase
        if not self.destancestors:
            self.destancestors = repo.changelog.ancestors([self.dest],
                                                          inclusive=True)

        # Keep track of the active bookmarks in order to reset them later
        self.activebookmark = self.activebookmark or repo._activebookmark
        if self.activebookmark:
            bookmarks.deactivate(repo)

        # Store the state before we begin so users can run 'hg rebase --abort'
        # if we fail before the transaction closes.
        self.storestatus()

        sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
        # candidates still to be rebased, used only for progress reporting
        cands = [k for k, v in self.state.iteritems() if v == revtodo]
        total = len(cands)
        pos = 0
        for rev in sortedrevs:
            ctx = repo[rev]
            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                                   ctx.description().split('\n', 1)[0])
            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
            if names:
                desc += ' (%s)' % ' '.join(names)
            if self.state[rev] == rev:
                # maps to itself: nothing to do for this revision
                ui.status(_('already rebased %s\n') % desc)
            elif self.state[rev] == revtodo:
                pos += 1
                ui.status(_('rebasing %s\n') % desc)
                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
                            _('changesets'), total)
                p1, p2, base = defineparents(repo, rev, self.dest,
                                             self.state,
                                             self.destancestors,
                                             self.obsoletenotrebased)
                # persist before merging so an interruption is recoverable
                self.storestatus(tr=tr)
                storecollapsemsg(repo, self.collapsemsg)
                if len(repo[None].parents()) == 2:
                    # two working-dir parents: a merge is already in progress
                    repo.ui.debug('resuming interrupted rebase\n')
                else:
                    try:
                        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                     'rebase')
                        stats = rebasenode(repo, rev, p1, base, self.state,
                                           self.collapsef, self.dest)
                        # stats[3] counts unresolved files after the merge
                        if stats and stats[3] > 0:
                            raise error.InterventionRequired(
                                _('unresolved conflicts (see hg '
                                  'resolve, then hg rebase --continue)'))
                    finally:
                        ui.setconfig('ui', 'forcemerge', '', 'rebase')
                if not self.collapsef:
                    merging = p2 != nullrev
                    editform = cmdutil.mergeeditform(merging, 'rebase')
                    editor = cmdutil.getcommiteditor(editform=editform, **opts)
                    newnode = concludenode(repo, rev, p1, p2,
                                           extrafn=_makeextrafn(self.extrafns),
                                           editor=editor,
                                           keepbranches=self.keepbranchesf,
                                           date=self.date)
                    if newnode is None:
                        # If it ended up being a no-op commit, then the normal
                        # merge state clean-up path doesn't happen, so do it
                        # here. Fix issue5494
                        mergemod.mergestate.clean(repo)
                else:
                    # Skip commit if we are collapsing
                    repo.setparents(repo[p1].node())
                    newnode = None
                # Update the state
                if newnode is not None:
                    self.state[rev] = repo[newnode].rev()
                    ui.debug('rebased as %s\n' % short(newnode))
                else:
                    if not self.collapsef:
                        ui.warn(_('note: rebase of %d:%s created no changes '
                                  'to commit\n') % (rev, ctx))
                        self.skipped.add(rev)
                    # record p1 so descendants are rebased onto it
                    self.state[rev] = p1
                    ui.debug('next revision set to %s\n' % p1)
            elif self.state[rev] == nullmerge:
                ui.debug('ignoring null merge rebase of %s\n' % rev)
            elif self.state[rev] == revignored:
                ui.status(_('not rebasing ignored %s\n') % desc)
            elif self.state[rev] == revprecursor:
                # obsolete with a successor already in the destination
                destctx = repo[self.obsoletenotrebased[rev]]
                descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
                           destctx.description().split('\n', 1)[0])
                msg = _('note: not rebasing %s, already in destination as %s\n')
                ui.status(msg % (desc, descdest))
            elif self.state[rev] == revpruned:
                msg = _('note: not rebasing %s, it has no successor\n')
                ui.status(msg % desc)
            else:
                # already rebased in a previous (interrupted) run
                ui.status(_('already rebased %s as %s\n') %
                          (desc, repo[self.state[rev]]))

        ui.progress(_('rebasing'), None)
        ui.note(_('rebase merging completed\n'))
461
461
462 def _finishrebase(self):
462 def _finishrebase(self):
463 repo, ui, opts = self.repo, self.ui, self.opts
463 repo, ui, opts = self.repo, self.ui, self.opts
464 if self.collapsef and not self.keepopen:
464 if self.collapsef and not self.keepopen:
465 p1, p2, _base = defineparents(repo, min(self.state),
465 p1, p2, _base = defineparents(repo, min(self.state),
466 self.dest, self.state,
466 self.dest, self.state,
467 self.destancestors,
467 self.destancestors,
468 self.obsoletenotrebased)
468 self.obsoletenotrebased)
469 editopt = opts.get('edit')
469 editopt = opts.get('edit')
470 editform = 'rebase.collapse'
470 editform = 'rebase.collapse'
471 if self.collapsemsg:
471 if self.collapsemsg:
472 commitmsg = self.collapsemsg
472 commitmsg = self.collapsemsg
473 else:
473 else:
474 commitmsg = 'Collapsed revision'
474 commitmsg = 'Collapsed revision'
475 for rebased in sorted(self.state):
475 for rebased in sorted(self.state):
476 if rebased not in self.skipped and\
476 if rebased not in self.skipped and\
477 self.state[rebased] > nullmerge:
477 self.state[rebased] > nullmerge:
478 commitmsg += '\n* %s' % repo[rebased].description()
478 commitmsg += '\n* %s' % repo[rebased].description()
479 editopt = True
479 editopt = True
480 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
480 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
481 revtoreuse = max(self.state)
481 revtoreuse = max(self.state)
482 newnode = concludenode(repo, revtoreuse, p1, self.external,
482 newnode = concludenode(repo, revtoreuse, p1, self.external,
483 commitmsg=commitmsg,
483 commitmsg=commitmsg,
484 extrafn=_makeextrafn(self.extrafns),
484 extrafn=_makeextrafn(self.extrafns),
485 editor=editor,
485 editor=editor,
486 keepbranches=self.keepbranchesf,
486 keepbranches=self.keepbranchesf,
487 date=self.date)
487 date=self.date)
488 if newnode is None:
488 if newnode is None:
489 newrev = self.dest
489 newrev = self.dest
490 else:
490 else:
491 newrev = repo[newnode].rev()
491 newrev = repo[newnode].rev()
492 for oldrev in self.state.iterkeys():
492 for oldrev in self.state.iterkeys():
493 if self.state[oldrev] > nullmerge:
493 if self.state[oldrev] > nullmerge:
494 self.state[oldrev] = newrev
494 self.state[oldrev] = newrev
495
495
496 if 'qtip' in repo.tags():
496 if 'qtip' in repo.tags():
497 updatemq(repo, self.state, self.skipped, **opts)
497 updatemq(repo, self.state, self.skipped, **opts)
498
498
499 # restore original working directory
499 # restore original working directory
500 # (we do this before stripping)
500 # (we do this before stripping)
501 newwd = self.state.get(self.originalwd, self.originalwd)
501 newwd = self.state.get(self.originalwd, self.originalwd)
502 if newwd == revprecursor:
502 if newwd == revprecursor:
503 newwd = self.obsoletenotrebased[self.originalwd]
503 newwd = self.obsoletenotrebased[self.originalwd]
504 elif newwd < 0:
504 elif newwd < 0:
505 # original directory is a parent of rebase set root or ignored
505 # original directory is a parent of rebase set root or ignored
506 newwd = self.originalwd
506 newwd = self.originalwd
507 if newwd not in [c.rev() for c in repo[None].parents()]:
507 if newwd not in [c.rev() for c in repo[None].parents()]:
508 ui.note(_("update back to initial working directory parent\n"))
508 ui.note(_("update back to initial working directory parent\n"))
509 hg.updaterepo(repo, newwd, False)
509 hg.updaterepo(repo, newwd, False)
510
510
511 collapsedas = None
511 if not self.keepf:
512 if not self.keepf:
512 collapsedas = None
513 if self.collapsef:
513 if self.collapsef:
514 collapsedas = newnode
514 collapsedas = newnode
515 clearrebased(ui, repo, self.dest, self.state, self.skipped,
515 clearrebased(ui, repo, self.dest, self.state, self.skipped,
516 collapsedas)
516 collapsedas, self.keepf)
517
517
518 clearstatus(repo)
518 clearstatus(repo)
519 clearcollapsemsg(repo)
519 clearcollapsemsg(repo)
520
520
521 ui.note(_("rebase completed\n"))
521 ui.note(_("rebase completed\n"))
522 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
522 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
523 if self.skipped:
523 if self.skipped:
524 skippedlen = len(self.skipped)
524 skippedlen = len(self.skipped)
525 ui.note(_("%d revisions have been skipped\n") % skippedlen)
525 ui.note(_("%d revisions have been skipped\n") % skippedlen)
526
526
527 if (self.activebookmark and self.activebookmark in repo._bookmarks and
527 if (self.activebookmark and self.activebookmark in repo._bookmarks and
528 repo['.'].node() == repo._bookmarks[self.activebookmark]):
528 repo['.'].node() == repo._bookmarks[self.activebookmark]):
529 bookmarks.activate(repo, self.activebookmark)
529 bookmarks.activate(repo, self.activebookmark)
530
530
@command('rebase',
    [('s', 'source', '',
     _('rebase the specified changeset and descendants'), _('REV')),
    ('b', 'base', '',
     _('rebase everything from branching point of specified changeset'),
     _('REV')),
    ('r', 'rev', [],
     _('rebase these revisions'),
     _('REV')),
    ('d', 'dest', '',
     _('rebase onto the specified changeset'), _('REV')),
    ('', 'collapse', False, _('collapse the rebased changesets')),
    ('m', 'message', '',
     _('use text as collapse commit message'), _('TEXT')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('l', 'logfile', '',
     _('read collapse commit message from file'), _('FILE')),
    ('k', 'keep', False, _('keep original changesets')),
    ('', 'keepbranches', False, _('keep original branch names')),
    ('D', 'detach', False, _('(DEPRECATED)')),
    ('i', 'interactive', False, _('(DEPRECATED)')),
    ('t', 'tool', '', _('specify merge tool')),
    ('c', 'continue', False, _('continue an interrupted rebase')),
    ('a', 'abort', False, _('abort an interrupted rebase'))] +
     templateopts,
    _('[-s REV | -b REV] [-d REV] [OPTION]'))
def rebase(ui, repo, **opts):
    """move changeset (and descendants) to a different branch

    Rebase uses repeated merging to graft changesets from one part of
    history (the source) onto another (the destination). This can be
    useful for linearizing *local* changes relative to a master
    development tree.

    Published commits cannot be rebased (see :hg:`help phases`).
    To copy commits, see :hg:`help graft`.

    If you don't specify a destination changeset (``-d/--dest``), rebase
    will use the same logic as :hg:`merge` to pick a destination. if
    the current branch contains exactly one other head, the other head
    is merged with by default. Otherwise, an explicit revision with
    which to merge with must be provided. (destination changeset is not
    modified by rebasing, but new changesets are added as its
    descendants.)

    Here are the ways to select changesets:

      1. Explicitly select them using ``--rev``.

      2. Use ``--source`` to select a root changeset and include all of its
         descendants.

      3. Use ``--base`` to select a changeset; rebase will find ancestors
         and their descendants which are not also ancestors of the destination.

      4. If you do not specify any of ``--rev``, ``source``, or ``--base``,
         rebase will use ``--base .`` as above.

    Rebase will destroy original changesets unless you use ``--keep``.
    It will also move your bookmarks (even if you do).

    Some changesets may be dropped if they do not contribute changes
    (e.g. merges from the destination branch).

    Unlike ``merge``, rebase will do nothing if you are at the branch tip of
    a named branch with two heads. You will need to explicitly specify source
    and/or destination.

    If you need to use a tool to automate merge/conflict decisions, you
    can specify one with ``--tool``, see :hg:`help merge-tools`.
    As a caveat: the tool will not be used to mediate when a file was
    deleted, there is no hook presently available for this.

    If a rebase is interrupted to manually resolve a conflict, it can be
    continued with --continue/-c or aborted with --abort/-a.

    .. container:: verbose

      Examples:

      - move "local changes" (current commit back to branching point)
        to the current branch tip after a pull::

          hg rebase

      - move a single changeset to the stable branch::

          hg rebase -r 5f493448 -d stable

      - splice a commit and all its descendants onto another part of history::

          hg rebase --source c0c3 --dest 4cf9

      - rebase everything on a branch marked by a bookmark onto the
        default branch::

          hg rebase --base myfeature --dest default

      - collapse a sequence of changes into a single commit::

          hg rebase --collapse -r 1520:1525 -d .

      - move a named branch while preserving its name::

          hg rebase -r "branch(featureX)" -d 1.3 --keepbranches

      Configuration Options:

      You can make rebase require a destination if you set the following config
      option::

        [commands]
        rebase.requiredest = True

      By default, rebase will close the transaction after each commit. For
      performance purposes, you can configure rebase to use a single transaction
      across the entire rebase. WARNING: This setting introduces a significant
      risk of losing the work you've done in a rebase if the rebase aborts
      unexpectedly::

        [rebase]
        singletransaction = True

      Return Values:

      Returns 0 on success, 1 if nothing to rebase or there are
      unresolved conflicts.

    """
    rbsrt = rebaseruntime(repo, ui, opts)

    with repo.wlock(), repo.lock():
        # Validate input and define rebasing points
        destf = opts.get('dest', None)
        srcf = opts.get('source', None)
        basef = opts.get('base', None)
        revf = opts.get('rev', [])
        # search default destination in this space
        # used in the 'hg pull --rebase' case, see issue 5214.
        destspace = opts.get('_destspace')
        contf = opts.get('continue')
        abortf = opts.get('abort')
        if opts.get('interactive'):
            # -i is not implemented here; point the user at histedit,
            # suggesting the --config trick if the extension is not enabled.
            try:
                if extensions.find('histedit'):
                    enablehistedit = ''
            except KeyError:
                enablehistedit = " --config extensions.histedit="
            # 'help' deliberately shadows the builtin; local scope only
            help = "hg%s help -e histedit" % enablehistedit
            msg = _("interactive history editing is supported by the "
                    "'histedit' extension (see \"%s\")") % help
            raise error.Abort(msg)

        if rbsrt.collapsemsg and not rbsrt.collapsef:
            raise error.Abort(
                _('message can only be specified with collapse'))

        if contf or abortf:
            # resuming/aborting an interrupted rebase: reject options that
            # only make sense when starting a new one
            if contf and abortf:
                raise error.Abort(_('cannot use both abort and continue'))
            if rbsrt.collapsef:
                raise error.Abort(
                    _('cannot use collapse with continue or abort'))
            if srcf or basef or destf:
                raise error.Abort(
                    _('abort and continue do not allow specifying revisions'))
            if abortf and opts.get('tool', False):
                ui.warn(_('tool option will be ignored\n'))
            if contf:
                # refuse to continue while merge conflicts are unresolved
                ms = mergemod.mergestate.read(repo)
                mergeutil.checkunresolved(ms)

            retcode = rbsrt._prepareabortorcontinue(abortf)
            if retcode is not None:
                return retcode
        else:
            dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
                                          destspace=destspace)
            retcode = rbsrt._preparenewrebase(dest, rebaseset)
            if retcode is not None:
                return retcode

        tr = None
        # rebase.singletransaction wraps the whole rebase in one transaction
        # (faster, but riskier on abort -- see docstring above).
        if ui.configbool('rebase', 'singletransaction'):
            tr = repo.transaction('rebase')
        # NOTE(review): util.acceptintervention presumably closes tr normally
        # but keeps state when user intervention is required -- confirm
        with util.acceptintervention(tr):
            rbsrt._performrebase(tr)

        rbsrt._finishrebase()
720
720
def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
                destspace=None):
    """use revisions argument to define destination and rebase set

    Returns a ``(dest, rebaseset)`` pair where ``dest`` is a changectx for
    the rebase destination and ``rebaseset`` is a revset of revisions to
    rebase.  Returns ``(None, None)`` when there is nothing to rebase; in
    that case an explanatory status message has already been printed.

    ``destf``/``srcf``/``basef``/``revf`` mirror the command-line
    ``--dest``/``--source``/``--base``/``--rev`` arguments; ``--source``,
    ``--base`` and ``--rev`` are mutually exclusive.  ``destspace``
    constrains the default destination search (see issue5214).
    """
    if revf is None:
        revf = []

    # destspace is here to work around issues with `hg pull --rebase` see
    # issue5214 for details
    if srcf and basef:
        raise error.Abort(_('cannot specify both a source and a base'))
    if revf and basef:
        raise error.Abort(_('cannot specify both a revision and a base'))
    if revf and srcf:
        raise error.Abort(_('cannot specify both a revision and a source'))

    # refuse to start on top of an unfinished operation or dirty wdir
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    if ui.configbool('commands', 'rebase.requiredest') and not destf:
        raise error.Abort(_('you must specify a destination'),
                          hint=_('use: hg rebase -d REV'))

    if destf:
        dest = scmutil.revsingle(repo, destf)

    if revf:
        # --rev: take exactly the listed revisions
        rebaseset = scmutil.revrange(repo, revf)
        if not rebaseset:
            ui.status(_('empty "rev" revision set - nothing to rebase\n'))
            return None, None
    elif srcf:
        # --source: the given roots plus all their descendants
        src = scmutil.revrange(repo, [srcf])
        if not src:
            ui.status(_('empty "source" revision set - nothing to rebase\n'))
            return None, None
        rebaseset = repo.revs('(%ld)::', src)
        assert rebaseset
    else:
        # --base (or no selector at all, which defaults to --base .)
        base = scmutil.revrange(repo, [basef or '.'])
        if not base:
            ui.status(_('empty "base" revision set - '
                        "can't compute rebase set\n"))
            return None, None
        if not destf:
            dest = repo[_destrebase(repo, base, destspace=destspace)]
            destf = str(dest)

        roots = [] # selected children of branching points
        bpbase = {} # {branchingpoint: [origbase]}
        for b in base: # group bases by branching points
            bp = repo.revs('ancestor(%d, %d)', b, dest).first()
            bpbase[bp] = bpbase.get(bp, []) + [b]
        if None in bpbase:
            # emulate the old behavior, showing "nothing to rebase" (a better
            # behavior may be abort with "cannot find branching point" error)
            bpbase.clear()
        for bp, bs in bpbase.iteritems(): # calculate roots
            roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))

        rebaseset = repo.revs('%ld::', roots)

        if not rebaseset:
            # transform to list because smartsets are not comparable to
            # lists. This should be improved to honor laziness of
            # smartset.
            if list(base) == [dest.rev()]:
                if basef:
                    ui.status(_('nothing to rebase - %s is both "base"'
                                ' and destination\n') % dest)
                else:
                    ui.status(_('nothing to rebase - working directory '
                                'parent is also destination\n'))
            elif not repo.revs('%ld - ::%d', base, dest):
                if basef:
                    ui.status(_('nothing to rebase - "base" %s is '
                                'already an ancestor of destination '
                                '%s\n') %
                              ('+'.join(str(repo[r]) for r in base),
                               dest))
                else:
                    ui.status(_('nothing to rebase - working '
                                'directory parent is already an '
                                'ancestor of destination %s\n') % dest)
            else: # can it happen?
                ui.status(_('nothing to rebase from %s to %s\n') %
                          ('+'.join(str(repo[r]) for r in base), dest))
            return None, None

    if not destf:
        # no explicit destination: derive one from the rebase set
        dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
        destf = str(dest)

    return dest, rebaseset
815
815
def externalparent(repo, state, destancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state is collapsed on top of destancestors.
    Abort if there is more than one parent.
    """
    source = min(state)
    externals = set()
    # collect every parent of a rebased rev that lies outside both the
    # rebase set and the destination's ancestors (the collapse root's own
    # parents are excluded)
    for rev in state:
        if rev == source:
            continue
        externals.update(
            p.rev() for p in repo[rev].parents()
            if p.rev() not in state and p.rev() not in destancestors)
    if not externals:
        return nullrev
    if len(externals) == 1:
        return externals.pop()
    raise error.Abort(_('unable to collapse on top of %s, there is more '
                        'than one external parent: %s') %
                      (max(destancestors),
                       ', '.join(str(p) for p in sorted(externals))))
838
838
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
                 keepbranches=False, date=None):
    '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
    but also store useful information in extra.
    Return node of committed revision.'''
    # guard the dirstate so a failed commit can be rolled back cleanly
    dsguard = dirstateguard.dirstateguard(repo, 'rebase')
    try:
        repo.setparents(repo[p1].node(), repo[p2].node())
        ctx = repo[rev]
        if commitmsg is None:
            # default to the original revision's message
            commitmsg = ctx.description()
        # only force the branch through if requested and it actually differs
        keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
        # record provenance of the rebased commit in extras
        extra = {'rebase_source': ctx.hex()}
        if extrafn:
            extrafn(ctx, extra)

        # new commit keeps the source's phase, but never below draft
        destphase = max(ctx.phase(), phases.draft)
        overrides = {('phases', 'new-commit'): destphase}
        with repo.ui.configoverride(overrides, 'rebase'):
            if keepbranch:
                # allow an otherwise-empty commit so the branch change
                # alone is preserved
                repo.ui.setconfig('ui', 'allowemptycommit', True)
            # Commit might fail if unresolved files exist
            if date is None:
                date = ctx.date()
            newnode = repo.commit(text=commitmsg, user=ctx.user(),
                                  date=date, extra=extra, editor=editor)

        repo.dirstate.setbranch(repo[newnode].branch())
        dsguard.close()
        return newnode
    finally:
        # no-op if dsguard.close() already ran; otherwise restores dirstate
        release(dsguard)
871
871
def rebasenode(repo, rev, p1, base, state, collapse, dest):
    'Rebase a single revision rev on top of p1 using base as merge ancestor'
    # NOTE(review): 'state' is not read in this body; kept for signature
    # compatibility with callers -- confirm before removing
    # Merge phase
    # Update to destination and merge it with local
    if repo['.'].rev() != p1:
        repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
        mergemod.update(repo, p1, False, True)
    else:
        repo.ui.debug(" already in destination\n")
    # flush pending dirstate changes before merging
    repo.dirstate.write(repo.currenttransaction())
    repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
    if base is not None:
        repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
    # When collapsing in-place, the parent is the common ancestor, we
    # have to allow merging with it.
    stats = mergemod.update(repo, rev, True, True, base, collapse,
                            labels=['dest', 'source'])
    if collapse:
        copies.duplicatecopies(repo, rev, dest)
    else:
        # If we're not using --collapse, we need to
        # duplicate copies between the revision we're
        # rebasing and its first parent, but *not*
        # duplicate any copies that have already been
        # performed in the destination.
        p1rev = repo[rev].p1().rev()
        copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
    # stats is whatever mergemod.update returns (merge statistics used by
    # the caller to detect unresolved conflicts)
    return stats
900
900
def adjustdest(repo, rev, dest, state):
    """adjust rebase destination given the current rebase state

    rev is what is being rebased. Return a list of two revs, which are the
    adjusted destinations for rev's p1 and p2, respectively. If a parent is
    nullrev, return dest without adjustment for it.

    For example, when doing rebase -r B+E -d F, rebase will first move B to B1,
    and E's destination will be adjusted from F to B1.

        B1 <- written during rebasing B
        |
        F <- original destination of B, E
        |
        | E <- rev, which is being rebased
        | |
        | D <- prev, one parent of rev being checked
        | |
        | x <- skipped, ex. no successor or successor in (::dest)
        | |
        | C
        | |
        | B <- rebased as B1
        |/
        A

    Another example about merge changeset, rebase -r C+G+H -d K, rebase will
    first move C to C1, G to G1, and when it's checking H, the adjusted
    destinations will be [C1, G1].

        H C1 G1
       /|  | /
      F G  |/
    K | |  -> K
    | C D  |
    | |/   |
    | B    | ...
    |/     |/
    A      A
    """
    adjusted = []
    for parent in repo.changelog.parentrevs(rev):
        if parent == nullrev:
            # no parent on this side: the original destination stands
            adjusted.append(dest)
            continue
        # revisions already rebased, i.e. mapped to a real new revision
        finished = [old for old, new in state.items() if new > 0]
        # nearest already-rebased ancestor of this parent, if one exists
        nearest = repo.revs('max(%ld and (::%d))', finished, parent).first()
        adjusted.append(state[nearest] if nearest is not None else dest)
    return adjusted
952
952
def nearestrebased(repo, rev, state):
    """return the nearest ancestors of rev in the rebase result"""
    # revisions that were actually rebased (not skipped or dropped)
    moved = [old for old in state if state[old] > nullmerge]
    nearest = repo.revs('max(%ld and (::%d))', moved, rev).first()
    if nearest is None:
        return None
    return state[nearest]
961
961
def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped):
    """
    Abort if rebase will create divergence or rebase is noop because of markers

    `rebaseobsrevs`: set of obsolete revision in source
    `rebasesetrevs`: set of revisions to be rebased from source
    `rebaseobsskipped`: set of revisions from source skipped because they have
    successors in destination
    """
    # Rebasing an obsolete changeset whose successors are NOT in the
    # destination would create content divergence; refuse unless the user
    # explicitly opted in via experimental.allowdivergence.
    allowdivergence = ui.configbool('experimental',
                                    'allowdivergence')
    divergencebases = rebaseobsrevs - rebaseobsskipped

    if divergencebases and not allowdivergence:
        hashes = (str(repo[r])
                  for r in divergencebases)
        msg = _("this rebase will cause "
                "divergences from: %s")
        h = _("to force the rebase please set "
              "experimental.allowdivergence=True")
        raise error.Abort(msg % (",".join(hashes),), hint=h)
984
984
def defineparents(repo, rev, dest, state, destancestors,
                  obsoletenotrebased):
    'Return the new parent relationship of the revision that will be rebased'
    parents = repo[rev].parents()
    p1 = p2 = nullrev
    rp1 = None

    # --- first parent ---------------------------------------------------
    p1n = parents[0].rev()
    if p1n in destancestors:
        p1 = dest
    elif p1n in state:
        if state[p1n] == nullmerge:
            p1 = dest
        elif state[p1n] in revskipped:
            # parent was skipped: fall back to its nearest rebased ancestor
            p1 = nearestrebased(repo, p1n, state)
            if p1 is None:
                p1 = dest
        else:
            p1 = state[p1n]
    else: # p1n external
        p1 = dest
        p2 = p1n

    # --- second parent (merges only) ------------------------------------
    if len(parents) == 2 and parents[1].rev() not in destancestors:
        p2n = parents[1].rev()
        # interesting second parent
        if p2n in state:
            if p1 == dest: # p1n in destancestors or external
                p1 = state[p2n]
                if p1 == revprecursor:
                    # obsolete parent with a successor outside the rebase set
                    rp1 = obsoletenotrebased[p2n]
            elif state[p2n] in revskipped:
                p2 = nearestrebased(repo, p2n, state)
                if p2 is None:
                    # no ancestors rebased yet, detach
                    p2 = dest
            else:
                p2 = state[p2n]
        else: # p2n external
            if p2 != nullrev: # p1n external too => rev is a merged revision
                raise error.Abort(_('cannot use revision %d as base, result '
                        'would have 3 parents') % rev)
            p2 = p2n
    repo.ui.debug(" future parents are %d and %d\n" %
                            (repo[rp1 or p1].rev(), repo[p2].rev()))

    if not any(p.rev() in state for p in parents):
        # Case (1) root changeset of a non-detaching rebase set.
        # Let the merge mechanism find the base itself.
        base = None
    elif not repo[rev].p2():
        # Case (2) detaching the node with a single parent, use this parent
        base = repo[rev].p1().rev()
    else:
        # Case (3) rebasing a merge: pick as base the parent whose rebased
        # version became the destination of this step (p1).  Merging against
        # that parent makes the merge consist of exactly (a) the changeset's
        # own payload and (b) the changes induced by the rebase; picking the
        # other ("outside") parent would additionally drag in all the
        # unrelated changes between the two parents and create spurious
        # conflicts.  This choice is only provably right when the destination
        # descends from one parent and the other is outside the rebase set;
        # in the remaining configurations the user may be left resolving a
        # merge computed against a suboptimal ancestor.
        #
        # xx, p1 could be -4, and both parents could probably be -4...
        for p in repo[rev].parents():
            if state.get(p.rev()) == p1:
                base = p.rev()
                break
        else: # fallback when base not found
            base = None

            # Raise because this function is called wrong (see issue 4106)
            raise AssertionError('no base found to rebase on '
                                 '(defineparents called wrong)')
    return rp1 or p1, p2, base
1086
1086
def isagitpatch(repo, patchname):
    'Return true if the given patch is in git format'
    mqpatch = os.path.join(repo.mq.path, patchname)
    # Use open() inside a context manager so the descriptor is always
    # released; the previous file(...) call relied on GC to close it.
    with open(mqpatch, 'rb') as fp:
        for line in patch.linereader(fp):
            if line.startswith('diff --git'):
                return True
    return False
1094
1094
def updatemq(repo, state, skipped, **opts):
    'Update rebased mq patches - finalize and then import them'
    mq = repo.mq
    original_series = mq.fullseries[:]
    mqrebase = {}
    skippedpatches = set()

    # Split applied patches into those that took part in the rebase and
    # those that (unexpectedly) did not.
    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
                          (rev, p.name))
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev in skipped:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])
            else:
                name, isgit = mqrebase[rev]
                repo.ui.note(_('updating mq patch %s to %s:%s\n') %
                             (name, state[rev], repo[state[rev]]))
                mq.qimport(repo, (), patchname=name, git=isgit,
                           rev=[str(state[rev])])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [s for s in original_series
                     if mq.guard_re.split(s, 1)[0] not in skippedpatches]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
1135
1135
def storecollapsemsg(repo, collapsemsg):
    'Store the collapse message to allow recovery'
    # An empty/None message is persisted as a bare newline.
    fp = repo.vfs("last-message.txt", "w")
    fp.write("%s\n" % (collapsemsg or ''))
    fp.close()
1142
1142
def clearcollapsemsg(repo):
    'Remove collapse message file'
    # A missing file is fine: there may have been nothing stored.
    repo.vfs.unlinkpath("last-message.txt", ignoremissing=True)
1146
1146
def restorecollapsemsg(repo, isabort):
    'Restore previously stored collapse message'
    try:
        fp = repo.vfs("last-message.txt")
        collapsemsg = fp.readline().strip()
        fp.close()
    except IOError as err:
        # Only a missing file is tolerable; propagate anything else.
        if err.errno != errno.ENOENT:
            raise
        if isabort:
            # Oh well, just abort like normal
            collapsemsg = ''
        else:
            raise error.Abort(_('missing .hg/last-message.txt for rebase'))
    return collapsemsg
1162
1162
def clearstatus(repo):
    'Remove the status files'
    _clearrebasesetvisibiliy(repo)
    # An open transaction may still hold a generator that would re-create
    # the state file on close; drop it before unlinking.
    tr = repo.currenttransaction()
    if tr:
        tr.removefilegenerator('rebasestate')
    repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
1171
1171
def needupdate(repo, state):
    '''check whether we should `update --clean` away from a merge, or if
    somehow the working dir got forcibly updated, e.g. by older hg'''
    parents = [p.rev() for p in repo[None].parents()]

    # Not in a merge state at all: nothing to clean up.
    if len(parents) < 2:
        return False

    # We should be standing on the first as-of-yet unrebased commit.
    firstunrebased = min([old for old, new in state.iteritems()
                          if new == nullrev])
    return firstunrebased in parents
1188
1188
def abort(repo, originalwd, dest, state, activebookmark=None):
    '''Restore the repository to its original state. Additional args:

    activebookmark: the name of the bookmark that should be active after the
    restore'''

    try:
        # If the first commits in the rebased set get skipped during the
        # rebase, their values within the state mapping will be the dest rev
        # id. The list of new revisions must not contain dest (issue4896).
        newrevs = [s for s in state.values() if s >= 0 and s != dest]
        immutable = [d for d in newrevs if not repo[d].mutable()]
        cleanup = True
        if immutable:
            repo.ui.warn(_("warning: can't clean up public changesets %s\n")
                         % ', '.join(str(repo[r]) for r in immutable),
                         hint=_("see 'hg help phases' for details"))
            cleanup = False

        descendants = set()
        if newrevs:
            descendants = set(repo.changelog.descendants(newrevs))
        if descendants - set(newrevs):
            # someone committed on top of the rebased csets; stripping would
            # take their work with it
            repo.ui.warn(_("warning: new changesets detected on destination "
                           "branch, can't strip\n"))
            cleanup = False

        if cleanup:
            shouldupdate = False
            rebased = [x for x in state.values() if x >= 0 and x != dest]
            if rebased:
                strippoints = [
                    c.node() for c in repo.set('roots(%ld)', rebased)]

                updateifonnodes = set(rebased)
                updateifonnodes.add(dest)
                updateifonnodes.add(originalwd)
                shouldupdate = repo['.'].rev() in updateifonnodes

            # Update away from the rebase if necessary
            if shouldupdate or needupdate(repo, state):
                mergemod.update(repo, originalwd, False, True)

            # Strip from the first rebased revision
            if rebased:
                # no backup of rebased cset versions needed
                repair.strip(repo.ui, repo, strippoints)

        if activebookmark and activebookmark in repo._bookmarks:
            bookmarks.activate(repo, activebookmark)

    finally:
        clearstatus(repo)
        clearcollapsemsg(repo)
        repo.ui.warn(_('rebase aborted\n'))
    return 0
1245
1245
def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
    '''Define which revisions are going to be rebased and where

    repo: repo
    dest: context
    rebaseset: set of rev
    '''
    originalwd = repo['.'].rev()
    _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if 'qtip' in repo.tags() and (dest.node() in
                            [s.node for s in repo.mq.applied]):
        raise error.Abort(_('cannot rebase onto an applied mq patch'))

    roots = list(repo.set('roots(%ld)', rebaseset))
    if not roots:
        raise error.Abort(_('no matching revisions'))
    roots.sort()
    state = dict.fromkeys(rebaseset, revtodo)
    detachset = set()
    emptyrebase = True
    for root in roots:
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise error.Abort(_('source is ancestor of destination'))
        if commonbase == dest:
            wctx = repo[None]
            if dest == wctx.p1():
                # when rebasing to '.', it will use the current wd branch name
                samebranch = root.branch() == wctx.branch()
            else:
                samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and dest in root.parents():
                # mark the revision as done by setting its new revision
                # equal to its old (current) revisions
                state[root.rev()] = root.rev()
                repo.ui.debug('source is a child of destination\n')
                continue

        emptyrebase = False
        repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
        # Rebase turns <dest> into a parent of <root> while preserving the
        # number of parents of rebased changesets:
        #
        # - a changeset with a single parent is always rebased as a
        #   single-parent changeset;
        # - a merge stays a merge unless its parents are both ancestors of
        #   <dest> or themselves pruned from the rebased set;
        # - a parent of <root> that is an ancestor of <dest> is remapped to
        #   <dest> (always the case with --base);
        # - otherwise the original parents are *replaced* by <dest>,
        #   "detaching" the set: the changes introduced by ancestors of
        #   <root> not shared with <dest> (the detachset, marked nullmerge)
        #   are removed from the rebased changesets;
        # - a merge <root> with an unrelated parent is ambiguous and aborts
        #   (the actual abort is handled by `defineparents`).
        if len(root.parents()) <= 1:
            # ancestors of <root> not ancestors of <dest>
            detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
                                                            [root.rev()]))
    if emptyrebase:
        return None
    for rev in sorted(state):
        parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
        # if all parents of this revision are done, then so is this revision
        if parents and all(state.get(p) == p for p in parents):
            state[rev] = rev
    for r in detachset:
        if r not in state:
            state[r] = nullmerge
    if len(roots) > 1:
        # With multiple roots the rebase set may have "holes". Rebase roots
        # descending from a hole must not be detached like the other roots;
        # mark the holes with the special `revignored` so `defineparents`
        # searches a destination consistent with the rebased topology.
        rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
        for ignored in rebasedomain - set(rebaseset):
            state[ignored] = revignored
    for r in obsoletenotrebased:
        if obsoletenotrebased[r] is None:
            state[r] = revpruned
        else:
            state[r] = revprecursor
    return originalwd, dest.rev(), state
1356
1356
def clearrebased(ui, repo, dest, state, skipped, collapsedas=None, keepf=False):
    """dispose of rebased revision at the end of the rebase

    If `collapsedas` is not None, the rebase was a collapse whose result is
    the `collapsedas` node.

    If `keepf` is True, the rebase has --keep set and no nodes should be
    removed (but bookmarks still need to be moved).
    """
    tonode = repo.changelog.node
    replacements = {}  # {oldnode: (succnode, ...)}; empty succs means pruned
    moves = {}         # {oldnode: newnode} used to move bookmarks forward
    for rev, newrev in sorted(state.items()):
        if newrev >= 0 and newrev != rev:
            oldnode = tonode(rev)
            newnode = collapsedas or tonode(newrev)
            # Bookmarks move even with --keep, so record this unconditionally
            # (issue5682).
            moves[oldnode] = newnode
            if not keepf:
                if rev in skipped:
                    # skipped revisions have no successor
                    succs = ()
                else:
                    succs = (newnode,)
                replacements[oldnode] = succs
    scmutil.cleanupnodes(repo, replacements, 'rebase', moves)
1383
1381
1384 def pullrebase(orig, ui, repo, *args, **opts):
1382 def pullrebase(orig, ui, repo, *args, **opts):
1385 'Call rebase after pull if the latter has been invoked with --rebase'
1383 'Call rebase after pull if the latter has been invoked with --rebase'
1386 ret = None
1384 ret = None
1387 if opts.get('rebase'):
1385 if opts.get('rebase'):
1388 if ui.configbool('commands', 'rebase.requiredest'):
1386 if ui.configbool('commands', 'rebase.requiredest'):
1389 msg = _('rebase destination required by configuration')
1387 msg = _('rebase destination required by configuration')
1390 hint = _('use hg pull followed by hg rebase -d DEST')
1388 hint = _('use hg pull followed by hg rebase -d DEST')
1391 raise error.Abort(msg, hint=hint)
1389 raise error.Abort(msg, hint=hint)
1392
1390
1393 with repo.wlock(), repo.lock():
1391 with repo.wlock(), repo.lock():
1394 if opts.get('update'):
1392 if opts.get('update'):
1395 del opts['update']
1393 del opts['update']
1396 ui.debug('--update and --rebase are not compatible, ignoring '
1394 ui.debug('--update and --rebase are not compatible, ignoring '
1397 'the update flag\n')
1395 'the update flag\n')
1398
1396
1399 cmdutil.checkunfinished(repo)
1397 cmdutil.checkunfinished(repo)
1400 cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
1398 cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
1401 'please commit or shelve your changes first'))
1399 'please commit or shelve your changes first'))
1402
1400
1403 revsprepull = len(repo)
1401 revsprepull = len(repo)
1404 origpostincoming = commands.postincoming
1402 origpostincoming = commands.postincoming
1405 def _dummy(*args, **kwargs):
1403 def _dummy(*args, **kwargs):
1406 pass
1404 pass
1407 commands.postincoming = _dummy
1405 commands.postincoming = _dummy
1408 try:
1406 try:
1409 ret = orig(ui, repo, *args, **opts)
1407 ret = orig(ui, repo, *args, **opts)
1410 finally:
1408 finally:
1411 commands.postincoming = origpostincoming
1409 commands.postincoming = origpostincoming
1412 revspostpull = len(repo)
1410 revspostpull = len(repo)
1413 if revspostpull > revsprepull:
1411 if revspostpull > revsprepull:
1414 # --rev option from pull conflict with rebase own --rev
1412 # --rev option from pull conflict with rebase own --rev
1415 # dropping it
1413 # dropping it
1416 if 'rev' in opts:
1414 if 'rev' in opts:
1417 del opts['rev']
1415 del opts['rev']
1418 # positional argument from pull conflicts with rebase's own
1416 # positional argument from pull conflicts with rebase's own
1419 # --source.
1417 # --source.
1420 if 'source' in opts:
1418 if 'source' in opts:
1421 del opts['source']
1419 del opts['source']
1422 # revsprepull is the len of the repo, not revnum of tip.
1420 # revsprepull is the len of the repo, not revnum of tip.
1423 destspace = list(repo.changelog.revs(start=revsprepull))
1421 destspace = list(repo.changelog.revs(start=revsprepull))
1424 opts['_destspace'] = destspace
1422 opts['_destspace'] = destspace
1425 try:
1423 try:
1426 rebase(ui, repo, **opts)
1424 rebase(ui, repo, **opts)
1427 except error.NoMergeDestAbort:
1425 except error.NoMergeDestAbort:
1428 # we can maybe update instead
1426 # we can maybe update instead
1429 rev, _a, _b = destutil.destupdate(repo)
1427 rev, _a, _b = destutil.destupdate(repo)
1430 if rev == repo['.'].rev():
1428 if rev == repo['.'].rev():
1431 ui.status(_('nothing to rebase\n'))
1429 ui.status(_('nothing to rebase\n'))
1432 else:
1430 else:
1433 ui.status(_('nothing to rebase - updating instead\n'))
1431 ui.status(_('nothing to rebase - updating instead\n'))
1434 # not passing argument to get the bare update behavior
1432 # not passing argument to get the bare update behavior
1435 # with warning and trumpets
1433 # with warning and trumpets
1436 commands.update(ui, repo)
1434 commands.update(ui, repo)
1437 else:
1435 else:
1438 if opts.get('tool'):
1436 if opts.get('tool'):
1439 raise error.Abort(_('--tool can only be used with --rebase'))
1437 raise error.Abort(_('--tool can only be used with --rebase'))
1440 ret = orig(ui, repo, *args, **opts)
1438 ret = orig(ui, repo, *args, **opts)
1441
1439
1442 return ret
1440 return ret
1443
1441
1444 def _setrebasesetvisibility(repo, revs):
1442 def _setrebasesetvisibility(repo, revs):
1445 """store the currently rebased set on the repo object
1443 """store the currently rebased set on the repo object
1446
1444
1447 This is used by another function to prevent rebased revision to because
1445 This is used by another function to prevent rebased revision to because
1448 hidden (see issue4504)"""
1446 hidden (see issue4504)"""
1449 repo = repo.unfiltered()
1447 repo = repo.unfiltered()
1450 repo._rebaseset = revs
1448 repo._rebaseset = revs
1451 # invalidate cache if visibility changes
1449 # invalidate cache if visibility changes
1452 hiddens = repo.filteredrevcache.get('visible', set())
1450 hiddens = repo.filteredrevcache.get('visible', set())
1453 if revs & hiddens:
1451 if revs & hiddens:
1454 repo.invalidatevolatilesets()
1452 repo.invalidatevolatilesets()
1455
1453
1456 def _clearrebasesetvisibiliy(repo):
1454 def _clearrebasesetvisibiliy(repo):
1457 """remove rebaseset data from the repo"""
1455 """remove rebaseset data from the repo"""
1458 repo = repo.unfiltered()
1456 repo = repo.unfiltered()
1459 if '_rebaseset' in vars(repo):
1457 if '_rebaseset' in vars(repo):
1460 del repo._rebaseset
1458 del repo._rebaseset
1461
1459
1462 def _rebasedvisible(orig, repo):
1460 def _rebasedvisible(orig, repo):
1463 """ensure rebased revs stay visible (see issue4504)"""
1461 """ensure rebased revs stay visible (see issue4504)"""
1464 blockers = orig(repo)
1462 blockers = orig(repo)
1465 blockers.update(getattr(repo, '_rebaseset', ()))
1463 blockers.update(getattr(repo, '_rebaseset', ()))
1466 return blockers
1464 return blockers
1467
1465
1468 def _filterobsoleterevs(repo, revs):
1466 def _filterobsoleterevs(repo, revs):
1469 """returns a set of the obsolete revisions in revs"""
1467 """returns a set of the obsolete revisions in revs"""
1470 return set(r for r in revs if repo[r].obsolete())
1468 return set(r for r in revs if repo[r].obsolete())
1471
1469
1472 def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
1470 def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
1473 """return a mapping obsolete => successor for all obsolete nodes to be
1471 """return a mapping obsolete => successor for all obsolete nodes to be
1474 rebased that have a successors in the destination
1472 rebased that have a successors in the destination
1475
1473
1476 obsolete => None entries in the mapping indicate nodes with no successor"""
1474 obsolete => None entries in the mapping indicate nodes with no successor"""
1477 obsoletenotrebased = {}
1475 obsoletenotrebased = {}
1478
1476
1479 # Build a mapping successor => obsolete nodes for the obsolete
1477 # Build a mapping successor => obsolete nodes for the obsolete
1480 # nodes to be rebased
1478 # nodes to be rebased
1481 allsuccessors = {}
1479 allsuccessors = {}
1482 cl = repo.changelog
1480 cl = repo.changelog
1483 for r in rebaseobsrevs:
1481 for r in rebaseobsrevs:
1484 node = cl.node(r)
1482 node = cl.node(r)
1485 for s in obsutil.allsuccessors(repo.obsstore, [node]):
1483 for s in obsutil.allsuccessors(repo.obsstore, [node]):
1486 try:
1484 try:
1487 allsuccessors[cl.rev(s)] = cl.rev(node)
1485 allsuccessors[cl.rev(s)] = cl.rev(node)
1488 except LookupError:
1486 except LookupError:
1489 pass
1487 pass
1490
1488
1491 if allsuccessors:
1489 if allsuccessors:
1492 # Look for successors of obsolete nodes to be rebased among
1490 # Look for successors of obsolete nodes to be rebased among
1493 # the ancestors of dest
1491 # the ancestors of dest
1494 ancs = cl.ancestors([dest],
1492 ancs = cl.ancestors([dest],
1495 stoprev=min(allsuccessors),
1493 stoprev=min(allsuccessors),
1496 inclusive=True)
1494 inclusive=True)
1497 for s in allsuccessors:
1495 for s in allsuccessors:
1498 if s in ancs:
1496 if s in ancs:
1499 obsoletenotrebased[allsuccessors[s]] = s
1497 obsoletenotrebased[allsuccessors[s]] = s
1500 elif (s == allsuccessors[s] and
1498 elif (s == allsuccessors[s] and
1501 allsuccessors.values().count(s) == 1):
1499 allsuccessors.values().count(s) == 1):
1502 # plain prune
1500 # plain prune
1503 obsoletenotrebased[s] = None
1501 obsoletenotrebased[s] = None
1504
1502
1505 return obsoletenotrebased
1503 return obsoletenotrebased
1506
1504
1507 def summaryhook(ui, repo):
1505 def summaryhook(ui, repo):
1508 if not repo.vfs.exists('rebasestate'):
1506 if not repo.vfs.exists('rebasestate'):
1509 return
1507 return
1510 try:
1508 try:
1511 rbsrt = rebaseruntime(repo, ui, {})
1509 rbsrt = rebaseruntime(repo, ui, {})
1512 rbsrt.restorestatus()
1510 rbsrt.restorestatus()
1513 state = rbsrt.state
1511 state = rbsrt.state
1514 except error.RepoLookupError:
1512 except error.RepoLookupError:
1515 # i18n: column positioning for "hg summary"
1513 # i18n: column positioning for "hg summary"
1516 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1514 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1517 ui.write(msg)
1515 ui.write(msg)
1518 return
1516 return
1519 numrebased = len([i for i in state.itervalues() if i >= 0])
1517 numrebased = len([i for i in state.itervalues() if i >= 0])
1520 # i18n: column positioning for "hg summary"
1518 # i18n: column positioning for "hg summary"
1521 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1519 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1522 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1520 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1523 ui.label(_('%d remaining'), 'rebase.remaining') %
1521 ui.label(_('%d remaining'), 'rebase.remaining') %
1524 (len(state) - numrebased)))
1522 (len(state) - numrebased)))
1525
1523
1526 def uisetup(ui):
1524 def uisetup(ui):
1527 #Replace pull with a decorator to provide --rebase option
1525 #Replace pull with a decorator to provide --rebase option
1528 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1526 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1529 entry[1].append(('', 'rebase', None,
1527 entry[1].append(('', 'rebase', None,
1530 _("rebase working directory to branch head")))
1528 _("rebase working directory to branch head")))
1531 entry[1].append(('t', 'tool', '',
1529 entry[1].append(('t', 'tool', '',
1532 _("specify merge tool for rebase")))
1530 _("specify merge tool for rebase")))
1533 cmdutil.summaryhooks.add('rebase', summaryhook)
1531 cmdutil.summaryhooks.add('rebase', summaryhook)
1534 cmdutil.unfinishedstates.append(
1532 cmdutil.unfinishedstates.append(
1535 ['rebasestate', False, False, _('rebase in progress'),
1533 ['rebasestate', False, False, _('rebase in progress'),
1536 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1534 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1537 cmdutil.afterresolvedstates.append(
1535 cmdutil.afterresolvedstates.append(
1538 ['rebasestate', _('hg rebase --continue')])
1536 ['rebasestate', _('hg rebase --continue')])
1539 # ensure rebased rev are not hidden
1537 # ensure rebased rev are not hidden
1540 extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible)
1538 extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible)
@@ -1,1110 +1,1124 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from . import (
26 from . import (
27 encoding,
27 encoding,
28 error,
28 error,
29 match as matchmod,
29 match as matchmod,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 pathutil,
32 pathutil,
33 phases,
33 phases,
34 pycompat,
34 pycompat,
35 revsetlang,
35 revsetlang,
36 similar,
36 similar,
37 util,
37 util,
38 )
38 )
39
39
40 if pycompat.osname == 'nt':
40 if pycompat.osname == 'nt':
41 from . import scmwindows as scmplatform
41 from . import scmwindows as scmplatform
42 else:
42 else:
43 from . import scmposix as scmplatform
43 from . import scmposix as scmplatform
44
44
45 termsize = scmplatform.termsize
45 termsize = scmplatform.termsize
46
46
47 class status(tuple):
47 class status(tuple):
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
49 and 'ignored' properties are only relevant to the working copy.
49 and 'ignored' properties are only relevant to the working copy.
50 '''
50 '''
51
51
52 __slots__ = ()
52 __slots__ = ()
53
53
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
55 clean):
55 clean):
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
57 ignored, clean))
57 ignored, clean))
58
58
59 @property
59 @property
60 def modified(self):
60 def modified(self):
61 '''files that have been modified'''
61 '''files that have been modified'''
62 return self[0]
62 return self[0]
63
63
64 @property
64 @property
65 def added(self):
65 def added(self):
66 '''files that have been added'''
66 '''files that have been added'''
67 return self[1]
67 return self[1]
68
68
69 @property
69 @property
70 def removed(self):
70 def removed(self):
71 '''files that have been removed'''
71 '''files that have been removed'''
72 return self[2]
72 return self[2]
73
73
74 @property
74 @property
75 def deleted(self):
75 def deleted(self):
76 '''files that are in the dirstate, but have been deleted from the
76 '''files that are in the dirstate, but have been deleted from the
77 working copy (aka "missing")
77 working copy (aka "missing")
78 '''
78 '''
79 return self[3]
79 return self[3]
80
80
81 @property
81 @property
82 def unknown(self):
82 def unknown(self):
83 '''files not in the dirstate that are not ignored'''
83 '''files not in the dirstate that are not ignored'''
84 return self[4]
84 return self[4]
85
85
86 @property
86 @property
87 def ignored(self):
87 def ignored(self):
88 '''files not in the dirstate that are ignored (by _dirignore())'''
88 '''files not in the dirstate that are ignored (by _dirignore())'''
89 return self[5]
89 return self[5]
90
90
91 @property
91 @property
92 def clean(self):
92 def clean(self):
93 '''files that have not been modified'''
93 '''files that have not been modified'''
94 return self[6]
94 return self[6]
95
95
96 def __repr__(self, *args, **kwargs):
96 def __repr__(self, *args, **kwargs):
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
98 'unknown=%r, ignored=%r, clean=%r>') % self)
98 'unknown=%r, ignored=%r, clean=%r>') % self)
99
99
100 def itersubrepos(ctx1, ctx2):
100 def itersubrepos(ctx1, ctx2):
101 """find subrepos in ctx1 or ctx2"""
101 """find subrepos in ctx1 or ctx2"""
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
104 # has been modified (in ctx2) but not yet committed (in ctx1).
104 # has been modified (in ctx2) but not yet committed (in ctx1).
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
107
107
108 missing = set()
108 missing = set()
109
109
110 for subpath in ctx2.substate:
110 for subpath in ctx2.substate:
111 if subpath not in ctx1.substate:
111 if subpath not in ctx1.substate:
112 del subpaths[subpath]
112 del subpaths[subpath]
113 missing.add(subpath)
113 missing.add(subpath)
114
114
115 for subpath, ctx in sorted(subpaths.iteritems()):
115 for subpath, ctx in sorted(subpaths.iteritems()):
116 yield subpath, ctx.sub(subpath)
116 yield subpath, ctx.sub(subpath)
117
117
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
119 # status and diff will have an accurate result when it does
119 # status and diff will have an accurate result when it does
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
121 # against itself.
121 # against itself.
122 for subpath in missing:
122 for subpath in missing:
123 yield subpath, ctx2.nullsub(subpath, ctx1)
123 yield subpath, ctx2.nullsub(subpath, ctx1)
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 '''
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
132 ctx = repo[n]
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
134 secretlist.append(n)
135
135
136 if secretlist:
136 if secretlist:
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
138 % len(secretlist))
138 % len(secretlist))
139 else:
139 else:
140 ui.status(_("no changes found\n"))
140 ui.status(_("no changes found\n"))
141
141
142 def callcatch(ui, func):
142 def callcatch(ui, func):
143 """call func() with global exception handling
143 """call func() with global exception handling
144
144
145 return func() if no exception happens. otherwise do some error handling
145 return func() if no exception happens. otherwise do some error handling
146 and return an exit code accordingly. does not handle all exceptions.
146 and return an exit code accordingly. does not handle all exceptions.
147 """
147 """
148 try:
148 try:
149 try:
149 try:
150 return func()
150 return func()
151 except: # re-raises
151 except: # re-raises
152 ui.traceback()
152 ui.traceback()
153 raise
153 raise
154 # Global exception handling, alphabetically
154 # Global exception handling, alphabetically
155 # Mercurial-specific first, followed by built-in and library exceptions
155 # Mercurial-specific first, followed by built-in and library exceptions
156 except error.LockHeld as inst:
156 except error.LockHeld as inst:
157 if inst.errno == errno.ETIMEDOUT:
157 if inst.errno == errno.ETIMEDOUT:
158 reason = _('timed out waiting for lock held by %r') % inst.locker
158 reason = _('timed out waiting for lock held by %r') % inst.locker
159 else:
159 else:
160 reason = _('lock held by %r') % inst.locker
160 reason = _('lock held by %r') % inst.locker
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
162 if not inst.locker:
162 if not inst.locker:
163 ui.warn(_("(lock might be very busy)\n"))
163 ui.warn(_("(lock might be very busy)\n"))
164 except error.LockUnavailable as inst:
164 except error.LockUnavailable as inst:
165 ui.warn(_("abort: could not lock %s: %s\n") %
165 ui.warn(_("abort: could not lock %s: %s\n") %
166 (inst.desc or inst.filename, inst.strerror))
166 (inst.desc or inst.filename, inst.strerror))
167 except error.OutOfBandError as inst:
167 except error.OutOfBandError as inst:
168 if inst.args:
168 if inst.args:
169 msg = _("abort: remote error:\n")
169 msg = _("abort: remote error:\n")
170 else:
170 else:
171 msg = _("abort: remote error\n")
171 msg = _("abort: remote error\n")
172 ui.warn(msg)
172 ui.warn(msg)
173 if inst.args:
173 if inst.args:
174 ui.warn(''.join(inst.args))
174 ui.warn(''.join(inst.args))
175 if inst.hint:
175 if inst.hint:
176 ui.warn('(%s)\n' % inst.hint)
176 ui.warn('(%s)\n' % inst.hint)
177 except error.RepoError as inst:
177 except error.RepoError as inst:
178 ui.warn(_("abort: %s!\n") % inst)
178 ui.warn(_("abort: %s!\n") % inst)
179 if inst.hint:
179 if inst.hint:
180 ui.warn(_("(%s)\n") % inst.hint)
180 ui.warn(_("(%s)\n") % inst.hint)
181 except error.ResponseError as inst:
181 except error.ResponseError as inst:
182 ui.warn(_("abort: %s") % inst.args[0])
182 ui.warn(_("abort: %s") % inst.args[0])
183 if not isinstance(inst.args[1], basestring):
183 if not isinstance(inst.args[1], basestring):
184 ui.warn(" %r\n" % (inst.args[1],))
184 ui.warn(" %r\n" % (inst.args[1],))
185 elif not inst.args[1]:
185 elif not inst.args[1]:
186 ui.warn(_(" empty string\n"))
186 ui.warn(_(" empty string\n"))
187 else:
187 else:
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
189 except error.CensoredNodeError as inst:
189 except error.CensoredNodeError as inst:
190 ui.warn(_("abort: file censored %s!\n") % inst)
190 ui.warn(_("abort: file censored %s!\n") % inst)
191 except error.RevlogError as inst:
191 except error.RevlogError as inst:
192 ui.warn(_("abort: %s!\n") % inst)
192 ui.warn(_("abort: %s!\n") % inst)
193 except error.InterventionRequired as inst:
193 except error.InterventionRequired as inst:
194 ui.warn("%s\n" % inst)
194 ui.warn("%s\n" % inst)
195 if inst.hint:
195 if inst.hint:
196 ui.warn(_("(%s)\n") % inst.hint)
196 ui.warn(_("(%s)\n") % inst.hint)
197 return 1
197 return 1
198 except error.WdirUnsupported:
198 except error.WdirUnsupported:
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
200 except error.Abort as inst:
200 except error.Abort as inst:
201 ui.warn(_("abort: %s\n") % inst)
201 ui.warn(_("abort: %s\n") % inst)
202 if inst.hint:
202 if inst.hint:
203 ui.warn(_("(%s)\n") % inst.hint)
203 ui.warn(_("(%s)\n") % inst.hint)
204 except ImportError as inst:
204 except ImportError as inst:
205 ui.warn(_("abort: %s!\n") % inst)
205 ui.warn(_("abort: %s!\n") % inst)
206 m = str(inst).split()[-1]
206 m = str(inst).split()[-1]
207 if m in "mpatch bdiff".split():
207 if m in "mpatch bdiff".split():
208 ui.warn(_("(did you forget to compile extensions?)\n"))
208 ui.warn(_("(did you forget to compile extensions?)\n"))
209 elif m in "zlib".split():
209 elif m in "zlib".split():
210 ui.warn(_("(is your Python install correct?)\n"))
210 ui.warn(_("(is your Python install correct?)\n"))
211 except IOError as inst:
211 except IOError as inst:
212 if util.safehasattr(inst, "code"):
212 if util.safehasattr(inst, "code"):
213 ui.warn(_("abort: %s\n") % inst)
213 ui.warn(_("abort: %s\n") % inst)
214 elif util.safehasattr(inst, "reason"):
214 elif util.safehasattr(inst, "reason"):
215 try: # usually it is in the form (errno, strerror)
215 try: # usually it is in the form (errno, strerror)
216 reason = inst.reason.args[1]
216 reason = inst.reason.args[1]
217 except (AttributeError, IndexError):
217 except (AttributeError, IndexError):
218 # it might be anything, for example a string
218 # it might be anything, for example a string
219 reason = inst.reason
219 reason = inst.reason
220 if isinstance(reason, unicode):
220 if isinstance(reason, unicode):
221 # SSLError of Python 2.7.9 contains a unicode
221 # SSLError of Python 2.7.9 contains a unicode
222 reason = encoding.unitolocal(reason)
222 reason = encoding.unitolocal(reason)
223 ui.warn(_("abort: error: %s\n") % reason)
223 ui.warn(_("abort: error: %s\n") % reason)
224 elif (util.safehasattr(inst, "args")
224 elif (util.safehasattr(inst, "args")
225 and inst.args and inst.args[0] == errno.EPIPE):
225 and inst.args and inst.args[0] == errno.EPIPE):
226 pass
226 pass
227 elif getattr(inst, "strerror", None):
227 elif getattr(inst, "strerror", None):
228 if getattr(inst, "filename", None):
228 if getattr(inst, "filename", None):
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
230 else:
230 else:
231 ui.warn(_("abort: %s\n") % inst.strerror)
231 ui.warn(_("abort: %s\n") % inst.strerror)
232 else:
232 else:
233 raise
233 raise
234 except OSError as inst:
234 except OSError as inst:
235 if getattr(inst, "filename", None) is not None:
235 if getattr(inst, "filename", None) is not None:
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
237 else:
237 else:
238 ui.warn(_("abort: %s\n") % inst.strerror)
238 ui.warn(_("abort: %s\n") % inst.strerror)
239 except MemoryError:
239 except MemoryError:
240 ui.warn(_("abort: out of memory\n"))
240 ui.warn(_("abort: out of memory\n"))
241 except SystemExit as inst:
241 except SystemExit as inst:
242 # Commands shouldn't sys.exit directly, but give a return code.
242 # Commands shouldn't sys.exit directly, but give a return code.
243 # Just in case catch this and and pass exit code to caller.
243 # Just in case catch this and and pass exit code to caller.
244 return inst.code
244 return inst.code
245 except socket.error as inst:
245 except socket.error as inst:
246 ui.warn(_("abort: %s\n") % inst.args[-1])
246 ui.warn(_("abort: %s\n") % inst.args[-1])
247
247
248 return -1
248 return -1
249
249
250 def checknewlabel(repo, lbl, kind):
250 def checknewlabel(repo, lbl, kind):
251 # Do not use the "kind" parameter in ui output.
251 # Do not use the "kind" parameter in ui output.
252 # It makes strings difficult to translate.
252 # It makes strings difficult to translate.
253 if lbl in ['tip', '.', 'null']:
253 if lbl in ['tip', '.', 'null']:
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
255 for c in (':', '\0', '\n', '\r'):
255 for c in (':', '\0', '\n', '\r'):
256 if c in lbl:
256 if c in lbl:
257 raise error.Abort(_("%r cannot be used in a name") % c)
257 raise error.Abort(_("%r cannot be used in a name") % c)
258 try:
258 try:
259 int(lbl)
259 int(lbl)
260 raise error.Abort(_("cannot use an integer as a name"))
260 raise error.Abort(_("cannot use an integer as a name"))
261 except ValueError:
261 except ValueError:
262 pass
262 pass
263
263
264 def checkfilename(f):
264 def checkfilename(f):
265 '''Check that the filename f is an acceptable filename for a tracked file'''
265 '''Check that the filename f is an acceptable filename for a tracked file'''
266 if '\r' in f or '\n' in f:
266 if '\r' in f or '\n' in f:
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
268
268
269 def checkportable(ui, f):
269 def checkportable(ui, f):
270 '''Check if filename f is portable and warn or abort depending on config'''
270 '''Check if filename f is portable and warn or abort depending on config'''
271 checkfilename(f)
271 checkfilename(f)
272 abort, warn = checkportabilityalert(ui)
272 abort, warn = checkportabilityalert(ui)
273 if abort or warn:
273 if abort or warn:
274 msg = util.checkwinfilename(f)
274 msg = util.checkwinfilename(f)
275 if msg:
275 if msg:
276 msg = "%s: %r" % (msg, f)
276 msg = "%s: %r" % (msg, f)
277 if abort:
277 if abort:
278 raise error.Abort(msg)
278 raise error.Abort(msg)
279 ui.warn(_("warning: %s\n") % msg)
279 ui.warn(_("warning: %s\n") % msg)
280
280
281 def checkportabilityalert(ui):
281 def checkportabilityalert(ui):
282 '''check if the user's config requests nothing, a warning, or abort for
282 '''check if the user's config requests nothing, a warning, or abort for
283 non-portable filenames'''
283 non-portable filenames'''
284 val = ui.config('ui', 'portablefilenames')
284 val = ui.config('ui', 'portablefilenames')
285 lval = val.lower()
285 lval = val.lower()
286 bval = util.parsebool(val)
286 bval = util.parsebool(val)
287 abort = pycompat.osname == 'nt' or lval == 'abort'
287 abort = pycompat.osname == 'nt' or lval == 'abort'
288 warn = bval or lval == 'warn'
288 warn = bval or lval == 'warn'
289 if bval is None and not (warn or abort or lval == 'ignore'):
289 if bval is None and not (warn or abort or lval == 'ignore'):
290 raise error.ConfigError(
290 raise error.ConfigError(
291 _("ui.portablefilenames value is invalid ('%s')") % val)
291 _("ui.portablefilenames value is invalid ('%s')") % val)
292 return abort, warn
292 return abort, warn
293
293
294 class casecollisionauditor(object):
294 class casecollisionauditor(object):
295 def __init__(self, ui, abort, dirstate):
295 def __init__(self, ui, abort, dirstate):
296 self._ui = ui
296 self._ui = ui
297 self._abort = abort
297 self._abort = abort
298 allfiles = '\0'.join(dirstate._map)
298 allfiles = '\0'.join(dirstate._map)
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
300 self._dirstate = dirstate
300 self._dirstate = dirstate
301 # The purpose of _newfiles is so that we don't complain about
301 # The purpose of _newfiles is so that we don't complain about
302 # case collisions if someone were to call this object with the
302 # case collisions if someone were to call this object with the
303 # same filename twice.
303 # same filename twice.
304 self._newfiles = set()
304 self._newfiles = set()
305
305
306 def __call__(self, f):
306 def __call__(self, f):
307 if f in self._newfiles:
307 if f in self._newfiles:
308 return
308 return
309 fl = encoding.lower(f)
309 fl = encoding.lower(f)
310 if fl in self._loweredfiles and f not in self._dirstate:
310 if fl in self._loweredfiles and f not in self._dirstate:
311 msg = _('possible case-folding collision for %s') % f
311 msg = _('possible case-folding collision for %s') % f
312 if self._abort:
312 if self._abort:
313 raise error.Abort(msg)
313 raise error.Abort(msg)
314 self._ui.warn(_("warning: %s\n") % msg)
314 self._ui.warn(_("warning: %s\n") % msg)
315 self._loweredfiles.add(fl)
315 self._loweredfiles.add(fl)
316 self._newfiles.add(f)
316 self._newfiles.add(f)
317
317
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # Only revisions at or below maxrev participate in the hash.
    hidden = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not hidden:
        return None
    s = hashlib.sha1()
    for rev in hidden:
        s.update('%d;' % rev)
    return s.digest()
341
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the starting path itself are fatal; errors deeper
        # in the walk are silently skipped.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return False when an equivalent
            # directory (same inode) was already visited, so symlink
            # cycles terminate.
            dirstat = os.stat(dirname)
            for seenstat in dirlst:
                if samestat(dirstat, seenstat):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect cycles, so do not follow links
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
389
def binnode(ctx):
    """Return binary node id for a given basectx"""
    n = ctx.node()
    # A working-directory context has no node; report the magic wdir id.
    return wdirid if n is None else n
396
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    r = ctx.rev()
    # The working directory has no revision number; use the magic wdir rev.
    return wdirrev if r is None else r
404
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx (the last of its revset).

    Falls back to ``repo[default]`` when revspec is empty, treating the
    integer 0 as a real revision. Raises Abort when the set is empty.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
413
def _pairspec(revspec):
    # True when the parsed revset is a top-level range expression, which
    # must always resolve to a pair of revisions.
    parsed = revsetlang.parse(revspec)
    return parsed and parsed[0] in ('range', 'rangepre', 'rangepost',
                                    'rangeall')
417
def revpair(repo, revs):
    """Resolve user revsets to a ``(node, node-or-None)`` pair.

    With no revs, returns the working directory's first parent and None.
    Raises Abort on an empty range or when one side of a multi-spec range
    resolves to nothing.
    """
    if not revs:
        return repo.dirstate.p1(), None

    rset = revrange(repo, revs)

    if not rset:
        first = second = None
    elif rset.isascending():
        first, second = rset.min(), rset.max()
    elif rset.isdescending():
        first, second = rset.max(), rset.min()
    else:
        first, second = rset.first(), rset.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
447
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases. The revsets in
    ``specs`` are executed via a chained ``OR`` expression; an empty
    ``specs`` yields an empty result.

    ``specs`` may contain integers, which are treated as revision numbers.
    Revsets are assumed to be pre-formatted; use
    ``revsetlang.formatspec()`` to expand arguments before passing them in.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
475
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents  # real merge: both sides matter
    if repo.ui.debugflag:
        # debug output always shows both slots, padding with null
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []  # immediately preceding revision: elide
    return parents
491
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # only bare (kind-less) patterns are glob-expanded
            try:
                matches = glob.glob(pat)
            except re.error:
                matches = [pat]
            if matches:
                expanded.extend(matches)
                continue
        # kinded pattern, or a glob that matched nothing: keep verbatim
        expanded.append(kindpat)
    return expanded
510
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # windows shells do not expand globs; do it ourselves
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # an always-matcher carries no interesting patterns
    if m.always():
        pats = []
    return m, pats
535
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, usedpats = matchandpats(ctx, pats, opts, globbed, default,
                                     badfn=badfn)
    return matcher
540
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
544
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
548
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backupdir = ui.config('ui', 'origbackuppath')
    if backupdir is None:
        return filepath + ".orig"

    # Mirror the file's repo-relative path under the configured backup dir.
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backupdir, relpath)

    parent = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(parent):
        ui.note(_('creating directory: %s\n') % parent)
        util.makedirs(parent)

    return fullorigpath + ".orig"
568
569 class _containsnode(object):
569 class _containsnode(object):
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
571
571
572 def __init__(self, repo, revcontainer):
572 def __init__(self, repo, revcontainer):
573 self._torev = repo.changelog.rev
573 self._torev = repo.changelog.rev
574 self._revcontains = revcontainer.__contains__
574 self._revcontains = revcontainer.__contains__
575
575
576 def __contains__(self, node):
576 def __contains__(self, node):
577 return self._revcontains(self._torev(node))
577 return self._revcontains(self._torev(node))
578
578
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    # nothing to replace and no explicit bookmark moves: avoid opening a
    # transaction at all
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # an explicit entry in 'moves' overrides the computed destination
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            # 'replacements' may be empty when only bookmark moves were
            # requested; do not write an empty marker batch
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal,
    recursing into subrepos and recording detected renames as copies.

    Returns 1 when an explicitly named pattern was rejected or a subrepo
    reported failure, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn for patterns the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    for abs in sorted(unknownset.union(deleted)):
        if repo.ui.verbose or not m.exact(abs):
            label = _('adding %s\n') if abs in unknownset \
                    else _('removing %s\n')
            repo.ui.status(label % m.uipath(abs))

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return ret
723
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the bad-file callback just records; 'rejected' is checked at the end
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset.union(deleted)):
            label = _('adding %s\n') if abs in unknownset \
                    else _('removing %s\n')
            repo.ui.status(label % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
752
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        # NB: branch order matters; e.g. an added-but-missing file must be
        # classified as deleted, not added.
        if state == '?' and audit_path.check(fname):
            unknown.append(fname)
        elif state != 'r' and not st:
            deleted.append(fname)
        elif state == 'r' and st:
            forgotten.append(fname)
        elif state == 'r':
            # removed and absent on disk: kept for rename detection
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
781
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
796
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    # a single wlock covers all three dirstate mutations
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for newname, oldname in renames.iteritems():
            workingctx.copy(oldname, newname)
806
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # origsrc was only ever added, never committed: no copy source exists
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
825
812 def readrequires(opener, supported):
826 def readrequires(opener, supported):
813 '''Reads and parses .hg/requires and checks if all entries found
827 '''Reads and parses .hg/requires and checks if all entries found
814 are in the list of supported features.'''
828 are in the list of supported features.'''
815 requirements = set(opener.read("requires").splitlines())
829 requirements = set(opener.read("requires").splitlines())
816 missings = []
830 missings = []
817 for r in requirements:
831 for r in requirements:
818 if r not in supported:
832 if r not in supported:
819 if not r or not r[0].isalnum():
833 if not r or not r[0].isalnum():
820 raise error.RequirementError(_(".hg/requires file is corrupt"))
834 raise error.RequirementError(_(".hg/requires file is corrupt"))
821 missings.append(r)
835 missings.append(r)
822 missings.sort()
836 missings.sort()
823 if missings:
837 if missings:
824 raise error.RequirementError(
838 raise error.RequirementError(
825 _("repository requires features unknown to this Mercurial: %s")
839 _("repository requires features unknown to this Mercurial: %s")
826 % " ".join(missings),
840 % " ".join(missings),
827 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
841 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
828 " for more information"))
842 " for more information"))
829 return requirements
843 return requirements
830
844
831 def writerequires(opener, requirements):
845 def writerequires(opener, requirements):
832 with opener('requires', 'w') as fp:
846 with opener('requires', 'w') as fp:
833 for r in sorted(requirements):
847 for r in sorted(requirements):
834 fp.write("%s\n" % r)
848 fp.write("%s\n" % r)
835
849
836 class filecachesubentry(object):
850 class filecachesubentry(object):
837 def __init__(self, path, stat):
851 def __init__(self, path, stat):
838 self.path = path
852 self.path = path
839 self.cachestat = None
853 self.cachestat = None
840 self._cacheable = None
854 self._cacheable = None
841
855
842 if stat:
856 if stat:
843 self.cachestat = filecachesubentry.stat(self.path)
857 self.cachestat = filecachesubentry.stat(self.path)
844
858
845 if self.cachestat:
859 if self.cachestat:
846 self._cacheable = self.cachestat.cacheable()
860 self._cacheable = self.cachestat.cacheable()
847 else:
861 else:
848 # None means we don't know yet
862 # None means we don't know yet
849 self._cacheable = None
863 self._cacheable = None
850
864
851 def refresh(self):
865 def refresh(self):
852 if self.cacheable():
866 if self.cacheable():
853 self.cachestat = filecachesubentry.stat(self.path)
867 self.cachestat = filecachesubentry.stat(self.path)
854
868
855 def cacheable(self):
869 def cacheable(self):
856 if self._cacheable is not None:
870 if self._cacheable is not None:
857 return self._cacheable
871 return self._cacheable
858
872
859 # we don't know yet, assume it is for now
873 # we don't know yet, assume it is for now
860 return True
874 return True
861
875
862 def changed(self):
876 def changed(self):
863 # no point in going further if we can't cache it
877 # no point in going further if we can't cache it
864 if not self.cacheable():
878 if not self.cacheable():
865 return True
879 return True
866
880
867 newstat = filecachesubentry.stat(self.path)
881 newstat = filecachesubentry.stat(self.path)
868
882
869 # we may not know if it's cacheable yet, check again now
883 # we may not know if it's cacheable yet, check again now
870 if newstat and self._cacheable is None:
884 if newstat and self._cacheable is None:
871 self._cacheable = newstat.cacheable()
885 self._cacheable = newstat.cacheable()
872
886
873 # check again
887 # check again
874 if not self._cacheable:
888 if not self._cacheable:
875 return True
889 return True
876
890
877 if self.cachestat != newstat:
891 if self.cachestat != newstat:
878 self.cachestat = newstat
892 self.cachestat = newstat
879 return True
893 return True
880 else:
894 else:
881 return False
895 return False
882
896
883 @staticmethod
897 @staticmethod
884 def stat(path):
898 def stat(path):
885 try:
899 try:
886 return util.cachestat(path)
900 return util.cachestat(path)
887 except OSError as e:
901 except OSError as e:
888 if e.errno != errno.ENOENT:
902 if e.errno != errno.ENOENT:
889 raise
903 raise
890
904
891 class filecacheentry(object):
905 class filecacheentry(object):
892 def __init__(self, paths, stat=True):
906 def __init__(self, paths, stat=True):
893 self._entries = []
907 self._entries = []
894 for path in paths:
908 for path in paths:
895 self._entries.append(filecachesubentry(path, stat))
909 self._entries.append(filecachesubentry(path, stat))
896
910
897 def changed(self):
911 def changed(self):
898 '''true if any entry has changed'''
912 '''true if any entry has changed'''
899 for entry in self._entries:
913 for entry in self._entries:
900 if entry.changed():
914 if entry.changed():
901 return True
915 return True
902 return False
916 return False
903
917
904 def refresh(self):
918 def refresh(self):
905 for entry in self._entries:
919 for entry in self._entries:
906 entry.refresh()
920 entry.refresh()
907
921
908 class filecache(object):
922 class filecache(object):
909 '''A property like decorator that tracks files under .hg/ for updates.
923 '''A property like decorator that tracks files under .hg/ for updates.
910
924
911 Records stat info when called in _filecache.
925 Records stat info when called in _filecache.
912
926
913 On subsequent calls, compares old stat info with new info, and recreates the
927 On subsequent calls, compares old stat info with new info, and recreates the
914 object when any of the files changes, updating the new stat info in
928 object when any of the files changes, updating the new stat info in
915 _filecache.
929 _filecache.
916
930
917 Mercurial either atomic renames or appends for files under .hg,
931 Mercurial either atomic renames or appends for files under .hg,
918 so to ensure the cache is reliable we need the filesystem to be able
932 so to ensure the cache is reliable we need the filesystem to be able
919 to tell us if a file has been replaced. If it can't, we fallback to
933 to tell us if a file has been replaced. If it can't, we fallback to
920 recreating the object on every call (essentially the same behavior as
934 recreating the object on every call (essentially the same behavior as
921 propertycache).
935 propertycache).
922
936
923 '''
937 '''
924 def __init__(self, *paths):
938 def __init__(self, *paths):
925 self.paths = paths
939 self.paths = paths
926
940
927 def join(self, obj, fname):
941 def join(self, obj, fname):
928 """Used to compute the runtime path of a cached file.
942 """Used to compute the runtime path of a cached file.
929
943
930 Users should subclass filecache and provide their own version of this
944 Users should subclass filecache and provide their own version of this
931 function to call the appropriate join function on 'obj' (an instance
945 function to call the appropriate join function on 'obj' (an instance
932 of the class that its member function was decorated).
946 of the class that its member function was decorated).
933 """
947 """
934 raise NotImplementedError
948 raise NotImplementedError
935
949
936 def __call__(self, func):
950 def __call__(self, func):
937 self.func = func
951 self.func = func
938 self.name = func.__name__.encode('ascii')
952 self.name = func.__name__.encode('ascii')
939 return self
953 return self
940
954
941 def __get__(self, obj, type=None):
955 def __get__(self, obj, type=None):
942 # if accessed on the class, return the descriptor itself.
956 # if accessed on the class, return the descriptor itself.
943 if obj is None:
957 if obj is None:
944 return self
958 return self
945 # do we need to check if the file changed?
959 # do we need to check if the file changed?
946 if self.name in obj.__dict__:
960 if self.name in obj.__dict__:
947 assert self.name in obj._filecache, self.name
961 assert self.name in obj._filecache, self.name
948 return obj.__dict__[self.name]
962 return obj.__dict__[self.name]
949
963
950 entry = obj._filecache.get(self.name)
964 entry = obj._filecache.get(self.name)
951
965
952 if entry:
966 if entry:
953 if entry.changed():
967 if entry.changed():
954 entry.obj = self.func(obj)
968 entry.obj = self.func(obj)
955 else:
969 else:
956 paths = [self.join(obj, path) for path in self.paths]
970 paths = [self.join(obj, path) for path in self.paths]
957
971
958 # We stat -before- creating the object so our cache doesn't lie if
972 # We stat -before- creating the object so our cache doesn't lie if
959 # a writer modified between the time we read and stat
973 # a writer modified between the time we read and stat
960 entry = filecacheentry(paths, True)
974 entry = filecacheentry(paths, True)
961 entry.obj = self.func(obj)
975 entry.obj = self.func(obj)
962
976
963 obj._filecache[self.name] = entry
977 obj._filecache[self.name] = entry
964
978
965 obj.__dict__[self.name] = entry.obj
979 obj.__dict__[self.name] = entry.obj
966 return entry.obj
980 return entry.obj
967
981
968 def __set__(self, obj, value):
982 def __set__(self, obj, value):
969 if self.name not in obj._filecache:
983 if self.name not in obj._filecache:
970 # we add an entry for the missing value because X in __dict__
984 # we add an entry for the missing value because X in __dict__
971 # implies X in _filecache
985 # implies X in _filecache
972 paths = [self.join(obj, path) for path in self.paths]
986 paths = [self.join(obj, path) for path in self.paths]
973 ce = filecacheentry(paths, False)
987 ce = filecacheentry(paths, False)
974 obj._filecache[self.name] = ce
988 obj._filecache[self.name] = ce
975 else:
989 else:
976 ce = obj._filecache[self.name]
990 ce = obj._filecache[self.name]
977
991
978 ce.obj = value # update cached copy
992 ce.obj = value # update cached copy
979 obj.__dict__[self.name] = value # update copy returned by obj.x
993 obj.__dict__[self.name] = value # update copy returned by obj.x
980
994
981 def __delete__(self, obj):
995 def __delete__(self, obj):
982 try:
996 try:
983 del obj.__dict__[self.name]
997 del obj.__dict__[self.name]
984 except KeyError:
998 except KeyError:
985 raise AttributeError(self.name)
999 raise AttributeError(self.name)
986
1000
987 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1001 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
988 if lock is None:
1002 if lock is None:
989 raise error.LockInheritanceContractViolation(
1003 raise error.LockInheritanceContractViolation(
990 'lock can only be inherited while held')
1004 'lock can only be inherited while held')
991 if environ is None:
1005 if environ is None:
992 environ = {}
1006 environ = {}
993 with lock.inherit() as locker:
1007 with lock.inherit() as locker:
994 environ[envvar] = locker
1008 environ[envvar] = locker
995 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1009 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
996
1010
997 def wlocksub(repo, cmd, *args, **kwargs):
1011 def wlocksub(repo, cmd, *args, **kwargs):
998 """run cmd as a subprocess that allows inheriting repo's wlock
1012 """run cmd as a subprocess that allows inheriting repo's wlock
999
1013
1000 This can only be called while the wlock is held. This takes all the
1014 This can only be called while the wlock is held. This takes all the
1001 arguments that ui.system does, and returns the exit code of the
1015 arguments that ui.system does, and returns the exit code of the
1002 subprocess."""
1016 subprocess."""
1003 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1017 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1004 **kwargs)
1018 **kwargs)
1005
1019
1006 def gdinitconfig(ui):
1020 def gdinitconfig(ui):
1007 """helper function to know if a repo should be created as general delta
1021 """helper function to know if a repo should be created as general delta
1008 """
1022 """
1009 # experimental config: format.generaldelta
1023 # experimental config: format.generaldelta
1010 return (ui.configbool('format', 'generaldelta')
1024 return (ui.configbool('format', 'generaldelta')
1011 or ui.configbool('format', 'usegeneraldelta'))
1025 or ui.configbool('format', 'usegeneraldelta'))
1012
1026
1013 def gddeltaconfig(ui):
1027 def gddeltaconfig(ui):
1014 """helper function to know if incoming delta should be optimised
1028 """helper function to know if incoming delta should be optimised
1015 """
1029 """
1016 # experimental config: format.generaldelta
1030 # experimental config: format.generaldelta
1017 return ui.configbool('format', 'generaldelta')
1031 return ui.configbool('format', 'generaldelta')
1018
1032
1019 class simplekeyvaluefile(object):
1033 class simplekeyvaluefile(object):
1020 """A simple file with key=value lines
1034 """A simple file with key=value lines
1021
1035
1022 Keys must be alphanumerics and start with a letter, values must not
1036 Keys must be alphanumerics and start with a letter, values must not
1023 contain '\n' characters"""
1037 contain '\n' characters"""
1024 firstlinekey = '__firstline'
1038 firstlinekey = '__firstline'
1025
1039
1026 def __init__(self, vfs, path, keys=None):
1040 def __init__(self, vfs, path, keys=None):
1027 self.vfs = vfs
1041 self.vfs = vfs
1028 self.path = path
1042 self.path = path
1029
1043
1030 def read(self, firstlinenonkeyval=False):
1044 def read(self, firstlinenonkeyval=False):
1031 """Read the contents of a simple key-value file
1045 """Read the contents of a simple key-value file
1032
1046
1033 'firstlinenonkeyval' indicates whether the first line of file should
1047 'firstlinenonkeyval' indicates whether the first line of file should
1034 be treated as a key-value pair or reuturned fully under the
1048 be treated as a key-value pair or reuturned fully under the
1035 __firstline key."""
1049 __firstline key."""
1036 lines = self.vfs.readlines(self.path)
1050 lines = self.vfs.readlines(self.path)
1037 d = {}
1051 d = {}
1038 if firstlinenonkeyval:
1052 if firstlinenonkeyval:
1039 if not lines:
1053 if not lines:
1040 e = _("empty simplekeyvalue file")
1054 e = _("empty simplekeyvalue file")
1041 raise error.CorruptedState(e)
1055 raise error.CorruptedState(e)
1042 # we don't want to include '\n' in the __firstline
1056 # we don't want to include '\n' in the __firstline
1043 d[self.firstlinekey] = lines[0][:-1]
1057 d[self.firstlinekey] = lines[0][:-1]
1044 del lines[0]
1058 del lines[0]
1045
1059
1046 try:
1060 try:
1047 # the 'if line.strip()' part prevents us from failing on empty
1061 # the 'if line.strip()' part prevents us from failing on empty
1048 # lines which only contain '\n' therefore are not skipped
1062 # lines which only contain '\n' therefore are not skipped
1049 # by 'if line'
1063 # by 'if line'
1050 updatedict = dict(line[:-1].split('=', 1) for line in lines
1064 updatedict = dict(line[:-1].split('=', 1) for line in lines
1051 if line.strip())
1065 if line.strip())
1052 if self.firstlinekey in updatedict:
1066 if self.firstlinekey in updatedict:
1053 e = _("%r can't be used as a key")
1067 e = _("%r can't be used as a key")
1054 raise error.CorruptedState(e % self.firstlinekey)
1068 raise error.CorruptedState(e % self.firstlinekey)
1055 d.update(updatedict)
1069 d.update(updatedict)
1056 except ValueError as e:
1070 except ValueError as e:
1057 raise error.CorruptedState(str(e))
1071 raise error.CorruptedState(str(e))
1058 return d
1072 return d
1059
1073
1060 def write(self, data, firstline=None):
1074 def write(self, data, firstline=None):
1061 """Write key=>value mapping to a file
1075 """Write key=>value mapping to a file
1062 data is a dict. Keys must be alphanumerical and start with a letter.
1076 data is a dict. Keys must be alphanumerical and start with a letter.
1063 Values must not contain newline characters.
1077 Values must not contain newline characters.
1064
1078
1065 If 'firstline' is not None, it is written to file before
1079 If 'firstline' is not None, it is written to file before
1066 everything else, as it is, not in a key=value form"""
1080 everything else, as it is, not in a key=value form"""
1067 lines = []
1081 lines = []
1068 if firstline is not None:
1082 if firstline is not None:
1069 lines.append('%s\n' % firstline)
1083 lines.append('%s\n' % firstline)
1070
1084
1071 for k, v in data.items():
1085 for k, v in data.items():
1072 if k == self.firstlinekey:
1086 if k == self.firstlinekey:
1073 e = "key name '%s' is reserved" % self.firstlinekey
1087 e = "key name '%s' is reserved" % self.firstlinekey
1074 raise error.ProgrammingError(e)
1088 raise error.ProgrammingError(e)
1075 if not k[0].isalpha():
1089 if not k[0].isalpha():
1076 e = "keys must start with a letter in a key-value file"
1090 e = "keys must start with a letter in a key-value file"
1077 raise error.ProgrammingError(e)
1091 raise error.ProgrammingError(e)
1078 if not k.isalnum():
1092 if not k.isalnum():
1079 e = "invalid key name in a simple key-value file"
1093 e = "invalid key name in a simple key-value file"
1080 raise error.ProgrammingError(e)
1094 raise error.ProgrammingError(e)
1081 if '\n' in v:
1095 if '\n' in v:
1082 e = "invalid value in a simple key-value file"
1096 e = "invalid value in a simple key-value file"
1083 raise error.ProgrammingError(e)
1097 raise error.ProgrammingError(e)
1084 lines.append("%s=%s\n" % (k, v))
1098 lines.append("%s=%s\n" % (k, v))
1085 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1099 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1086 fp.write(''.join(lines))
1100 fp.write(''.join(lines))
1087
1101
1088 _reportobsoletedsource = [
1102 _reportobsoletedsource = [
1089 'debugobsolete',
1103 'debugobsolete',
1090 'pull',
1104 'pull',
1091 'push',
1105 'push',
1092 'serve',
1106 'serve',
1093 'unbundle',
1107 'unbundle',
1094 ]
1108 ]
1095
1109
1096 def registersummarycallback(repo, otr, txnname=''):
1110 def registersummarycallback(repo, otr, txnname=''):
1097 """register a callback to issue a summary after the transaction is closed
1111 """register a callback to issue a summary after the transaction is closed
1098 """
1112 """
1099 for source in _reportobsoletedsource:
1113 for source in _reportobsoletedsource:
1100 if txnname.startswith(source):
1114 if txnname.startswith(source):
1101 reporef = weakref.ref(repo)
1115 reporef = weakref.ref(repo)
1102 def reportsummary(tr):
1116 def reportsummary(tr):
1103 """the actual callback reporting the summary"""
1117 """the actual callback reporting the summary"""
1104 repo = reporef()
1118 repo = reporef()
1105 obsoleted = obsutil.getobsoleted(repo, tr)
1119 obsoleted = obsutil.getobsoleted(repo, tr)
1106 if obsoleted:
1120 if obsoleted:
1107 repo.ui.status(_('obsoleted %i changesets\n')
1121 repo.ui.status(_('obsoleted %i changesets\n')
1108 % len(obsoleted))
1122 % len(obsoleted))
1109 otr.addpostclose('00-txnreport', reportsummary)
1123 otr.addpostclose('00-txnreport', reportsummary)
1110 break
1124 break
@@ -1,212 +1,245 b''
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > rebase=
3 > rebase=
4 > drawdag=$TESTDIR/drawdag.py
4 >
5 >
5 > [phases]
6 > [phases]
6 > publish=False
7 > publish=False
7 >
8 >
8 > [alias]
9 > [alias]
9 > tglog = log -G --template "{rev}: '{desc}' bookmarks: {bookmarks}\n"
10 > tglog = log -G --template "{rev}: '{desc}' bookmarks: {bookmarks}\n"
10 > EOF
11 > EOF
11
12
12 Create a repo with several bookmarks
13 Create a repo with several bookmarks
13 $ hg init a
14 $ hg init a
14 $ cd a
15 $ cd a
15
16
16 $ echo a > a
17 $ echo a > a
17 $ hg ci -Am A
18 $ hg ci -Am A
18 adding a
19 adding a
19
20
20 $ echo b > b
21 $ echo b > b
21 $ hg ci -Am B
22 $ hg ci -Am B
22 adding b
23 adding b
23 $ hg book 'X'
24 $ hg book 'X'
24 $ hg book 'Y'
25 $ hg book 'Y'
25
26
26 $ echo c > c
27 $ echo c > c
27 $ hg ci -Am C
28 $ hg ci -Am C
28 adding c
29 adding c
29 $ hg book 'Z'
30 $ hg book 'Z'
30
31
31 $ hg up -q 0
32 $ hg up -q 0
32
33
33 $ echo d > d
34 $ echo d > d
34 $ hg ci -Am D
35 $ hg ci -Am D
35 adding d
36 adding d
36 created new head
37 created new head
37
38
38 $ hg book W
39 $ hg book W
39
40
40 $ hg tglog
41 $ hg tglog
41 @ 3: 'D' bookmarks: W
42 @ 3: 'D' bookmarks: W
42 |
43 |
43 | o 2: 'C' bookmarks: Y Z
44 | o 2: 'C' bookmarks: Y Z
44 | |
45 | |
45 | o 1: 'B' bookmarks: X
46 | o 1: 'B' bookmarks: X
46 |/
47 |/
47 o 0: 'A' bookmarks:
48 o 0: 'A' bookmarks:
48
49
49
50
50 Move only rebased bookmarks
51 Move only rebased bookmarks
51
52
52 $ cd ..
53 $ cd ..
53 $ hg clone -q a a1
54 $ hg clone -q a a1
54
55
55 $ cd a1
56 $ cd a1
56 $ hg up -q Z
57 $ hg up -q Z
57
58
58 Test deleting divergent bookmarks from dest (issue3685)
59 Test deleting divergent bookmarks from dest (issue3685)
59
60
60 $ hg book -r 3 Z@diverge
61 $ hg book -r 3 Z@diverge
61
62
62 ... and also test that bookmarks not on dest or not being moved aren't deleted
63 ... and also test that bookmarks not on dest or not being moved aren't deleted
63
64
64 $ hg book -r 3 X@diverge
65 $ hg book -r 3 X@diverge
65 $ hg book -r 0 Y@diverge
66 $ hg book -r 0 Y@diverge
66
67
67 $ hg tglog
68 $ hg tglog
68 o 3: 'D' bookmarks: W X@diverge Z@diverge
69 o 3: 'D' bookmarks: W X@diverge Z@diverge
69 |
70 |
70 | @ 2: 'C' bookmarks: Y Z
71 | @ 2: 'C' bookmarks: Y Z
71 | |
72 | |
72 | o 1: 'B' bookmarks: X
73 | o 1: 'B' bookmarks: X
73 |/
74 |/
74 o 0: 'A' bookmarks: Y@diverge
75 o 0: 'A' bookmarks: Y@diverge
75
76
76 $ hg rebase -s Y -d 3
77 $ hg rebase -s Y -d 3
77 rebasing 2:49cb3485fa0c "C" (Y Z)
78 rebasing 2:49cb3485fa0c "C" (Y Z)
78 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg (glob)
79 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg (glob)
79
80
80 $ hg tglog
81 $ hg tglog
81 @ 3: 'C' bookmarks: Y Z
82 @ 3: 'C' bookmarks: Y Z
82 |
83 |
83 o 2: 'D' bookmarks: W X@diverge
84 o 2: 'D' bookmarks: W X@diverge
84 |
85 |
85 | o 1: 'B' bookmarks: X
86 | o 1: 'B' bookmarks: X
86 |/
87 |/
87 o 0: 'A' bookmarks: Y@diverge
88 o 0: 'A' bookmarks: Y@diverge
88
89
89 Do not try to keep active but deleted divergent bookmark
90 Do not try to keep active but deleted divergent bookmark
90
91
91 $ cd ..
92 $ cd ..
92 $ hg clone -q a a4
93 $ hg clone -q a a4
93
94
94 $ cd a4
95 $ cd a4
95 $ hg up -q 2
96 $ hg up -q 2
96 $ hg book W@diverge
97 $ hg book W@diverge
97
98
98 $ hg rebase -s W -d .
99 $ hg rebase -s W -d .
99 rebasing 3:41acb9dca9eb "D" (tip W)
100 rebasing 3:41acb9dca9eb "D" (tip W)
100 saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg (glob)
101 saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg (glob)
101
102
102 $ hg bookmarks
103 $ hg bookmarks
103 W 3:0d3554f74897
104 W 3:0d3554f74897
104 X 1:6c81ed0049f8
105 X 1:6c81ed0049f8
105 Y 2:49cb3485fa0c
106 Y 2:49cb3485fa0c
106 Z 2:49cb3485fa0c
107 Z 2:49cb3485fa0c
107
108
108 Keep bookmarks to the correct rebased changeset
109 Keep bookmarks to the correct rebased changeset
109
110
110 $ cd ..
111 $ cd ..
111 $ hg clone -q a a2
112 $ hg clone -q a a2
112
113
113 $ cd a2
114 $ cd a2
114 $ hg up -q Z
115 $ hg up -q Z
115
116
116 $ hg rebase -s 1 -d 3
117 $ hg rebase -s 1 -d 3
117 rebasing 1:6c81ed0049f8 "B" (X)
118 rebasing 1:6c81ed0049f8 "B" (X)
118 rebasing 2:49cb3485fa0c "C" (Y Z)
119 rebasing 2:49cb3485fa0c "C" (Y Z)
119 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
120 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
120
121
121 $ hg tglog
122 $ hg tglog
122 @ 3: 'C' bookmarks: Y Z
123 @ 3: 'C' bookmarks: Y Z
123 |
124 |
124 o 2: 'B' bookmarks: X
125 o 2: 'B' bookmarks: X
125 |
126 |
126 o 1: 'D' bookmarks: W
127 o 1: 'D' bookmarks: W
127 |
128 |
128 o 0: 'A' bookmarks:
129 o 0: 'A' bookmarks:
129
130
130
131
131 Keep active bookmark on the correct changeset
132 Keep active bookmark on the correct changeset
132
133
133 $ cd ..
134 $ cd ..
134 $ hg clone -q a a3
135 $ hg clone -q a a3
135
136
136 $ cd a3
137 $ cd a3
137 $ hg up -q X
138 $ hg up -q X
138
139
139 $ hg rebase -d W
140 $ hg rebase -d W
140 rebasing 1:6c81ed0049f8 "B" (X)
141 rebasing 1:6c81ed0049f8 "B" (X)
141 rebasing 2:49cb3485fa0c "C" (Y Z)
142 rebasing 2:49cb3485fa0c "C" (Y Z)
142 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
143 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
143
144
144 $ hg tglog
145 $ hg tglog
145 o 3: 'C' bookmarks: Y Z
146 o 3: 'C' bookmarks: Y Z
146 |
147 |
147 @ 2: 'B' bookmarks: X
148 @ 2: 'B' bookmarks: X
148 |
149 |
149 o 1: 'D' bookmarks: W
150 o 1: 'D' bookmarks: W
150 |
151 |
151 o 0: 'A' bookmarks:
152 o 0: 'A' bookmarks:
152
153
153 $ hg bookmarks
154 $ hg bookmarks
154 W 1:41acb9dca9eb
155 W 1:41acb9dca9eb
155 * X 2:e926fccfa8ec
156 * X 2:e926fccfa8ec
156 Y 3:3d5fa227f4b5
157 Y 3:3d5fa227f4b5
157 Z 3:3d5fa227f4b5
158 Z 3:3d5fa227f4b5
158
159
159 rebase --continue with bookmarks present (issue3802)
160 rebase --continue with bookmarks present (issue3802)
160
161
161 $ hg up 2
162 $ hg up 2
162 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
163 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
163 (leaving bookmark X)
164 (leaving bookmark X)
164 $ echo 'C' > c
165 $ echo 'C' > c
165 $ hg add c
166 $ hg add c
166 $ hg ci -m 'other C'
167 $ hg ci -m 'other C'
167 created new head
168 created new head
168 $ hg up 3
169 $ hg up 3
169 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 $ hg rebase --dest 4
171 $ hg rebase --dest 4
171 rebasing 3:3d5fa227f4b5 "C" (Y Z)
172 rebasing 3:3d5fa227f4b5 "C" (Y Z)
172 merging c
173 merging c
173 warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
174 warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
174 unresolved conflicts (see hg resolve, then hg rebase --continue)
175 unresolved conflicts (see hg resolve, then hg rebase --continue)
175 [1]
176 [1]
176 $ echo 'c' > c
177 $ echo 'c' > c
177 $ hg resolve --mark c
178 $ hg resolve --mark c
178 (no more unresolved files)
179 (no more unresolved files)
179 continue: hg rebase --continue
180 continue: hg rebase --continue
180 $ hg rebase --continue
181 $ hg rebase --continue
181 rebasing 3:3d5fa227f4b5 "C" (Y Z)
182 rebasing 3:3d5fa227f4b5 "C" (Y Z)
182 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg (glob)
183 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg (glob)
183 $ hg tglog
184 $ hg tglog
184 @ 4: 'C' bookmarks: Y Z
185 @ 4: 'C' bookmarks: Y Z
185 |
186 |
186 o 3: 'other C' bookmarks:
187 o 3: 'other C' bookmarks:
187 |
188 |
188 o 2: 'B' bookmarks: X
189 o 2: 'B' bookmarks: X
189 |
190 |
190 o 1: 'D' bookmarks: W
191 o 1: 'D' bookmarks: W
191 |
192 |
192 o 0: 'A' bookmarks:
193 o 0: 'A' bookmarks:
193
194
194
195
195 ensure that bookmarks given the names of revset functions can be used
196 ensure that bookmarks given the names of revset functions can be used
196 as --rev arguments (issue3950)
197 as --rev arguments (issue3950)
197
198
198 $ hg update -q 3
199 $ hg update -q 3
199 $ echo bimble > bimble
200 $ echo bimble > bimble
200 $ hg add bimble
201 $ hg add bimble
201 $ hg commit -q -m 'bisect'
202 $ hg commit -q -m 'bisect'
202 $ echo e >> bimble
203 $ echo e >> bimble
203 $ hg ci -m bisect2
204 $ hg ci -m bisect2
204 $ echo e >> bimble
205 $ echo e >> bimble
205 $ hg ci -m bisect3
206 $ hg ci -m bisect3
206 $ hg book bisect
207 $ hg book bisect
207 $ hg update -q Y
208 $ hg update -q Y
208 $ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z
209 $ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z
209 rebasing 5:345c90f326a4 "bisect"
210 rebasing 5:345c90f326a4 "bisect"
210 rebasing 6:f677a2907404 "bisect2"
211 rebasing 6:f677a2907404 "bisect2"
211 rebasing 7:325c16001345 "bisect3" (tip bisect)
212 rebasing 7:325c16001345 "bisect3" (tip bisect)
212 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg (glob)
213 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg (glob)
214
215 Bookmark and working parent get moved even if --keep is set (issue5682)
216
217 $ hg init $TESTTMP/book-keep
218 $ cd $TESTTMP/book-keep
219 $ hg debugdrawdag <<'EOS'
220 > B C
221 > |/
222 > A
223 > EOS
224 $ eval `hg tags -T 'hg bookmark -ir {node} {tag};\n' | grep -v tip`
225 $ rm .hg/localtags
226 $ hg up -q B
227 $ hg tglog
228 o 2: 'C' bookmarks: C
229 |
230 | @ 1: 'B' bookmarks: B
231 |/
232 o 0: 'A' bookmarks: A
233
234 $ hg rebase -r B -d C --keep
235 rebasing 1:112478962961 "B" (B)
236 $ hg tglog
237 @ 3: 'B' bookmarks: B
238 |
239 o 2: 'C' bookmarks: C
240 |
241 | o 1: 'B' bookmarks:
242 |/
243 o 0: 'A' bookmarks: A
244
245
@@ -1,202 +1,207 b''
1 $ cat >> $HGRCPATH<<EOF
1 $ cat >> $HGRCPATH<<EOF
2 > [extensions]
2 > [extensions]
3 > rebase=
3 > rebase=
4 > drawdag=$TESTDIR/drawdag.py
4 > drawdag=$TESTDIR/drawdag.py
5 > EOF
5 > EOF
6
6
7 $ hg init non-merge
7 $ hg init non-merge
8 $ cd non-merge
8 $ cd non-merge
9 $ hg debugdrawdag<<'EOS'
9 $ hg debugdrawdag<<'EOS'
10 > F
10 > F
11 > |
11 > |
12 > E
12 > E
13 > |
13 > |
14 > D
14 > D
15 > |
15 > |
16 > B C
16 > B C
17 > |/
17 > |/
18 > A
18 > A
19 > EOS
19 > EOS
20
20
21 $ for i in C D E F; do
21 $ for i in C D E F; do
22 > hg bookmark -r $i -i BOOK-$i
22 > hg bookmark -r $i -i BOOK-$i
23 > done
23 > done
24
24
25 $ hg debugdrawdag<<'EOS'
25 $ hg debugdrawdag<<'EOS'
26 > E
26 > E
27 > |
27 > |
28 > D
28 > D
29 > |
29 > |
30 > B
30 > B
31 > EOS
31 > EOS
32
32
33 $ hg log -G -T '{rev} {desc} {bookmarks}'
33 $ hg log -G -T '{rev} {desc} {bookmarks}'
34 o 7 E
34 o 7 E
35 |
35 |
36 o 6 D
36 o 6 D
37 |
37 |
38 | o 5 F BOOK-F
38 | o 5 F BOOK-F
39 | |
39 | |
40 | o 4 E BOOK-E
40 | o 4 E BOOK-E
41 | |
41 | |
42 | o 3 D BOOK-D
42 | o 3 D BOOK-D
43 | |
43 | |
44 | o 2 C BOOK-C
44 | o 2 C BOOK-C
45 | |
45 | |
46 o | 1 B
46 o | 1 B
47 |/
47 |/
48 o 0 A
48 o 0 A
49
49
50 With --keep, bookmark should not move
50 With --keep, bookmark should move
51
51
52 $ hg rebase -r 3+4 -d E --keep
52 $ hg rebase -r 3+4 -d E --keep
53 rebasing 3:e7b3f00ed42e "D" (BOOK-D)
53 rebasing 3:e7b3f00ed42e "D" (BOOK-D)
54 note: rebase of 3:e7b3f00ed42e created no changes to commit
54 note: rebase of 3:e7b3f00ed42e created no changes to commit
55 rebasing 4:69a34c08022a "E" (BOOK-E)
55 rebasing 4:69a34c08022a "E" (BOOK-E)
56 note: rebase of 4:69a34c08022a created no changes to commit
56 note: rebase of 4:69a34c08022a created no changes to commit
57 $ hg log -G -T '{rev} {desc} {bookmarks}'
57 $ hg log -G -T '{rev} {desc} {bookmarks}'
58 o 7 E
58 o 7 E BOOK-D BOOK-E
59 |
59 |
60 o 6 D
60 o 6 D
61 |
61 |
62 | o 5 F BOOK-F
62 | o 5 F BOOK-F
63 | |
63 | |
64 | o 4 E BOOK-E
64 | o 4 E
65 | |
65 | |
66 | o 3 D BOOK-D
66 | o 3 D
67 | |
67 | |
68 | o 2 C BOOK-C
68 | o 2 C BOOK-C
69 | |
69 | |
70 o | 1 B
70 o | 1 B
71 |/
71 |/
72 o 0 A
72 o 0 A
73
73
74 Move D and E back for the next test
75
76 $ hg bookmark BOOK-D -fqir 3
77 $ hg bookmark BOOK-E -fqir 4
78
74 Bookmark is usually an indication of a head. For changes that are introduced by
79 Bookmark is usually an indication of a head. For changes that are introduced by
75 an ancestor of bookmark B, after moving B to B-NEW, the changes are ideally
80 an ancestor of bookmark B, after moving B to B-NEW, the changes are ideally
76 still introduced by an ancestor of changeset on B-NEW. In the below case,
81 still introduced by an ancestor of changeset on B-NEW. In the below case,
77 "BOOK-D", and "BOOK-E" include changes introduced by "C".
82 "BOOK-D", and "BOOK-E" include changes introduced by "C".
78
83
79 $ hg rebase -s 2 -d E
84 $ hg rebase -s 2 -d E
80 rebasing 2:dc0947a82db8 "C" (C BOOK-C)
85 rebasing 2:dc0947a82db8 "C" (C BOOK-C)
81 rebasing 3:e7b3f00ed42e "D" (BOOK-D)
86 rebasing 3:e7b3f00ed42e "D" (BOOK-D)
82 note: rebase of 3:e7b3f00ed42e created no changes to commit
87 note: rebase of 3:e7b3f00ed42e created no changes to commit
83 rebasing 4:69a34c08022a "E" (BOOK-E)
88 rebasing 4:69a34c08022a "E" (BOOK-E)
84 note: rebase of 4:69a34c08022a created no changes to commit
89 note: rebase of 4:69a34c08022a created no changes to commit
85 rebasing 5:6b2aeab91270 "F" (F BOOK-F)
90 rebasing 5:6b2aeab91270 "F" (F BOOK-F)
86 saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg (glob)
91 saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg (glob)
87 $ hg log -G -T '{rev} {desc} {bookmarks}'
92 $ hg log -G -T '{rev} {desc} {bookmarks}'
88 o 5 F BOOK-F
93 o 5 F BOOK-F
89 |
94 |
90 o 4 C BOOK-C BOOK-D BOOK-E
95 o 4 C BOOK-C BOOK-D BOOK-E
91 |
96 |
92 o 3 E
97 o 3 E
93 |
98 |
94 o 2 D
99 o 2 D
95 |
100 |
96 o 1 B
101 o 1 B
97 |
102 |
98 o 0 A
103 o 0 A
99
104
100 Merge and its ancestors all become empty
105 Merge and its ancestors all become empty
101
106
102 $ hg init $TESTTMP/merge1
107 $ hg init $TESTTMP/merge1
103 $ cd $TESTTMP/merge1
108 $ cd $TESTTMP/merge1
104
109
105 $ hg debugdrawdag<<'EOS'
110 $ hg debugdrawdag<<'EOS'
106 > E
111 > E
107 > /|
112 > /|
108 > B C D
113 > B C D
109 > \|/
114 > \|/
110 > A
115 > A
111 > EOS
116 > EOS
112
117
113 $ for i in C D E; do
118 $ for i in C D E; do
114 > hg bookmark -r $i -i BOOK-$i
119 > hg bookmark -r $i -i BOOK-$i
115 > done
120 > done
116
121
117 $ hg debugdrawdag<<'EOS'
122 $ hg debugdrawdag<<'EOS'
118 > H
123 > H
119 > |
124 > |
120 > D
125 > D
121 > |
126 > |
122 > C
127 > C
123 > |
128 > |
124 > B
129 > B
125 > EOS
130 > EOS
126
131
127 $ hg rebase -r '(A::)-(B::)-A' -d H
132 $ hg rebase -r '(A::)-(B::)-A' -d H
128 rebasing 2:dc0947a82db8 "C" (BOOK-C)
133 rebasing 2:dc0947a82db8 "C" (BOOK-C)
129 note: rebase of 2:dc0947a82db8 created no changes to commit
134 note: rebase of 2:dc0947a82db8 created no changes to commit
130 rebasing 3:b18e25de2cf5 "D" (BOOK-D)
135 rebasing 3:b18e25de2cf5 "D" (BOOK-D)
131 note: rebase of 3:b18e25de2cf5 created no changes to commit
136 note: rebase of 3:b18e25de2cf5 created no changes to commit
132 rebasing 4:86a1f6686812 "E" (E BOOK-E)
137 rebasing 4:86a1f6686812 "E" (E BOOK-E)
133 note: rebase of 4:86a1f6686812 created no changes to commit
138 note: rebase of 4:86a1f6686812 created no changes to commit
134 saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg (glob)
139 saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg (glob)
135
140
136 $ hg log -G -T '{rev} {desc} {bookmarks}'
141 $ hg log -G -T '{rev} {desc} {bookmarks}'
137 o 4 H BOOK-C BOOK-D BOOK-E
142 o 4 H BOOK-C BOOK-D BOOK-E
138 |
143 |
139 o 3 D
144 o 3 D
140 |
145 |
141 o 2 C
146 o 2 C
142 |
147 |
143 o 1 B
148 o 1 B
144 |
149 |
145 o 0 A
150 o 0 A
146
151
147 Part of ancestors of a merge become empty
152 Part of ancestors of a merge become empty
148
153
149 $ hg init $TESTTMP/merge2
154 $ hg init $TESTTMP/merge2
150 $ cd $TESTTMP/merge2
155 $ cd $TESTTMP/merge2
151
156
152 $ hg debugdrawdag<<'EOS'
157 $ hg debugdrawdag<<'EOS'
153 > G
158 > G
154 > /|
159 > /|
155 > E F
160 > E F
156 > | |
161 > | |
157 > B C D
162 > B C D
158 > \|/
163 > \|/
159 > A
164 > A
160 > EOS
165 > EOS
161
166
162 $ for i in C D E F G; do
167 $ for i in C D E F G; do
163 > hg bookmark -r $i -i BOOK-$i
168 > hg bookmark -r $i -i BOOK-$i
164 > done
169 > done
165
170
166 $ hg debugdrawdag<<'EOS'
171 $ hg debugdrawdag<<'EOS'
167 > H
172 > H
168 > |
173 > |
169 > F
174 > F
170 > |
175 > |
171 > C
176 > C
172 > |
177 > |
173 > B
178 > B
174 > EOS
179 > EOS
175
180
176 $ hg rebase -r '(A::)-(B::)-A' -d H
181 $ hg rebase -r '(A::)-(B::)-A' -d H
177 rebasing 2:dc0947a82db8 "C" (BOOK-C)
182 rebasing 2:dc0947a82db8 "C" (BOOK-C)
178 note: rebase of 2:dc0947a82db8 created no changes to commit
183 note: rebase of 2:dc0947a82db8 created no changes to commit
179 rebasing 3:b18e25de2cf5 "D" (D BOOK-D)
184 rebasing 3:b18e25de2cf5 "D" (D BOOK-D)
180 rebasing 4:03ca77807e91 "E" (E BOOK-E)
185 rebasing 4:03ca77807e91 "E" (E BOOK-E)
181 rebasing 5:ad6717a6a58e "F" (BOOK-F)
186 rebasing 5:ad6717a6a58e "F" (BOOK-F)
182 note: rebase of 5:ad6717a6a58e created no changes to commit
187 note: rebase of 5:ad6717a6a58e created no changes to commit
183 rebasing 6:c58e8bdac1f4 "G" (G BOOK-G)
188 rebasing 6:c58e8bdac1f4 "G" (G BOOK-G)
184 saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg (glob)
189 saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg (glob)
185
190
186 $ hg log -G -T '{rev} {desc} {bookmarks}'
191 $ hg log -G -T '{rev} {desc} {bookmarks}'
187 o 7 G BOOK-G
192 o 7 G BOOK-G
188 |\
193 |\
189 | o 6 E BOOK-E
194 | o 6 E BOOK-E
190 | |
195 | |
191 o | 5 D BOOK-D BOOK-F
196 o | 5 D BOOK-D BOOK-F
192 |/
197 |/
193 o 4 H BOOK-C
198 o 4 H BOOK-C
194 |
199 |
195 o 3 F
200 o 3 F
196 |
201 |
197 o 2 C
202 o 2 C
198 |
203 |
199 o 1 B
204 o 1 B
200 |
205 |
201 o 0 A
206 o 0 A
202
207
General Comments 0
You need to be logged in to leave comments. Login now