##// END OF EJS Templates
scmutil: make cleanupnodes handle filtered node...
Jun Wu -
r33330:ba43e5ee default
parent child Browse files
Show More
@@ -1,1076 +1,1079
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from .i18n import _
26 from .i18n import _
27 from . import (
27 from . import (
28 encoding,
28 encoding,
29 error,
29 error,
30 match as matchmod,
30 match as matchmod,
31 obsolete,
31 obsolete,
32 obsutil,
32 obsutil,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 revsetlang,
36 revsetlang,
37 similar,
37 similar,
38 util,
38 util,
39 )
39 )
40
40
41 if pycompat.osname == 'nt':
41 if pycompat.osname == 'nt':
42 from . import scmwindows as scmplatform
42 from . import scmwindows as scmplatform
43 else:
43 else:
44 from . import scmposix as scmplatform
44 from . import scmposix as scmplatform
45
45
46 termsize = scmplatform.termsize
46 termsize = scmplatform.termsize
47
47
class status(tuple):
    """Immutable record holding one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    """

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
100
100
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subrepo path to the context it should be read from,
    # preferring ctx1. Paths only present in ctx2 matter when the .hgsub
    # file has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For anything only in ctx2, yield an empty subrepo based on ctx1 so
    # that 'sub.{status|diff}(rev2)' has an accurate baseline instead of
    # the ctx2 subrepo being compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125
125
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded changesets that are secret and still alive: mentioning
    # them explains to the user why nothing was exchanged.
    secretlist = []
    for n in (excluded or []):
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
142
142
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Returns func()'s result on success, 1 for InterventionRequired, the
    exit code for SystemExit, and -1 for every other handled error.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Record the traceback (shown with --traceback) before the
            # handlers below reduce the exception to a message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # Not an error: the user must resolve something (e.g. a conflict)
        # before continuing, hence the distinct exit code.
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # The last word of the message is the module that failed to import;
        # recognize Mercurial's own C extensions to give a better hint.
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # presumably an HTTPError (it carries a 'code' attribute)
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # presumably a URLError/SSLError wrapping the real cause
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to a pager that quit): silent.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
250
250
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not usable as a new label (bookmark/branch/tag) name.

    The "kind" parameter is deliberately not used in ui output because it
    would make the strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    # A purely numeric name would be ambiguous with a revision number.
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
264
264
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
269
269
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    # Only consult the (relatively expensive) Windows name check when the
    # configuration actually wants a diagnostic.
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
281
281
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # Non-portable names cannot be created on Windows at all, so that
    # platform always aborts regardless of configuration.
    abort = lval == 'abort' or pycompat.osname == 'nt'
    warn = bval or lval == 'warn'
    # Reject values that are neither a boolean nor one of the keywords.
    valid = warn or abort or lval == 'ignore'
    if bval is None and not valid:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
294
294
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto an existing one.

    Instances are callable; call with each filename about to be added.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked name with a single encoding.lower()
        # call over a NUL-joined string, then split back into a set.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # Filenames already audited; guarantees we don't complain about
        # case collisions if this object is called with the same filename
        # twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
318
318
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current
    repository. However, this is not sufficient for validating repoviews
    because the set of revisions in the view may change without the
    repository tiprev and tipnode changing.

    This function hashes all the revs filtered from the view (up to and
    including maxrev) and returns that SHA-1 digest, or None when no
    relevant revision is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    relevant = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not relevant:
        return None
    s = hashlib.sha1()
    for rev in relevant:
        s.update('%d;' % rev)
    return s.digest()
342
342
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; deeper errors are
        # silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst; return False when an entry
            # with the same identity was already present. Used to avoid
            # walking the same directory twice through symlinks.
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat there is no safe way to detect symlink cycles.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Recurse manually through the link target so
                        # seen_dirs keeps protecting against cycles.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
390
390
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working-directory context reports no node; substitute the
    # canonical placeholder id for it.
    node = ctx.node()
    return wdirid if node is None else node
397
397
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working-directory context reports no revision number; substitute
    # the canonical placeholder revision for it.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
405
405
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx, falling back to default.

    Raises Abort when the spec resolves to an empty set.
    """
    # Revision 0 is falsy but is a valid explicit spec.
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
414
414
def _pairspec(revspec):
    # True when the top-level revset operator is one of the range forms,
    # i.e. the spec syntactically denotes a pair of revisions.
    tree = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return tree and tree[0] in rangeops
418
418
def revpair(repo, revs):
    """Resolve user revset specs to a (first, second) pair of binary nodes.

    Returns (p1, None) when revs is empty; second is None when the specs
    collapse to a single revision that was not written as a range.
    Raises Abort for an empty result or an empty side of a range.
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # Pick the endpoints without fully iterating the smartset when its
    # ordering is already known.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
448
448
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained
    ``OR`` expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call
    ``revsetlang.formatspec()`` and pass the result as an element of
    ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface
    over integer revisions.
    """
    # Bare integers are wrapped in rev() so they survive revset parsing.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
476
476
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        return ps
    if repo.ui.debugflag:
        # In debug mode always show both slots, padding with null.
        return [ps[0], repo['null']]
    # A sole parent that is simply the previous revision carries no
    # information worth displaying.
    if ps[0].rev() >= intrev(ctx) - 1:
        return []
    return ps
492
492
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns (glob:, re:, ...) pass through.
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        # A glob with no matches is kept verbatim so the caller can
        # report it as a bad pattern.
        expanded.extend(globbed or [kindpat])
    return expanded
511
511
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # A lone empty pattern means "no patterns".
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # Expand bare globs ourselves where the shell has not (Windows).
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over 'm', which is only assigned below; safe because
        # the matcher cannot invoke badfn before it exists.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # A match-everything matcher means the patterns were vacuous.
        pats = []
    return m, pats
536
536
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
541
541
def matchall(repo):
    """Return a matcher that efficiently matches every path in the repo."""
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
545
545
def matchfiles(repo, files, badfn=None):
    """Return a matcher that efficiently matches exactly these files."""
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
549
549
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backupdir = ui.config('ui', 'origbackuppath', None)
    if backupdir is None:
        # no custom location configured: backup next to the file
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured directory
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backupdir, relpath)

    parent = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(parent):
        ui.note(_('creating directory: %s\n') % parent)
        util.makedirs(parent)

    return fullorigpath + ".orig"
569
569
def cleanupnodes(repo, mapping, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
    replacements. operation is a string, like "rebase".
    """
    if not util.safehasattr(mapping, 'items'):
        # plain iterable of nodes: treat each as having no replacement
        mapping = {n: () for n in mapping}

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanged = False
        for oldnode, newnodes in mapping.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            bmarkchanged = True
            if len(newnodes) > 1:
                # multiple replacements: only a unique head among them is an
                # unambiguous bookmark destination
                heads = list(repo.set('heads(%ln)', newnodes))
                if len(heads) != 1:
                    raise error.ProgrammingError(
                        'cannot figure out bookmark movement')
                newnode = heads[0].node()
            elif len(newnodes) == 0:
                # move bookmark backwards: nearest ancestor of oldnode that
                # is itself not being replaced
                roots = list(repo.set('max((::%n) - %ln)', oldnode,
                                      list(mapping)))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            for name in oldbmarks:
                bmarks[name] = newnode
        if bmarkchanged:
            bmarks.recordchange(tr)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            # Unfiltered repo is needed since nodes in mapping might be hidden;
            # a filtered repo would raise looking those nodes up.
            unfi = repo.unfiltered()
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], (unfi[m] for m in s))
                    for n, s in sorted(mapping.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(mapping), operation)
631
634
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files, drop missing ones, and record similarity-based renames.

    Recurses into matching subrepos. Returns 1 if any explicitly listed
    file was rejected or a subrepo reported a failure, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    rv = 0

    def join(f):
        return os.path.join(prefix, f)

    # handle subrepos first, so their state is processed before ours
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    rv = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []

    def badfn(f, msg):
        # complain only about explicitly listed files, but remember all
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    for abs in sorted(unknownset | set(deleted)):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return rv
687
690
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the badfn closure appends to rejected; it is only invoked after the
    # matcher is used below
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
716
719
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        entry = dirstate[abs]
        # NOTE: branch order matters — an untracked path failing the audit
        # with no stat falls through to "deleted"
        if entry == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif entry != 'r' and not st:
            deleted.append(abs)
        elif entry == 'r' and st:
            forgotten.append(abs)
        elif entry == 'r' and not st:
            # for finding renames
            removed.append(abs)
        elif entry == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
745
748
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        candidates = similar.findrenames(repo, added, removed, similarity)
        for old, new, score in candidates:
            # stay quiet only for exact-on-both-sides pairs in non-verbose mode
            exactpair = matcher.exact(old) and matcher.exact(new)
            if repo.ui.verbose or not exactpair:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
760
763
def _markchanges(repo, unknown, deleted, renames):
    """Record pending changes in the dirstate.

    Files in *unknown* are marked added, files in *deleted* are forgotten,
    and each new->old pair in *renames* is recorded as a copy.
    """
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
770
773
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy: just make sure dst is tracked again
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source was only added, never committed: no copy data to record
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
789
792
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # entries must start with an alphanumeric, otherwise the file is bad
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
808
811
def writerequires(opener, requirements):
    """Persist *requirements* through *opener*, one sorted entry per line."""
    with opener('requires', 'w') as fp:
        for req in sorted(requirements):
            fp.write("%s\n" % req)
813
816
class filecachesubentry(object):
    """Stat-based change tracking for a single file path."""

    def __init__(self, path, stat):
        # path: file to watch; stat: whether to record its state immediately
        self.path = path
        self.cachestat = None
        # tri-state: True/False once known, None means "not determined yet"
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        """Re-record the file's stat data, if stat data is usable at all."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Report whether stat data can be trusted for this path."""
        if self._cacheable is not None:
            return self._cacheable
        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """True if the file appears changed since the last refresh."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
868
871
class filecacheentry(object):
    """Aggregate change tracking over several file paths."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        for sub in self._entries:
            if sub.changed():
                return True
        return False

    def refresh(self):
        for sub in self._entries:
            sub.refresh()
885
888
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # fast path: value already cached on the instance
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry is not None:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name in obj._filecache:
            ce = obj._filecache[self.name]
        else:
            # register a stub entry so that "X in __dict__" keeps implying
            # "X in _filecache"
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
964
967
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run *cmd* via ui.system with *envvar* advertising the inherited lock.

    Raises LockInheritanceContractViolation when *lock* is not held.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as token:
        # deliberately mutate the caller-supplied environ as well
        environ[envvar] = token
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
974
977
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
983
986
def gdinitconfig(ui):
    """Tell whether new repositories should be created with generaldelta."""
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
990
993
def gddeltaconfig(ui):
    """Tell whether incoming deltas should be optimised for generaldelta."""
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
996
999
997 class simplekeyvaluefile(object):
1000 class simplekeyvaluefile(object):
998 """A simple file with key=value lines
1001 """A simple file with key=value lines
999
1002
1000 Keys must be alphanumerics and start with a letter, values must not
1003 Keys must be alphanumerics and start with a letter, values must not
1001 contain '\n' characters"""
1004 contain '\n' characters"""
1002 firstlinekey = '__firstline'
1005 firstlinekey = '__firstline'
1003
1006
1004 def __init__(self, vfs, path, keys=None):
1007 def __init__(self, vfs, path, keys=None):
1005 self.vfs = vfs
1008 self.vfs = vfs
1006 self.path = path
1009 self.path = path
1007
1010
1008 def read(self, firstlinenonkeyval=False):
1011 def read(self, firstlinenonkeyval=False):
1009 """Read the contents of a simple key-value file
1012 """Read the contents of a simple key-value file
1010
1013
1011 'firstlinenonkeyval' indicates whether the first line of file should
1014 'firstlinenonkeyval' indicates whether the first line of file should
1012 be treated as a key-value pair or reuturned fully under the
1015 be treated as a key-value pair or reuturned fully under the
1013 __firstline key."""
1016 __firstline key."""
1014 lines = self.vfs.readlines(self.path)
1017 lines = self.vfs.readlines(self.path)
1015 d = {}
1018 d = {}
1016 if firstlinenonkeyval:
1019 if firstlinenonkeyval:
1017 if not lines:
1020 if not lines:
1018 e = _("empty simplekeyvalue file")
1021 e = _("empty simplekeyvalue file")
1019 raise error.CorruptedState(e)
1022 raise error.CorruptedState(e)
1020 # we don't want to include '\n' in the __firstline
1023 # we don't want to include '\n' in the __firstline
1021 d[self.firstlinekey] = lines[0][:-1]
1024 d[self.firstlinekey] = lines[0][:-1]
1022 del lines[0]
1025 del lines[0]
1023
1026
1024 try:
1027 try:
1025 # the 'if line.strip()' part prevents us from failing on empty
1028 # the 'if line.strip()' part prevents us from failing on empty
1026 # lines which only contain '\n' therefore are not skipped
1029 # lines which only contain '\n' therefore are not skipped
1027 # by 'if line'
1030 # by 'if line'
1028 updatedict = dict(line[:-1].split('=', 1) for line in lines
1031 updatedict = dict(line[:-1].split('=', 1) for line in lines
1029 if line.strip())
1032 if line.strip())
1030 if self.firstlinekey in updatedict:
1033 if self.firstlinekey in updatedict:
1031 e = _("%r can't be used as a key")
1034 e = _("%r can't be used as a key")
1032 raise error.CorruptedState(e % self.firstlinekey)
1035 raise error.CorruptedState(e % self.firstlinekey)
1033 d.update(updatedict)
1036 d.update(updatedict)
1034 except ValueError as e:
1037 except ValueError as e:
1035 raise error.CorruptedState(str(e))
1038 raise error.CorruptedState(str(e))
1036 return d
1039 return d
1037
1040
1038 def write(self, data, firstline=None):
1041 def write(self, data, firstline=None):
1039 """Write key=>value mapping to a file
1042 """Write key=>value mapping to a file
1040 data is a dict. Keys must be alphanumerical and start with a letter.
1043 data is a dict. Keys must be alphanumerical and start with a letter.
1041 Values must not contain newline characters.
1044 Values must not contain newline characters.
1042
1045
1043 If 'firstline' is not None, it is written to file before
1046 If 'firstline' is not None, it is written to file before
1044 everything else, as it is, not in a key=value form"""
1047 everything else, as it is, not in a key=value form"""
1045 lines = []
1048 lines = []
1046 if firstline is not None:
1049 if firstline is not None:
1047 lines.append('%s\n' % firstline)
1050 lines.append('%s\n' % firstline)
1048
1051
1049 for k, v in data.items():
1052 for k, v in data.items():
1050 if k == self.firstlinekey:
1053 if k == self.firstlinekey:
1051 e = "key name '%s' is reserved" % self.firstlinekey
1054 e = "key name '%s' is reserved" % self.firstlinekey
1052 raise error.ProgrammingError(e)
1055 raise error.ProgrammingError(e)
1053 if not k[0].isalpha():
1056 if not k[0].isalpha():
1054 e = "keys must start with a letter in a key-value file"
1057 e = "keys must start with a letter in a key-value file"
1055 raise error.ProgrammingError(e)
1058 raise error.ProgrammingError(e)
1056 if not k.isalnum():
1059 if not k.isalnum():
1057 e = "invalid key name in a simple key-value file"
1060 e = "invalid key name in a simple key-value file"
1058 raise error.ProgrammingError(e)
1061 raise error.ProgrammingError(e)
1059 if '\n' in v:
1062 if '\n' in v:
1060 e = "invalid value in a simple key-value file"
1063 e = "invalid value in a simple key-value file"
1061 raise error.ProgrammingError(e)
1064 raise error.ProgrammingError(e)
1062 lines.append("%s=%s\n" % (k, v))
1065 lines.append("%s=%s\n" % (k, v))
1063 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1066 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1064 fp.write(''.join(lines))
1067 fp.write(''.join(lines))
1065
1068
1066 def registersummarycallback(repo, otr):
1069 def registersummarycallback(repo, otr):
1067 """register a callback to issue a summary after the transaction is closed
1070 """register a callback to issue a summary after the transaction is closed
1068 """
1071 """
1069 reporef = weakref.ref(repo)
1072 reporef = weakref.ref(repo)
1070 def reportsummary(tr):
1073 def reportsummary(tr):
1071 """the actual callback reporting the summary"""
1074 """the actual callback reporting the summary"""
1072 repo = reporef()
1075 repo = reporef()
1073 obsoleted = obsutil.getobsoleted(repo, tr)
1076 obsoleted = obsutil.getobsoleted(repo, tr)
1074 if obsoleted:
1077 if obsoleted:
1075 repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
1078 repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
1076 otr.addpostclose('00-txnreport', reportsummary)
1079 otr.addpostclose('00-txnreport', reportsummary)
@@ -1,619 +1,666
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2
2
3 This is the most complex troubles from far so we isolate it in a dedicated
3 This is the most complex troubles from far so we isolate it in a dedicated
4 file.
4 file.
5
5
6 Enable obsolete
6 Enable obsolete
7
7
8 $ cat >> $HGRCPATH << EOF
8 $ cat >> $HGRCPATH << EOF
9 > [ui]
9 > [ui]
10 > logtemplate = {rev}:{node|short} {desc}\n
10 > logtemplate = {rev}:{node|short} {desc}\n
11 > [experimental]
11 > [experimental]
12 > evolution=createmarkers
12 > evolution=createmarkers
13 > [extensions]
14 > drawdag=$TESTDIR/drawdag.py
13 > [alias]
15 > [alias]
14 > debugobsolete = debugobsolete -d '0 0'
16 > debugobsolete = debugobsolete -d '0 0'
15 > [phases]
17 > [phases]
16 > publish=False
18 > publish=False
17 > EOF
19 > EOF
18
20
19
21
20 $ mkcommit() {
22 $ mkcommit() {
21 > echo "$1" > "$1"
23 > echo "$1" > "$1"
22 > hg add "$1"
24 > hg add "$1"
23 > hg ci -m "$1"
25 > hg ci -m "$1"
24 > }
26 > }
25 $ getid() {
27 $ getid() {
26 > hg log --hidden -r "desc('$1')" -T '{node}\n'
28 > hg log --hidden -r "desc('$1')" -T '{node}\n'
27 > }
29 > }
28
30
29 setup repo
31 setup repo
30
32
31 $ hg init reference
33 $ hg init reference
32 $ cd reference
34 $ cd reference
33 $ mkcommit base
35 $ mkcommit base
34 $ mkcommit A_0
36 $ mkcommit A_0
35 $ hg up 0
37 $ hg up 0
36 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
38 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
37 $ mkcommit A_1
39 $ mkcommit A_1
38 created new head
40 created new head
39 $ hg up 0
41 $ hg up 0
40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
42 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 $ mkcommit A_2
43 $ mkcommit A_2
42 created new head
44 created new head
43 $ hg up 0
45 $ hg up 0
44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
46 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
45 $ cd ..
47 $ cd ..
46
48
47
49
48 $ newcase() {
50 $ newcase() {
49 > hg clone -u 0 -q reference $1
51 > hg clone -u 0 -q reference $1
50 > cd $1
52 > cd $1
51 > }
53 > }
52
54
53 direct divergence
55 direct divergence
54 -----------------
56 -----------------
55
57
56 A_1 have two direct and divergent successors A_1 and A_1
58 A_1 have two direct and divergent successors A_1 and A_1
57
59
58 $ newcase direct
60 $ newcase direct
59 $ hg debugobsolete `getid A_0` `getid A_1`
61 $ hg debugobsolete `getid A_0` `getid A_1`
60 $ hg debugobsolete `getid A_0` `getid A_2`
62 $ hg debugobsolete `getid A_0` `getid A_2`
61 $ hg log -G --hidden
63 $ hg log -G --hidden
62 o 3:392fd25390da A_2
64 o 3:392fd25390da A_2
63 |
65 |
64 | o 2:82623d38b9ba A_1
66 | o 2:82623d38b9ba A_1
65 |/
67 |/
66 | x 1:007dc284c1f8 A_0
68 | x 1:007dc284c1f8 A_0
67 |/
69 |/
68 @ 0:d20a80d4def3 base
70 @ 0:d20a80d4def3 base
69
71
70 $ hg debugsuccessorssets --hidden 'all()'
72 $ hg debugsuccessorssets --hidden 'all()'
71 d20a80d4def3
73 d20a80d4def3
72 d20a80d4def3
74 d20a80d4def3
73 007dc284c1f8
75 007dc284c1f8
74 82623d38b9ba
76 82623d38b9ba
75 392fd25390da
77 392fd25390da
76 82623d38b9ba
78 82623d38b9ba
77 82623d38b9ba
79 82623d38b9ba
78 392fd25390da
80 392fd25390da
79 392fd25390da
81 392fd25390da
80 $ hg log -r 'divergent()'
82 $ hg log -r 'divergent()'
81 2:82623d38b9ba A_1
83 2:82623d38b9ba A_1
82 3:392fd25390da A_2
84 3:392fd25390da A_2
83 $ hg debugsuccessorssets 'all()' --closest
85 $ hg debugsuccessorssets 'all()' --closest
84 d20a80d4def3
86 d20a80d4def3
85 d20a80d4def3
87 d20a80d4def3
86 82623d38b9ba
88 82623d38b9ba
87 82623d38b9ba
89 82623d38b9ba
88 392fd25390da
90 392fd25390da
89 392fd25390da
91 392fd25390da
90 $ hg debugsuccessorssets 'all()' --closest --hidden
92 $ hg debugsuccessorssets 'all()' --closest --hidden
91 d20a80d4def3
93 d20a80d4def3
92 d20a80d4def3
94 d20a80d4def3
93 007dc284c1f8
95 007dc284c1f8
94 82623d38b9ba
96 82623d38b9ba
95 392fd25390da
97 392fd25390da
96 82623d38b9ba
98 82623d38b9ba
97 82623d38b9ba
99 82623d38b9ba
98 392fd25390da
100 392fd25390da
99 392fd25390da
101 392fd25390da
100
102
101 check that mercurial refuse to push
103 check that mercurial refuse to push
102
104
103 $ hg init ../other
105 $ hg init ../other
104 $ hg push ../other
106 $ hg push ../other
105 pushing to ../other
107 pushing to ../other
106 searching for changes
108 searching for changes
107 abort: push includes divergent changeset: 392fd25390da!
109 abort: push includes divergent changeset: 392fd25390da!
108 [255]
110 [255]
109
111
110 $ cd ..
112 $ cd ..
111
113
112
114
113 indirect divergence with known changeset
115 indirect divergence with known changeset
114 -------------------------------------------
116 -------------------------------------------
115
117
116 $ newcase indirect_known
118 $ newcase indirect_known
117 $ hg debugobsolete `getid A_0` `getid A_1`
119 $ hg debugobsolete `getid A_0` `getid A_1`
118 $ hg debugobsolete `getid A_0` `getid A_2`
120 $ hg debugobsolete `getid A_0` `getid A_2`
119 $ mkcommit A_3
121 $ mkcommit A_3
120 created new head
122 created new head
121 $ hg debugobsolete `getid A_2` `getid A_3`
123 $ hg debugobsolete `getid A_2` `getid A_3`
122 $ hg log -G --hidden
124 $ hg log -G --hidden
123 @ 4:01f36c5a8fda A_3
125 @ 4:01f36c5a8fda A_3
124 |
126 |
125 | x 3:392fd25390da A_2
127 | x 3:392fd25390da A_2
126 |/
128 |/
127 | o 2:82623d38b9ba A_1
129 | o 2:82623d38b9ba A_1
128 |/
130 |/
129 | x 1:007dc284c1f8 A_0
131 | x 1:007dc284c1f8 A_0
130 |/
132 |/
131 o 0:d20a80d4def3 base
133 o 0:d20a80d4def3 base
132
134
133 $ hg debugsuccessorssets --hidden 'all()'
135 $ hg debugsuccessorssets --hidden 'all()'
134 d20a80d4def3
136 d20a80d4def3
135 d20a80d4def3
137 d20a80d4def3
136 007dc284c1f8
138 007dc284c1f8
137 82623d38b9ba
139 82623d38b9ba
138 01f36c5a8fda
140 01f36c5a8fda
139 82623d38b9ba
141 82623d38b9ba
140 82623d38b9ba
142 82623d38b9ba
141 392fd25390da
143 392fd25390da
142 01f36c5a8fda
144 01f36c5a8fda
143 01f36c5a8fda
145 01f36c5a8fda
144 01f36c5a8fda
146 01f36c5a8fda
145 $ hg log -r 'divergent()'
147 $ hg log -r 'divergent()'
146 2:82623d38b9ba A_1
148 2:82623d38b9ba A_1
147 4:01f36c5a8fda A_3
149 4:01f36c5a8fda A_3
148 $ hg debugsuccessorssets 'all()' --closest
150 $ hg debugsuccessorssets 'all()' --closest
149 d20a80d4def3
151 d20a80d4def3
150 d20a80d4def3
152 d20a80d4def3
151 82623d38b9ba
153 82623d38b9ba
152 82623d38b9ba
154 82623d38b9ba
153 01f36c5a8fda
155 01f36c5a8fda
154 01f36c5a8fda
156 01f36c5a8fda
155 $ hg debugsuccessorssets 'all()' --closest --hidden
157 $ hg debugsuccessorssets 'all()' --closest --hidden
156 d20a80d4def3
158 d20a80d4def3
157 d20a80d4def3
159 d20a80d4def3
158 007dc284c1f8
160 007dc284c1f8
159 82623d38b9ba
161 82623d38b9ba
160 392fd25390da
162 392fd25390da
161 82623d38b9ba
163 82623d38b9ba
162 82623d38b9ba
164 82623d38b9ba
163 392fd25390da
165 392fd25390da
164 392fd25390da
166 392fd25390da
165 01f36c5a8fda
167 01f36c5a8fda
166 01f36c5a8fda
168 01f36c5a8fda
167 $ cd ..
169 $ cd ..
168
170
169
171
170 indirect divergence with known changeset
172 indirect divergence with known changeset
171 -------------------------------------------
173 -------------------------------------------
172
174
173 $ newcase indirect_unknown
175 $ newcase indirect_unknown
174 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
176 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
175 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
177 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
176 $ hg debugobsolete `getid A_0` `getid A_2`
178 $ hg debugobsolete `getid A_0` `getid A_2`
177 $ hg log -G --hidden
179 $ hg log -G --hidden
178 o 3:392fd25390da A_2
180 o 3:392fd25390da A_2
179 |
181 |
180 | o 2:82623d38b9ba A_1
182 | o 2:82623d38b9ba A_1
181 |/
183 |/
182 | x 1:007dc284c1f8 A_0
184 | x 1:007dc284c1f8 A_0
183 |/
185 |/
184 @ 0:d20a80d4def3 base
186 @ 0:d20a80d4def3 base
185
187
186 $ hg debugsuccessorssets --hidden 'all()'
188 $ hg debugsuccessorssets --hidden 'all()'
187 d20a80d4def3
189 d20a80d4def3
188 d20a80d4def3
190 d20a80d4def3
189 007dc284c1f8
191 007dc284c1f8
190 82623d38b9ba
192 82623d38b9ba
191 392fd25390da
193 392fd25390da
192 82623d38b9ba
194 82623d38b9ba
193 82623d38b9ba
195 82623d38b9ba
194 392fd25390da
196 392fd25390da
195 392fd25390da
197 392fd25390da
196 $ hg log -r 'divergent()'
198 $ hg log -r 'divergent()'
197 2:82623d38b9ba A_1
199 2:82623d38b9ba A_1
198 3:392fd25390da A_2
200 3:392fd25390da A_2
199 $ hg debugsuccessorssets 'all()' --closest
201 $ hg debugsuccessorssets 'all()' --closest
200 d20a80d4def3
202 d20a80d4def3
201 d20a80d4def3
203 d20a80d4def3
202 82623d38b9ba
204 82623d38b9ba
203 82623d38b9ba
205 82623d38b9ba
204 392fd25390da
206 392fd25390da
205 392fd25390da
207 392fd25390da
206 $ hg debugsuccessorssets 'all()' --closest --hidden
208 $ hg debugsuccessorssets 'all()' --closest --hidden
207 d20a80d4def3
209 d20a80d4def3
208 d20a80d4def3
210 d20a80d4def3
209 007dc284c1f8
211 007dc284c1f8
210 82623d38b9ba
212 82623d38b9ba
211 392fd25390da
213 392fd25390da
212 82623d38b9ba
214 82623d38b9ba
213 82623d38b9ba
215 82623d38b9ba
214 392fd25390da
216 392fd25390da
215 392fd25390da
217 392fd25390da
216 $ cd ..
218 $ cd ..
217
219
218 do not take unknown node in account if they are final
220 do not take unknown node in account if they are final
219 -----------------------------------------------------
221 -----------------------------------------------------
220
222
221 $ newcase final-unknown
223 $ newcase final-unknown
222 $ hg debugobsolete `getid A_0` `getid A_1`
224 $ hg debugobsolete `getid A_0` `getid A_1`
223 $ hg debugobsolete `getid A_1` `getid A_2`
225 $ hg debugobsolete `getid A_1` `getid A_2`
224 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
226 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
225 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
227 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
226 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
228 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
227
229
228 $ hg debugsuccessorssets --hidden 'desc('A_0')'
230 $ hg debugsuccessorssets --hidden 'desc('A_0')'
229 007dc284c1f8
231 007dc284c1f8
230 392fd25390da
232 392fd25390da
231 $ hg debugsuccessorssets 'desc('A_0')' --closest
233 $ hg debugsuccessorssets 'desc('A_0')' --closest
232 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
234 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
233 007dc284c1f8
235 007dc284c1f8
234 82623d38b9ba
236 82623d38b9ba
235
237
236 $ cd ..
238 $ cd ..
237
239
238 divergence that converge again is not divergence anymore
240 divergence that converge again is not divergence anymore
239 -----------------------------------------------------
241 -----------------------------------------------------
240
242
241 $ newcase converged_divergence
243 $ newcase converged_divergence
242 $ hg debugobsolete `getid A_0` `getid A_1`
244 $ hg debugobsolete `getid A_0` `getid A_1`
243 $ hg debugobsolete `getid A_0` `getid A_2`
245 $ hg debugobsolete `getid A_0` `getid A_2`
244 $ mkcommit A_3
246 $ mkcommit A_3
245 created new head
247 created new head
246 $ hg debugobsolete `getid A_1` `getid A_3`
248 $ hg debugobsolete `getid A_1` `getid A_3`
247 $ hg debugobsolete `getid A_2` `getid A_3`
249 $ hg debugobsolete `getid A_2` `getid A_3`
248 $ hg log -G --hidden
250 $ hg log -G --hidden
249 @ 4:01f36c5a8fda A_3
251 @ 4:01f36c5a8fda A_3
250 |
252 |
251 | x 3:392fd25390da A_2
253 | x 3:392fd25390da A_2
252 |/
254 |/
253 | x 2:82623d38b9ba A_1
255 | x 2:82623d38b9ba A_1
254 |/
256 |/
255 | x 1:007dc284c1f8 A_0
257 | x 1:007dc284c1f8 A_0
256 |/
258 |/
257 o 0:d20a80d4def3 base
259 o 0:d20a80d4def3 base
258
260
259 $ hg debugsuccessorssets --hidden 'all()'
261 $ hg debugsuccessorssets --hidden 'all()'
260 d20a80d4def3
262 d20a80d4def3
261 d20a80d4def3
263 d20a80d4def3
262 007dc284c1f8
264 007dc284c1f8
263 01f36c5a8fda
265 01f36c5a8fda
264 82623d38b9ba
266 82623d38b9ba
265 01f36c5a8fda
267 01f36c5a8fda
266 392fd25390da
268 392fd25390da
267 01f36c5a8fda
269 01f36c5a8fda
268 01f36c5a8fda
270 01f36c5a8fda
269 01f36c5a8fda
271 01f36c5a8fda
270 $ hg log -r 'divergent()'
272 $ hg log -r 'divergent()'
271 $ hg debugsuccessorssets 'all()' --closest
273 $ hg debugsuccessorssets 'all()' --closest
272 d20a80d4def3
274 d20a80d4def3
273 d20a80d4def3
275 d20a80d4def3
274 01f36c5a8fda
276 01f36c5a8fda
275 01f36c5a8fda
277 01f36c5a8fda
276 $ hg debugsuccessorssets 'all()' --closest --hidden
278 $ hg debugsuccessorssets 'all()' --closest --hidden
277 d20a80d4def3
279 d20a80d4def3
278 d20a80d4def3
280 d20a80d4def3
279 007dc284c1f8
281 007dc284c1f8
280 82623d38b9ba
282 82623d38b9ba
281 392fd25390da
283 392fd25390da
282 82623d38b9ba
284 82623d38b9ba
283 82623d38b9ba
285 82623d38b9ba
284 392fd25390da
286 392fd25390da
285 392fd25390da
287 392fd25390da
286 01f36c5a8fda
288 01f36c5a8fda
287 01f36c5a8fda
289 01f36c5a8fda
288 $ cd ..
290 $ cd ..
289
291
290 split is not divergences
292 split is not divergences
291 -----------------------------
293 -----------------------------
292
294
293 $ newcase split
295 $ newcase split
294 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
296 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
295 $ hg log -G --hidden
297 $ hg log -G --hidden
296 o 3:392fd25390da A_2
298 o 3:392fd25390da A_2
297 |
299 |
298 | o 2:82623d38b9ba A_1
300 | o 2:82623d38b9ba A_1
299 |/
301 |/
300 | x 1:007dc284c1f8 A_0
302 | x 1:007dc284c1f8 A_0
301 |/
303 |/
302 @ 0:d20a80d4def3 base
304 @ 0:d20a80d4def3 base
303
305
304 $ hg debugsuccessorssets --hidden 'all()'
306 $ hg debugsuccessorssets --hidden 'all()'
305 d20a80d4def3
307 d20a80d4def3
306 d20a80d4def3
308 d20a80d4def3
307 007dc284c1f8
309 007dc284c1f8
308 82623d38b9ba 392fd25390da
310 82623d38b9ba 392fd25390da
309 82623d38b9ba
311 82623d38b9ba
310 82623d38b9ba
312 82623d38b9ba
311 392fd25390da
313 392fd25390da
312 392fd25390da
314 392fd25390da
313 $ hg log -r 'divergent()'
315 $ hg log -r 'divergent()'
314 $ hg debugsuccessorssets 'all()' --closest
316 $ hg debugsuccessorssets 'all()' --closest
315 d20a80d4def3
317 d20a80d4def3
316 d20a80d4def3
318 d20a80d4def3
317 82623d38b9ba
319 82623d38b9ba
318 82623d38b9ba
320 82623d38b9ba
319 392fd25390da
321 392fd25390da
320 392fd25390da
322 392fd25390da
321 $ hg debugsuccessorssets 'all()' --closest --hidden
323 $ hg debugsuccessorssets 'all()' --closest --hidden
322 d20a80d4def3
324 d20a80d4def3
323 d20a80d4def3
325 d20a80d4def3
324 007dc284c1f8
326 007dc284c1f8
325 82623d38b9ba 392fd25390da
327 82623d38b9ba 392fd25390da
326 82623d38b9ba
328 82623d38b9ba
327 82623d38b9ba
329 82623d38b9ba
328 392fd25390da
330 392fd25390da
329 392fd25390da
331 392fd25390da
330
332
331 Even when subsequent rewriting happen
333 Even when subsequent rewriting happen
332
334
333 $ mkcommit A_3
335 $ mkcommit A_3
334 created new head
336 created new head
335 $ hg debugobsolete `getid A_1` `getid A_3`
337 $ hg debugobsolete `getid A_1` `getid A_3`
336 $ hg up 0
338 $ hg up 0
337 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
339 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
338 $ mkcommit A_4
340 $ mkcommit A_4
339 created new head
341 created new head
340 $ hg debugobsolete `getid A_2` `getid A_4`
342 $ hg debugobsolete `getid A_2` `getid A_4`
341 $ hg up 0
343 $ hg up 0
342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
344 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
343 $ mkcommit A_5
345 $ mkcommit A_5
344 created new head
346 created new head
345 $ hg debugobsolete `getid A_4` `getid A_5`
347 $ hg debugobsolete `getid A_4` `getid A_5`
346 $ hg log -G --hidden
348 $ hg log -G --hidden
347 @ 6:e442cfc57690 A_5
349 @ 6:e442cfc57690 A_5
348 |
350 |
349 | x 5:6a411f0d7a0a A_4
351 | x 5:6a411f0d7a0a A_4
350 |/
352 |/
351 | o 4:01f36c5a8fda A_3
353 | o 4:01f36c5a8fda A_3
352 |/
354 |/
353 | x 3:392fd25390da A_2
355 | x 3:392fd25390da A_2
354 |/
356 |/
355 | x 2:82623d38b9ba A_1
357 | x 2:82623d38b9ba A_1
356 |/
358 |/
357 | x 1:007dc284c1f8 A_0
359 | x 1:007dc284c1f8 A_0
358 |/
360 |/
359 o 0:d20a80d4def3 base
361 o 0:d20a80d4def3 base
360
362
361 $ hg debugsuccessorssets --hidden 'all()'
363 $ hg debugsuccessorssets --hidden 'all()'
362 d20a80d4def3
364 d20a80d4def3
363 d20a80d4def3
365 d20a80d4def3
364 007dc284c1f8
366 007dc284c1f8
365 01f36c5a8fda e442cfc57690
367 01f36c5a8fda e442cfc57690
366 82623d38b9ba
368 82623d38b9ba
367 01f36c5a8fda
369 01f36c5a8fda
368 392fd25390da
370 392fd25390da
369 e442cfc57690
371 e442cfc57690
370 01f36c5a8fda
372 01f36c5a8fda
371 01f36c5a8fda
373 01f36c5a8fda
372 6a411f0d7a0a
374 6a411f0d7a0a
373 e442cfc57690
375 e442cfc57690
374 e442cfc57690
376 e442cfc57690
375 e442cfc57690
377 e442cfc57690
376 $ hg debugsuccessorssets 'all()' --closest
378 $ hg debugsuccessorssets 'all()' --closest
377 d20a80d4def3
379 d20a80d4def3
378 d20a80d4def3
380 d20a80d4def3
379 01f36c5a8fda
381 01f36c5a8fda
380 01f36c5a8fda
382 01f36c5a8fda
381 e442cfc57690
383 e442cfc57690
382 e442cfc57690
384 e442cfc57690
383 $ hg debugsuccessorssets 'all()' --closest --hidden
385 $ hg debugsuccessorssets 'all()' --closest --hidden
384 d20a80d4def3
386 d20a80d4def3
385 d20a80d4def3
387 d20a80d4def3
386 007dc284c1f8
388 007dc284c1f8
387 82623d38b9ba 392fd25390da
389 82623d38b9ba 392fd25390da
388 82623d38b9ba
390 82623d38b9ba
389 82623d38b9ba
391 82623d38b9ba
390 392fd25390da
392 392fd25390da
391 392fd25390da
393 392fd25390da
392 01f36c5a8fda
394 01f36c5a8fda
393 01f36c5a8fda
395 01f36c5a8fda
394 6a411f0d7a0a
396 6a411f0d7a0a
395 e442cfc57690
397 e442cfc57690
396 e442cfc57690
398 e442cfc57690
397 e442cfc57690
399 e442cfc57690
398 $ hg log -r 'divergent()'
400 $ hg log -r 'divergent()'
399
401
400 Check more complex obsolescence graft (with divergence)
402 Check more complex obsolescence graft (with divergence)
401
403
402 $ mkcommit B_0; hg up 0
404 $ mkcommit B_0; hg up 0
403 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
405 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
404 $ hg debugobsolete `getid B_0` `getid A_2`
406 $ hg debugobsolete `getid B_0` `getid A_2`
405 $ mkcommit A_7; hg up 0
407 $ mkcommit A_7; hg up 0
406 created new head
408 created new head
407 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
409 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
408 $ mkcommit A_8; hg up 0
410 $ mkcommit A_8; hg up 0
409 created new head
411 created new head
410 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
412 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
411 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
413 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
412 $ mkcommit A_9; hg up 0
414 $ mkcommit A_9; hg up 0
413 created new head
415 created new head
414 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
416 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
415 $ hg debugobsolete `getid A_5` `getid A_9`
417 $ hg debugobsolete `getid A_5` `getid A_9`
416 $ hg log -G --hidden
418 $ hg log -G --hidden
417 o 10:bed64f5d2f5a A_9
419 o 10:bed64f5d2f5a A_9
418 |
420 |
419 | o 9:14608b260df8 A_8
421 | o 9:14608b260df8 A_8
420 |/
422 |/
421 | o 8:7ae126973a96 A_7
423 | o 8:7ae126973a96 A_7
422 |/
424 |/
423 | x 7:3750ebee865d B_0
425 | x 7:3750ebee865d B_0
424 | |
426 | |
425 | x 6:e442cfc57690 A_5
427 | x 6:e442cfc57690 A_5
426 |/
428 |/
427 | x 5:6a411f0d7a0a A_4
429 | x 5:6a411f0d7a0a A_4
428 |/
430 |/
429 | o 4:01f36c5a8fda A_3
431 | o 4:01f36c5a8fda A_3
430 |/
432 |/
431 | x 3:392fd25390da A_2
433 | x 3:392fd25390da A_2
432 |/
434 |/
433 | x 2:82623d38b9ba A_1
435 | x 2:82623d38b9ba A_1
434 |/
436 |/
435 | x 1:007dc284c1f8 A_0
437 | x 1:007dc284c1f8 A_0
436 |/
438 |/
437 @ 0:d20a80d4def3 base
439 @ 0:d20a80d4def3 base
438
440
439 $ hg debugsuccessorssets --hidden 'all()'
441 $ hg debugsuccessorssets --hidden 'all()'
440 d20a80d4def3
442 d20a80d4def3
441 d20a80d4def3
443 d20a80d4def3
442 007dc284c1f8
444 007dc284c1f8
443 01f36c5a8fda bed64f5d2f5a
445 01f36c5a8fda bed64f5d2f5a
444 01f36c5a8fda 7ae126973a96 14608b260df8
446 01f36c5a8fda 7ae126973a96 14608b260df8
445 82623d38b9ba
447 82623d38b9ba
446 01f36c5a8fda
448 01f36c5a8fda
447 392fd25390da
449 392fd25390da
448 bed64f5d2f5a
450 bed64f5d2f5a
449 7ae126973a96 14608b260df8
451 7ae126973a96 14608b260df8
450 01f36c5a8fda
452 01f36c5a8fda
451 01f36c5a8fda
453 01f36c5a8fda
452 6a411f0d7a0a
454 6a411f0d7a0a
453 bed64f5d2f5a
455 bed64f5d2f5a
454 7ae126973a96 14608b260df8
456 7ae126973a96 14608b260df8
455 e442cfc57690
457 e442cfc57690
456 bed64f5d2f5a
458 bed64f5d2f5a
457 7ae126973a96 14608b260df8
459 7ae126973a96 14608b260df8
458 3750ebee865d
460 3750ebee865d
459 bed64f5d2f5a
461 bed64f5d2f5a
460 7ae126973a96 14608b260df8
462 7ae126973a96 14608b260df8
461 7ae126973a96
463 7ae126973a96
462 7ae126973a96
464 7ae126973a96
463 14608b260df8
465 14608b260df8
464 14608b260df8
466 14608b260df8
465 bed64f5d2f5a
467 bed64f5d2f5a
466 bed64f5d2f5a
468 bed64f5d2f5a
467 $ hg debugsuccessorssets 'all()' --closest
469 $ hg debugsuccessorssets 'all()' --closest
468 d20a80d4def3
470 d20a80d4def3
469 d20a80d4def3
471 d20a80d4def3
470 01f36c5a8fda
472 01f36c5a8fda
471 01f36c5a8fda
473 01f36c5a8fda
472 7ae126973a96
474 7ae126973a96
473 7ae126973a96
475 7ae126973a96
474 14608b260df8
476 14608b260df8
475 14608b260df8
477 14608b260df8
476 bed64f5d2f5a
478 bed64f5d2f5a
477 bed64f5d2f5a
479 bed64f5d2f5a
478 $ hg debugsuccessorssets 'all()' --closest --hidden
480 $ hg debugsuccessorssets 'all()' --closest --hidden
479 d20a80d4def3
481 d20a80d4def3
480 d20a80d4def3
482 d20a80d4def3
481 007dc284c1f8
483 007dc284c1f8
482 82623d38b9ba 392fd25390da
484 82623d38b9ba 392fd25390da
483 82623d38b9ba
485 82623d38b9ba
484 82623d38b9ba
486 82623d38b9ba
485 392fd25390da
487 392fd25390da
486 392fd25390da
488 392fd25390da
487 01f36c5a8fda
489 01f36c5a8fda
488 01f36c5a8fda
490 01f36c5a8fda
489 6a411f0d7a0a
491 6a411f0d7a0a
490 e442cfc57690
492 e442cfc57690
491 e442cfc57690
493 e442cfc57690
492 e442cfc57690
494 e442cfc57690
493 3750ebee865d
495 3750ebee865d
494 392fd25390da
496 392fd25390da
495 7ae126973a96
497 7ae126973a96
496 7ae126973a96
498 7ae126973a96
497 14608b260df8
499 14608b260df8
498 14608b260df8
500 14608b260df8
499 bed64f5d2f5a
501 bed64f5d2f5a
500 bed64f5d2f5a
502 bed64f5d2f5a
501 $ hg log -r 'divergent()'
503 $ hg log -r 'divergent()'
502 4:01f36c5a8fda A_3
504 4:01f36c5a8fda A_3
503 8:7ae126973a96 A_7
505 8:7ae126973a96 A_7
504 9:14608b260df8 A_8
506 9:14608b260df8 A_8
505 10:bed64f5d2f5a A_9
507 10:bed64f5d2f5a A_9
506
508
507 fix the divergence
509 fix the divergence
508
510
509 $ mkcommit A_A; hg up 0
511 $ mkcommit A_A; hg up 0
510 created new head
512 created new head
511 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
513 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
512 $ hg debugobsolete `getid A_9` `getid A_A`
514 $ hg debugobsolete `getid A_9` `getid A_A`
513 $ hg debugobsolete `getid A_7` `getid A_A`
515 $ hg debugobsolete `getid A_7` `getid A_A`
514 $ hg debugobsolete `getid A_8` `getid A_A`
516 $ hg debugobsolete `getid A_8` `getid A_A`
515 $ hg log -G --hidden
517 $ hg log -G --hidden
516 o 11:a139f71be9da A_A
518 o 11:a139f71be9da A_A
517 |
519 |
518 | x 10:bed64f5d2f5a A_9
520 | x 10:bed64f5d2f5a A_9
519 |/
521 |/
520 | x 9:14608b260df8 A_8
522 | x 9:14608b260df8 A_8
521 |/
523 |/
522 | x 8:7ae126973a96 A_7
524 | x 8:7ae126973a96 A_7
523 |/
525 |/
524 | x 7:3750ebee865d B_0
526 | x 7:3750ebee865d B_0
525 | |
527 | |
526 | x 6:e442cfc57690 A_5
528 | x 6:e442cfc57690 A_5
527 |/
529 |/
528 | x 5:6a411f0d7a0a A_4
530 | x 5:6a411f0d7a0a A_4
529 |/
531 |/
530 | o 4:01f36c5a8fda A_3
532 | o 4:01f36c5a8fda A_3
531 |/
533 |/
532 | x 3:392fd25390da A_2
534 | x 3:392fd25390da A_2
533 |/
535 |/
534 | x 2:82623d38b9ba A_1
536 | x 2:82623d38b9ba A_1
535 |/
537 |/
536 | x 1:007dc284c1f8 A_0
538 | x 1:007dc284c1f8 A_0
537 |/
539 |/
538 @ 0:d20a80d4def3 base
540 @ 0:d20a80d4def3 base
539
541
540 $ hg debugsuccessorssets --hidden 'all()'
542 $ hg debugsuccessorssets --hidden 'all()'
541 d20a80d4def3
543 d20a80d4def3
542 d20a80d4def3
544 d20a80d4def3
543 007dc284c1f8
545 007dc284c1f8
544 01f36c5a8fda a139f71be9da
546 01f36c5a8fda a139f71be9da
545 82623d38b9ba
547 82623d38b9ba
546 01f36c5a8fda
548 01f36c5a8fda
547 392fd25390da
549 392fd25390da
548 a139f71be9da
550 a139f71be9da
549 01f36c5a8fda
551 01f36c5a8fda
550 01f36c5a8fda
552 01f36c5a8fda
551 6a411f0d7a0a
553 6a411f0d7a0a
552 a139f71be9da
554 a139f71be9da
553 e442cfc57690
555 e442cfc57690
554 a139f71be9da
556 a139f71be9da
555 3750ebee865d
557 3750ebee865d
556 a139f71be9da
558 a139f71be9da
557 7ae126973a96
559 7ae126973a96
558 a139f71be9da
560 a139f71be9da
559 14608b260df8
561 14608b260df8
560 a139f71be9da
562 a139f71be9da
561 bed64f5d2f5a
563 bed64f5d2f5a
562 a139f71be9da
564 a139f71be9da
563 a139f71be9da
565 a139f71be9da
564 a139f71be9da
566 a139f71be9da
565 $ hg debugsuccessorssets 'all()' --closest
567 $ hg debugsuccessorssets 'all()' --closest
566 d20a80d4def3
568 d20a80d4def3
567 d20a80d4def3
569 d20a80d4def3
568 01f36c5a8fda
570 01f36c5a8fda
569 01f36c5a8fda
571 01f36c5a8fda
570 a139f71be9da
572 a139f71be9da
571 a139f71be9da
573 a139f71be9da
572 $ hg debugsuccessorssets 'all()' --closest --hidden
574 $ hg debugsuccessorssets 'all()' --closest --hidden
573 d20a80d4def3
575 d20a80d4def3
574 d20a80d4def3
576 d20a80d4def3
575 007dc284c1f8
577 007dc284c1f8
576 82623d38b9ba 392fd25390da
578 82623d38b9ba 392fd25390da
577 82623d38b9ba
579 82623d38b9ba
578 82623d38b9ba
580 82623d38b9ba
579 392fd25390da
581 392fd25390da
580 392fd25390da
582 392fd25390da
581 01f36c5a8fda
583 01f36c5a8fda
582 01f36c5a8fda
584 01f36c5a8fda
583 6a411f0d7a0a
585 6a411f0d7a0a
584 e442cfc57690
586 e442cfc57690
585 e442cfc57690
587 e442cfc57690
586 e442cfc57690
588 e442cfc57690
587 3750ebee865d
589 3750ebee865d
588 392fd25390da
590 392fd25390da
589 7ae126973a96
591 7ae126973a96
590 a139f71be9da
592 a139f71be9da
591 14608b260df8
593 14608b260df8
592 a139f71be9da
594 a139f71be9da
593 bed64f5d2f5a
595 bed64f5d2f5a
594 a139f71be9da
596 a139f71be9da
595 a139f71be9da
597 a139f71be9da
596 a139f71be9da
598 a139f71be9da
597 $ hg log -r 'divergent()'
599 $ hg log -r 'divergent()'
598
600
599 $ cd ..
601 $ cd ..
600
602
601
603
602 Subset does not diverge
604 Subset does not diverge
603 ------------------------------
605 ------------------------------
604
606
605 Do not report divergent successors-set if it is a subset of another
607 Do not report divergent successors-set if it is a subset of another
606 successors-set. (report [A,B] not [A] + [A,B])
608 successors-set. (report [A,B] not [A] + [A,B])
607
609
608 $ newcase subset
610 $ newcase subset
609 $ hg debugobsolete `getid A_0` `getid A_2`
611 $ hg debugobsolete `getid A_0` `getid A_2`
610 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
612 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
611 $ hg debugsuccessorssets --hidden 'desc('A_0')'
613 $ hg debugsuccessorssets --hidden 'desc('A_0')'
612 007dc284c1f8
614 007dc284c1f8
613 82623d38b9ba 392fd25390da
615 82623d38b9ba 392fd25390da
614 $ hg debugsuccessorssets 'desc('A_0')' --closest
616 $ hg debugsuccessorssets 'desc('A_0')' --closest
615 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
617 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
616 007dc284c1f8
618 007dc284c1f8
617 82623d38b9ba 392fd25390da
619 82623d38b9ba 392fd25390da
618
620
619 $ cd ..
621 $ cd ..
622
623 Use scmutil.cleanupnodes API to create divergence
624
625 $ hg init cleanupnodes
626 $ cd cleanupnodes
627 $ hg debugdrawdag <<'EOS'
628 > B1 B3 B4
629 > | \|
630 > A Z
631 > EOS
632
633 $ hg update -q B1
634 $ echo 3 >> B
635 $ hg commit --amend -m B2
636 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
637 > from mercurial import registrar, scmutil
638 > cmdtable = {}
639 > command = registrar.command(cmdtable)
640 > @command('cleanup')
641 > def cleanup(ui, repo):
642 > def node(expr):
643 > unfi = repo.unfiltered()
644 > rev = unfi.revs(expr).first()
645 > return unfi.changelog.node(rev)
646 > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
647 > mapping = {node('desc(B1)'): [node('desc(B3)')],
648 > node('desc(B3)'): [node('desc(B4)')]}
649 > scmutil.cleanupnodes(repo, mapping, 'test')
650 > EOF
651
652 $ rm .hg/localtags
653 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
654 $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
655 @ 5:1a2a9b5b0030 B2 divergent
656 |
657 | o 4:70d5a63ca112 B4 divergent
658 | |
659 | o 1:48b9aae0607f Z
660 |
661 o 0:426bada5c675 A
662
663 $ hg debugobsolete
664 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
665 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
666 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
General Comments 0
You need to be logged in to leave comments. Login now