scmutil: speed up relativization of paths when it's a no-op...
Valentin Gatien-Baron
r45385:5d8ae924 default
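The function returned by getuipathfn() is applied once per file to build the
path shown to the user, so skipping the per-file repo.pathto() call matters on
large working copies. This change returns the identity function when the
current working directory is the repository root (repo.getcwd() == b''), where
relativization is a no-op. A minimal sketch of the idea (standalone Python;
make_uipathfn and relativize are illustrative names, not Mercurial's API):

    def make_uipathfn(cwd, relativize):
        if cwd != b'':
            # only pay for per-file path computation when it can change f
            return lambda f: relativize(f, cwd)
        # cwd is the repo root: every repo-relative path is already right
        return lambda f: f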
@@ -1,2209 +1,2212 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import os
import posixpath
import re
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirrev,
)
from .pycompat import getattr
from .thirdparty import attr
from . import (
    copies as copiesmod,
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pycompat,
    revsetlang,
    similar,
    smartset,
    url,
    util,
    vfs,
)

from .utils import (
    hashutil,
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

parsers = policy.importmod('parsers')
rustrevlog = policy.importrust('revlog')

termsize = scmplatform.termsize


@attr.s(slots=True, repr=False)
class status(object):
    '''Struct with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only
    relevant to the working copy.
    '''

    modified = attr.ib(default=attr.Factory(list))
    added = attr.ib(default=attr.Factory(list))
    removed = attr.ib(default=attr.Factory(list))
    deleted = attr.ib(default=attr.Factory(list))
    unknown = attr.ib(default=attr.Factory(list))
    ignored = attr.ib(default=attr.Factory(list))
    clean = attr.ib(default=attr.Factory(list))

    def __iter__(self):
        yield self.modified
        yield self.added
        yield self.removed
        yield self.deleted
        yield self.unknown
        yield self.ignored
        yield self.clean

    def __repr__(self):
        return (
            r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
            r'unknown=%s, ignored=%s, clean=%s>'
        ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
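# Illustrative note (not part of scmutil.py): __iter__ yields the seven lists
# in a fixed order, so a status instance unpacks positionally:
#
#     st = status(modified=[b'a.txt'], added=[b'b.txt'])
#     modified, added, removed, deleted, unknown, ignored, clean = st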


def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)


def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(
            _(b"no changes found (ignored %d secret changesets)\n")
            % len(secretlist)
        )
    else:
        ui.status(_(b"no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except:  # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _(b'timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker)
            )
        else:
            reason = _(b'lock held by %r') % inst.locker
        ui.error(
            _(b"abort: %s: %s\n")
            % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
        )
        if not inst.locker:
            ui.error(_(b"(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(
            _(b"abort: could not lock %s: %s\n")
            % (
                inst.desc or stringutil.forcebytestr(inst.filename),
                encoding.strtolocal(inst.strerror),
            )
        )
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _(b"abort: remote error:\n")
        else:
            msg = _(b"abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(b''.join(inst.args))
        if inst.hint:
            ui.error(b'(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_(b"abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(b" %r\n" % (msg,))
        elif not msg:
            ui.error(_(b" empty string\n"))
        else:
            ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_(b"abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error(b"%s\n" % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_(b"abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_(b"abort: %s\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in b"mpatch bdiff".split():
            ui.error(_(b"(did you forget to compile extensions?)\n"))
        elif m in b"zlib".split():
            ui.error(_(b"(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, b"code"):  # HTTPError
            ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
            try:  # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
        elif (
            util.safehasattr(inst, b"args")
            and inst.args
            and inst.args[0] == errno.EPIPE
        ):
            pass
        elif getattr(inst, "strerror", None):  # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(
                    _(b"abort: %s: '%s'\n")
                    % (
                        encoding.strtolocal(inst.strerror),
                        stringutil.forcebytestr(inst.filename),
                    )
                )
            else:
                ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:  # suspicious IOError
            raise
    except MemoryError:
        ui.error(_(b"abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1
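# Illustrative usage (not part of scmutil.py): callers wrap their entry point
# in a callable and treat the result as an exit code, e.g.:
#
#     ret = callcatch(ui, lambda: _runcommand(ui, args))  # _runcommand is hypothetical
#     sys.exit(ret)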


def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in [b'tip', b'.', b'null']:
        raise error.Abort(_(b"the name '%s' is reserved") % lbl)
    for c in (b':', b'\0', b'\n', b'\r'):
        if c in lbl:
            raise error.Abort(
                _(b"%r cannot be used in a name") % pycompat.bytestr(c)
            )
    try:
        int(lbl)
        raise error.Abort(_(b"cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)


def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if b'\r' in f or b'\n' in f:
        raise error.Abort(
            _(b"'\\n' and '\\r' disallowed in filenames: %r")
            % pycompat.bytestr(f)
        )


def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = b"%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_(b"warning: %s\n") % msg)


def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config(b'ui', b'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == b'abort'
    warn = bval or lval == b'warn'
    if bval is None and not (warn or abort or lval == b'ignore'):
        raise error.ConfigError(
            _(b"ui.portablefilenames value is invalid ('%s')") % val
        )
    return abort, warn
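# Illustrative note (not part of scmutil.py): on posix the logic above works
# out to roughly:
#     'abort'           -> (abort=True,  warn=False)
#     'warn' or true    -> (abort=False, warn=True)
#     'ignore' or false -> (abort=False, warn=False)
# while on Windows abort is always True, whatever the config says.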


class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = b'\0'.join(dirstate)
        self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _(b'possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_(b"warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
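# Illustrative usage (not part of scmutil.py): the auditor is called once per
# filename; a later name that differs only in case triggers it:
#
#     audit = casecollisionauditor(ui, abort=False, dirstate=repo.dirstate)
#     audit(b'README')  # recorded
#     audit(b'readme')  # warns: possible case-folding collision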


def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashutil.sha1()
        for rev in revs:
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
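# Illustrative note (not part of scmutil.py): the key is a SHA-1 over the
# sorted filtered revs rendered as b'<rev>;', so filtered revs {2, 5} hash
# the byte string b'2;5;'.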


def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''

    def errhandler(err):
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if b'.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, b'.hg', b'patches')
            if os.path.isdir(os.path.join(qroot, b'.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(b'.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs


def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev


def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))


def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return b'%d:%s' % (rev, hexfunc(node))


def resolvehexnodeidprefix(repo, prefix):
    if prefix.startswith(b'x'):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config(
            b'experimental', b'revisions.disambiguatewithin'
        )
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {
                (b'experimental', b'revisions.disambiguatewithin'): None
            }
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node)  # make sure node isn't filtered
    return node


def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False
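# Illustrative examples (not part of scmutil.py), assuming len(repo) == 100:
#     mayberevnum(repo, b'42')  -> True   (a valid revnum)
#     mayberevnum(repo, b'042') -> False  (leading zero is never a revnum)
#     mayberevnum(repo, b'999') -> False  (larger than the tip rev)
#     mayberevnum(repo, b'abc') -> False  (not an integer)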


def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapper needs to give access to its internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
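# Illustrative note (not part of scmutil.py): with
# experimental.revisions.prefixhexnode enabled, disambiguate() above prepends
# b'x' when a short hex prefix could also be read as a revnum (e.g. b'1234'
# becomes b'x1234'); resolvehexnodeidprefix() strips that b'x' off again.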


def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False


def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        raise _filterederror(repo, symbol)
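# Illustrative usage (not part of scmutil.py):
#
#     revsymbol(repo, b'tip')   # symbolic names work
#     revsymbol(repo, b'-1')    # negative revnums count back from tip
#     revsymbol(repo, b'max(public())')  # raises RepoLookupError: no revsets here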


def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith(b'visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _(b"hidden revision '%s'") % changeid

        hint = _(b'use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _(b"filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)


def revsingle(repo, revspec, default=b'.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_(b'empty revision set'))
    return repo[l.last()]


def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in (
        b'range',
        b'rangepre',
        b'rangepost',
        b'rangeall',
    )


def revpair(repo, revs):
    if not revs:
        return repo[b'.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_(b'empty revision range'))

    first = l.first()
    second = l.last()

    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]


def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec(b'%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
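# Illustrative usage (not part of scmutil.py): each spec is a revset and the
# results are OR-ed into a single smartset; bare ints go through formatspec:
#
#     revs = revrange(repo, [b'0:2', b'tip'])
#     revs = revrange(repo, [5])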


def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
-        pathto = repo.pathto
-        return lambda f: pathto(f, cwd)
-    elif repo.ui.configbool(b'ui', b'slash'):
+        if cwd != b'':
+            # this branch is correct when cwd == b'', ie cwd = repo root,
+            # but it's slower
+            pathto = repo.pathto
+            return lambda f: pathto(f, cwd)
+    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    else:
        return util.localpath
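# Illustrative note (not part of scmutil.py): after this change the returned
# callable is, per branch above:
#     relative and cwd != b'' -> lambda f: pathto(f, cwd)  (per-file work)
#     ui.slash set            -> lambda f: f               (cheap identity)
#     otherwise               -> util.localpath
# When cwd is the repo root (b''), relativization is a no-op and the code now
# falls through to the cheap variants instead of calling pathto per file.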


def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))


def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))


def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret


def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def bad(f, msg):
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if m.always():
        pats = []
    return m, pats


def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]


def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()


def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)
896
899
897
900
898 def parsefollowlinespattern(repo, rev, pat, msg):
901 def parsefollowlinespattern(repo, rev, pat, msg):
899 """Return a file name from `pat` pattern suitable for usage in followlines
902 """Return a file name from `pat` pattern suitable for usage in followlines
900 logic.
903 logic.
901 """
904 """
902 if not matchmod.patkind(pat):
905 if not matchmod.patkind(pat):
903 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
906 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
904 else:
907 else:
905 ctx = repo[rev]
908 ctx = repo[rev]
906 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
909 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
907 files = [f for f in ctx if m(f)]
910 files = [f for f in ctx if m(f)]
908 if len(files) != 1:
911 if len(files) != 1:
909 raise error.ParseError(msg)
912 raise error.ParseError(msg)
910 return files[0]
913 return files[0]
911
914
912
915
913 def getorigvfs(ui, repo):
916 def getorigvfs(ui, repo):
914 """return a vfs suitable to save 'orig' file
917 """return a vfs suitable to save 'orig' file
915
918
916 return None if no special directory is configured"""
919 return None if no special directory is configured"""
917 origbackuppath = ui.config(b'ui', b'origbackuppath')
920 origbackuppath = ui.config(b'ui', b'origbackuppath')
918 if not origbackuppath:
921 if not origbackuppath:
919 return None
922 return None
920 return vfs.vfs(repo.wvfs.join(origbackuppath))
923 return vfs.vfs(repo.wvfs.join(origbackuppath))
921
924
922
925
923 def backuppath(ui, repo, filepath):
926 def backuppath(ui, repo, filepath):
924 '''customize where working copy backup files (.orig files) are created
927 '''customize where working copy backup files (.orig files) are created
925
928
926 Fetch user defined path from config file: [ui] origbackuppath = <path>
929 Fetch user defined path from config file: [ui] origbackuppath = <path>
927 Fall back to default (filepath with .orig suffix) if not specified
930 Fall back to default (filepath with .orig suffix) if not specified
928
931
929 filepath is repo-relative
932 filepath is repo-relative
930
933
931 Returns an absolute path
934 Returns an absolute path
932 '''
935 '''
933 origvfs = getorigvfs(ui, repo)
936 origvfs = getorigvfs(ui, repo)
934 if origvfs is None:
937 if origvfs is None:
935 return repo.wjoin(filepath + b".orig")
938 return repo.wjoin(filepath + b".orig")
936
939
937 origbackupdir = origvfs.dirname(filepath)
940 origbackupdir = origvfs.dirname(filepath)
938 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
941 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
939 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
942 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
940
943
941 # Remove any files that conflict with the backup file's path
944 # Remove any files that conflict with the backup file's path
942 for f in reversed(list(pathutil.finddirs(filepath))):
945 for f in reversed(list(pathutil.finddirs(filepath))):
943 if origvfs.isfileorlink(f):
946 if origvfs.isfileorlink(f):
944 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
947 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
945 origvfs.unlink(f)
948 origvfs.unlink(f)
946 break
949 break
947
950
948 origvfs.makedirs(origbackupdir)
951 origvfs.makedirs(origbackupdir)
949
952
950 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
953 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
951 ui.note(
954 ui.note(
952 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
955 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
953 )
956 )
954 origvfs.rmtree(filepath, forcibly=True)
957 origvfs.rmtree(filepath, forcibly=True)
955
958
956 return origvfs.join(filepath)
959 return origvfs.join(filepath)
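

# Illustrative sketch (an added note, not part of the original module): how
# backuppath() resolves a location under the two configurations; 'ui' and
# 'repo' are assumed to be a loaded ui object and local repository.
#
#   # no [ui] origbackuppath configured -> sibling file in the working copy:
#   backuppath(ui, repo, b'src/foo.py')  # -> <repo.root>/src/foo.py.orig
#
#   # with "[ui] origbackuppath = .hg/origbackups", the same call returns
#   # <repo.root>/.hg/origbackups/src/foo.py, creating the directory and
#   # clearing conflicting files or directories along the way.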


class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))
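

# Usage sketch (an added note, not part of the original module), mirroring
# how cleanupnodes() below uses this class: repo.revs() yields integer
# revisions, so the proxy lets callers test membership by node instead.
#
#   revs = repo.revs(b'draft()')
#   draftnodes = _containsnode(repo, revs)
#   node in draftnodes  # True iff repo.changelog.rev(node) is in revs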


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )
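

# Illustrative sketch (an added note, not part of the original module) of the
# two accepted shapes of 'replacements'; 'old' and 'new' stand for binary
# changeset ids.
#
#   # rewrite: 'old' is superseded by 'new' (obsmarker or strip, plus
#   # bookmark moves)
#   cleanupnodes(repo, {old: [new]}, operation=b'amend')
#
#   # prune: a bare iterable means the nodes have no successors
#   cleanupnodes(repo, [old1, old2], operation=b'prune')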


def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret


def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
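

# For reference (an added note, not from the original source): the dirstate
# letters consulted above are b'?' untracked, b'a' added, b'r' removed and
# b'n' normal; 'st' is the stat result from the walk, so "tracked but no
# stat" means the file has disappeared from disk.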


def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), nullid)
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()
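

# Worked example (an added note, not from the original source) for the copy
# chaining above: if the old parent recorded the copy a->b relative to newctx
# (oldcopies) and the working copy recorded b->c (copies), the merged dict
# maps c to oldcopies.get(b) == a, i.e. the rename chain collapses to a->c.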


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x
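

# Hedged usage sketch (an added note, not part of the original module):
# localrepo defines a subclass along these lines, resolving names against
# the .hg directory:
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class localrepository(object):
#       @repofilecache(b'bookmarks')
#       def _bookmarks(self):
#           return bookmarks.bmstore(self)
#
# The first access to repo._bookmarks stats .hg/bookmarks and caches the
# result; after invalidation (delattr), it is recomputed only if the file's
# cachestat actually changed.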


def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
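

# Illustrative configuration sketch (an added note; the [extdata] section is
# real, the source names below are made up):
#
#   [extdata]
#   filedata = file:///home/user/extdata.txt
#   shelldata = shell:cat extdata.txt
#
# where each line of the data is "<revspec> <freeform value>", e.g.
# "2ebcd3c93fa6 a comment"; extdatasource(repo, b'filedata') then returns a
# {rev: value} dict covering the revisions known locally.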


def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            b'lock can only be inherited while held'
        )
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)


def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(
        repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
    )


class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1777
1780
1778 def gdinitconfig(ui):
1781 def gdinitconfig(ui):
1779 """helper function to know if a repo should be created as general delta
1782 """helper function to know if a repo should be created as general delta
1780 """
1783 """
1781 # experimental config: format.generaldelta
1784 # experimental config: format.generaldelta
1782 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1785 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1783 b'format', b'usegeneraldelta'
1786 b'format', b'usegeneraldelta'
1784 )
1787 )
1785
1788
1786
1789
1787 def gddeltaconfig(ui):
1790 def gddeltaconfig(ui):
1788 """helper function to know if incoming delta should be optimised
1791 """helper function to know if incoming delta should be optimised
1789 """
1792 """
1790 # experimental config: format.generaldelta
1793 # experimental config: format.generaldelta
1791 return ui.configbool(b'format', b'generaldelta')
1794 return ui.configbool(b'format', b'generaldelta')
1792
1795
1793
1796
1794 class simplekeyvaluefile(object):
1797 class simplekeyvaluefile(object):
1795 """A simple file with key=value lines
1798 """A simple file with key=value lines
1796
1799
1797 Keys must be alphanumerics and start with a letter, values must not
1800 Keys must be alphanumerics and start with a letter, values must not
1798 contain '\n' characters"""
1801 contain '\n' characters"""
1799
1802
1800 firstlinekey = b'__firstline'
1803 firstlinekey = b'__firstline'
1801
1804
1802 def __init__(self, vfs, path, keys=None):
1805 def __init__(self, vfs, path, keys=None):
1803 self.vfs = vfs
1806 self.vfs = vfs
1804 self.path = path
1807 self.path = path
1805
1808
1806 def read(self, firstlinenonkeyval=False):
1809 def read(self, firstlinenonkeyval=False):
1807 """Read the contents of a simple key-value file
1810 """Read the contents of a simple key-value file
1808
1811
1809 'firstlinenonkeyval' indicates whether the first line of file should
1812 'firstlinenonkeyval' indicates whether the first line of file should
1810 be treated as a key-value pair or reuturned fully under the
1813 be treated as a key-value pair or reuturned fully under the
1811 __firstline key."""
1814 __firstline key."""
1812 lines = self.vfs.readlines(self.path)
1815 lines = self.vfs.readlines(self.path)
1813 d = {}
1816 d = {}
1814 if firstlinenonkeyval:
1817 if firstlinenonkeyval:
1815 if not lines:
1818 if not lines:
1816 e = _(b"empty simplekeyvalue file")
1819 e = _(b"empty simplekeyvalue file")
1817 raise error.CorruptedState(e)
1820 raise error.CorruptedState(e)
1818 # we don't want to include '\n' in the __firstline
1821 # we don't want to include '\n' in the __firstline
1819 d[self.firstlinekey] = lines[0][:-1]
1822 d[self.firstlinekey] = lines[0][:-1]
1820 del lines[0]
1823 del lines[0]
1821
1824
1822 try:
1825 try:
1823 # the 'if line.strip()' part prevents us from failing on empty
1826 # the 'if line.strip()' part prevents us from failing on empty
1824 # lines which only contain '\n' therefore are not skipped
1827 # lines which only contain '\n' therefore are not skipped
1825 # by 'if line'
1828 # by 'if line'
1826 updatedict = dict(
1829 updatedict = dict(
1827 line[:-1].split(b'=', 1) for line in lines if line.strip()
1830 line[:-1].split(b'=', 1) for line in lines if line.strip()
1828 )
1831 )
1829 if self.firstlinekey in updatedict:
1832 if self.firstlinekey in updatedict:
1830 e = _(b"%r can't be used as a key")
1833 e = _(b"%r can't be used as a key")
1831 raise error.CorruptedState(e % self.firstlinekey)
1834 raise error.CorruptedState(e % self.firstlinekey)
1832 d.update(updatedict)
1835 d.update(updatedict)
1833 except ValueError as e:
1836 except ValueError as e:
1834 raise error.CorruptedState(stringutil.forcebytestr(e))
1837 raise error.CorruptedState(stringutil.forcebytestr(e))
1835 return d
1838 return d
1836
1839
1837 def write(self, data, firstline=None):
1840 def write(self, data, firstline=None):
1838 """Write key=>value mapping to a file
1841 """Write key=>value mapping to a file
1839 data is a dict. Keys must be alphanumerical and start with a letter.
1842 data is a dict. Keys must be alphanumerical and start with a letter.
1840 Values must not contain newline characters.
1843 Values must not contain newline characters.
1841
1844
1842 If 'firstline' is not None, it is written to file before
1845 If 'firstline' is not None, it is written to file before
1843 everything else, as it is, not in a key=value form"""
1846 everything else, as it is, not in a key=value form"""
1844 lines = []
1847 lines = []
1845 if firstline is not None:
1848 if firstline is not None:
1846 lines.append(b'%s\n' % firstline)
1849 lines.append(b'%s\n' % firstline)
1847
1850
1848 for k, v in data.items():
1851 for k, v in data.items():
1849 if k == self.firstlinekey:
1852 if k == self.firstlinekey:
1850 e = b"key name '%s' is reserved" % self.firstlinekey
1853 e = b"key name '%s' is reserved" % self.firstlinekey
1851 raise error.ProgrammingError(e)
1854 raise error.ProgrammingError(e)
1852 if not k[0:1].isalpha():
1855 if not k[0:1].isalpha():
1853 e = b"keys must start with a letter in a key-value file"
1856 e = b"keys must start with a letter in a key-value file"
1854 raise error.ProgrammingError(e)
1857 raise error.ProgrammingError(e)
1855 if not k.isalnum():
1858 if not k.isalnum():
1856 e = b"invalid key name in a simple key-value file"
1859 e = b"invalid key name in a simple key-value file"
1857 raise error.ProgrammingError(e)
1860 raise error.ProgrammingError(e)
1858 if b'\n' in v:
1861 if b'\n' in v:
1859 e = b"invalid value in a simple key-value file"
1862 e = b"invalid value in a simple key-value file"
1860 raise error.ProgrammingError(e)
1863 raise error.ProgrammingError(e)
1861 lines.append(b"%s=%s\n" % (k, v))
1864 lines.append(b"%s=%s\n" % (k, v))
1862 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1865 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1863 fp.write(b''.join(lines))
1866 fp.write(b''.join(lines))
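

# Illustrative round-trip sketch (an added note, not part of the original
# module); 'repo.vfs' is assumed to point at the .hg directory:
#
#   skv = simplekeyvaluefile(repo.vfs, b'mystate')
#   skv.write({b'version': b'1', b'node': b'abc123'}, firstline=b'v1')
#   # .hg/mystate now contains b'v1\nversion=1\nnode=abc123\n'
#   data = skv.read(firstlinenonkeyval=True)
#   # -> {b'__firstline': b'v1', b'version': b'1', b'node': b'abc123'}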


_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()
1896
1899
1897 # A marker that tells the evolve extension to suppress its own reporting
1900 # A marker that tells the evolve extension to suppress its own reporting
1898 _reportstroubledchangesets = True
1901 _reportstroubledchangesets = True
1899
1902
1900
1903
1901 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1904 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1902 """register a callback to issue a summary after the transaction is closed
1905 """register a callback to issue a summary after the transaction is closed
1903
1906
1904 If as_validator is true, then the callbacks are registered as transaction
1907 If as_validator is true, then the callbacks are registered as transaction
1905 validators instead
1908 validators instead
1906 """
1909 """
1907
1910
1908 def txmatch(sources):
1911 def txmatch(sources):
1909 return any(txnname.startswith(source) for source in sources)
1912 return any(txnname.startswith(source) for source in sources)
1910
1913
1911 categories = []
1914 categories = []
1912
1915
1913 def reportsummary(func):
1916 def reportsummary(func):
1914 """decorator for report callbacks."""
1917 """decorator for report callbacks."""
1915 # The repoview life cycle is shorter than the one of the actual
1918 # The repoview life cycle is shorter than the one of the actual
1916 # underlying repository. So the filtered object can die before the
1919 # underlying repository. So the filtered object can die before the
1917 # weakref is used leading to troubles. We keep a reference to the
1920 # weakref is used leading to troubles. We keep a reference to the
1918 # unfiltered object and restore the filtering when retrieving the
1921 # unfiltered object and restore the filtering when retrieving the
1919 # repository through the weakref.
1922 # repository through the weakref.
1920 filtername = repo.filtername
1923 filtername = repo.filtername
1921 reporef = weakref.ref(repo.unfiltered())
1924 reporef = weakref.ref(repo.unfiltered())
1922
1925
1923 def wrapped(tr):
1926 def wrapped(tr):
1924 repo = reporef()
1927 repo = reporef()
1925 if filtername:
1928 if filtername:
1926 assert repo is not None # help pytype
1929 assert repo is not None # help pytype
1927 repo = repo.filtered(filtername)
1930 repo = repo.filtered(filtername)
1928 func(repo, tr)
1931 func(repo, tr)
1929
1932
1930 newcat = b'%02i-txnreport' % len(categories)
1933 newcat = b'%02i-txnreport' % len(categories)
1931 if as_validator:
1934 if as_validator:
1932 otr.addvalidator(newcat, wrapped)
1935 otr.addvalidator(newcat, wrapped)
1933 else:
1936 else:
1934 otr.addpostclose(newcat, wrapped)
1937 otr.addpostclose(newcat, wrapped)
1935 categories.append(newcat)
1938 categories.append(newcat)
1936 return wrapped
1939 return wrapped
1937
1940
1938 @reportsummary
1941 @reportsummary
1939 def reportchangegroup(repo, tr):
1942 def reportchangegroup(repo, tr):
1940 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
1943 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
1941 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
1944 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
1942 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
1945 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
1943 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
1946 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
1944 if cgchangesets or cgrevisions or cgfiles:
1947 if cgchangesets or cgrevisions or cgfiles:
1945 htext = b""
1948 htext = b""
1946 if cgheads:
1949 if cgheads:
1947 htext = _(b" (%+d heads)") % cgheads
1950 htext = _(b" (%+d heads)") % cgheads
1948 msg = _(b"added %d changesets with %d changes to %d files%s\n")
1951 msg = _(b"added %d changesets with %d changes to %d files%s\n")
1949 if as_validator:
1952 if as_validator:
1950 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
1953 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
1951 assert repo is not None # help pytype
1954 assert repo is not None # help pytype
1952 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
1955 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
1953
1956
1954 if txmatch(_reportobsoletedsource):
1957 if txmatch(_reportobsoletedsource):
1955
1958
1956 @reportsummary
1959 @reportsummary
1957 def reportobsoleted(repo, tr):
1960 def reportobsoleted(repo, tr):
1958 obsoleted = obsutil.getobsoleted(repo, tr)
1961 obsoleted = obsutil.getobsoleted(repo, tr)
1959 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
1962 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
1960 if newmarkers:
1963 if newmarkers:
1961 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
1964 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
1962 if obsoleted:
1965 if obsoleted:
1963 msg = _(b'obsoleted %i changesets\n')
1966 msg = _(b'obsoleted %i changesets\n')
1964 if as_validator:
1967 if as_validator:
1965 msg = _(b'obsoleting %i changesets\n')
1968 msg = _(b'obsoleting %i changesets\n')
1966 repo.ui.status(msg % len(obsoleted))
1969 repo.ui.status(msg % len(obsoleted))
1967
1970
1968 if obsolete.isenabled(
1971 if obsolete.isenabled(
1969 repo, obsolete.createmarkersopt
1972 repo, obsolete.createmarkersopt
1970 ) and repo.ui.configbool(
1973 ) and repo.ui.configbool(
1971 b'experimental', b'evolution.report-instabilities'
1974 b'experimental', b'evolution.report-instabilities'
1972 ):
1975 ):
1973 instabilitytypes = [
1976 instabilitytypes = [
1974 (b'orphan', b'orphan'),
1977 (b'orphan', b'orphan'),
1975 (b'phase-divergent', b'phasedivergent'),
1978 (b'phase-divergent', b'phasedivergent'),
1976 (b'content-divergent', b'contentdivergent'),
1979 (b'content-divergent', b'contentdivergent'),
1977 ]
1980 ]
1978
1981
1979 def getinstabilitycounts(repo):
1982 def getinstabilitycounts(repo):
1980 filtered = repo.changelog.filteredrevs
1983 filtered = repo.changelog.filteredrevs
1981 counts = {}
1984 counts = {}
1982 for instability, revset in instabilitytypes:
1985 for instability, revset in instabilitytypes:
1983 counts[instability] = len(
1986 counts[instability] = len(
1984 set(obsolete.getrevs(repo, revset)) - filtered
1987 set(obsolete.getrevs(repo, revset)) - filtered
1985 )
1988 )
1986 return counts
1989 return counts
1987
1990
1988 oldinstabilitycounts = getinstabilitycounts(repo)
1991 oldinstabilitycounts = getinstabilitycounts(repo)
1989
1992
1990 @reportsummary
1993 @reportsummary
1991 def reportnewinstabilities(repo, tr):
1994 def reportnewinstabilities(repo, tr):
1992 newinstabilitycounts = getinstabilitycounts(repo)
1995 newinstabilitycounts = getinstabilitycounts(repo)
1993 for instability, revset in instabilitytypes:
1996 for instability, revset in instabilitytypes:
1994 delta = (
1997 delta = (
1995 newinstabilitycounts[instability]
1998 newinstabilitycounts[instability]
1996 - oldinstabilitycounts[instability]
1999 - oldinstabilitycounts[instability]
1997 )
2000 )
1998 msg = getinstabilitymessage(delta, instability)
2001 msg = getinstabilitymessage(delta, instability)
1999 if msg:
2002 if msg:
2000 repo.ui.warn(msg)
2003 repo.ui.warn(msg)
2001
2004
2002 if txmatch(_reportnewcssource):
2005 if txmatch(_reportnewcssource):
2003
2006
2004 @reportsummary
2007 @reportsummary
2005 def reportnewcs(repo, tr):
2008 def reportnewcs(repo, tr):
2006 """Report the range of new revisions pulled/unbundled."""
2009 """Report the range of new revisions pulled/unbundled."""
2007 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2010 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2008 unfi = repo.unfiltered()
2011 unfi = repo.unfiltered()
2009 if origrepolen >= len(unfi):
2012 if origrepolen >= len(unfi):
2010 return
2013 return
2011
2014
2012 # Compute the bounds of new visible revisions' range.
2015 # Compute the bounds of new visible revisions' range.
2013 revs = smartset.spanset(repo, start=origrepolen)
2016 revs = smartset.spanset(repo, start=origrepolen)
2014 if revs:
2017 if revs:
2015 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2018 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2016
2019
2017 if minrev == maxrev:
2020 if minrev == maxrev:
2018 revrange = minrev
2021 revrange = minrev
2019 else:
2022 else:
2020 revrange = b'%s:%s' % (minrev, maxrev)
2023 revrange = b'%s:%s' % (minrev, maxrev)
2021 draft = len(repo.revs(b'%ld and draft()', revs))
2024 draft = len(repo.revs(b'%ld and draft()', revs))
2022 secret = len(repo.revs(b'%ld and secret()', revs))
2025 secret = len(repo.revs(b'%ld and secret()', revs))
2023 if not (draft or secret):
2026 if not (draft or secret):
2024 msg = _(b'new changesets %s\n') % revrange
2027 msg = _(b'new changesets %s\n') % revrange
2025 elif draft and secret:
2028 elif draft and secret:
2026 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2029 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2027 msg %= (revrange, draft, secret)
2030 msg %= (revrange, draft, secret)
2028 elif draft:
2031 elif draft:
2029 msg = _(b'new changesets %s (%d drafts)\n')
2032 msg = _(b'new changesets %s (%d drafts)\n')
2030 msg %= (revrange, draft)
2033 msg %= (revrange, draft)
2031 elif secret:
2034 elif secret:
2032 msg = _(b'new changesets %s (%d secrets)\n')
2035 msg = _(b'new changesets %s (%d secrets)\n')
2033 msg %= (revrange, secret)
2036 msg %= (revrange, secret)
2034 else:
2037 else:
2035 errormsg = b'entered unreachable condition'
2038 errormsg = b'entered unreachable condition'
2036 raise error.ProgrammingError(errormsg)
2039 raise error.ProgrammingError(errormsg)
2037 repo.ui.status(msg)
2040 repo.ui.status(msg)
2038
2041
2039 # search new changesets directly pulled as obsolete
2042 # search new changesets directly pulled as obsolete
2040 duplicates = tr.changes.get(b'revduplicates', ())
2043 duplicates = tr.changes.get(b'revduplicates', ())
2041 obsadded = unfi.revs(
2044 obsadded = unfi.revs(
2042 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2045 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2043 )
2046 )
2044 cl = repo.changelog
2047 cl = repo.changelog
2045 extinctadded = [r for r in obsadded if r not in cl]
2048 extinctadded = [r for r in obsadded if r not in cl]
2046 if extinctadded:
2049 if extinctadded:
2047 # They are not just obsolete, but obsolete and invisible
2050 # They are not just obsolete, but obsolete and invisible
2048 # we call them "extinct" internally but the terms have not been
2051 # we call them "extinct" internally but the terms have not been
2049 # exposed to users.
2052 # exposed to users.
2050 msg = b'(%d other changesets obsolete on arrival)\n'
2053 msg = b'(%d other changesets obsolete on arrival)\n'
2051 repo.ui.status(msg % len(extinctadded))
2054 repo.ui.status(msg % len(extinctadded))
2052
2055
2053 @reportsummary
2056 @reportsummary
2054 def reportphasechanges(repo, tr):
2057 def reportphasechanges(repo, tr):
2055 """Report statistics of phase changes for changesets pre-existing
2058 """Report statistics of phase changes for changesets pre-existing
2056 pull/unbundle.
2059 pull/unbundle.
2057 """
2060 """
2058 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2061 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2059 published = []
2062 published = []
2060 for revs, (old, new) in tr.changes.get(b'phases', []):
2063 for revs, (old, new) in tr.changes.get(b'phases', []):
2061 if new != phases.public:
2064 if new != phases.public:
2062 continue
2065 continue
2063 published.extend(rev for rev in revs if rev < origrepolen)
2066 published.extend(rev for rev in revs if rev < origrepolen)
2064 if not published:
2067 if not published:
2065 return
2068 return
2066 msg = _(b'%d local changesets published\n')
2069 msg = _(b'%d local changesets published\n')
2067 if as_validator:
2070 if as_validator:
2068 msg = _(b'%d local changesets will be published\n')
2071 msg = _(b'%d local changesets will be published\n')
2069 repo.ui.status(msg % len(published))
2072 repo.ui.status(msg % len(published))
2070
2073
2071
2074
2072 def getinstabilitymessage(delta, instability):
2075 def getinstabilitymessage(delta, instability):
2073 """function to return the message to show warning about new instabilities
2076 """function to return the message to show warning about new instabilities
2074
2077
2075 exists as a separate function so that extension can wrap to show more
2078 exists as a separate function so that extension can wrap to show more
2076 information like how to fix instabilities"""
2079 information like how to fix instabilities"""
2077 if delta > 0:
2080 if delta > 0:
2078 return _(b'%i new %s changesets\n') % (delta, instability)
2081 return _(b'%i new %s changesets\n') % (delta, instability)
2079
2082
2080
2083
2081 def nodesummaries(repo, nodes, maxnumnodes=4):
2084 def nodesummaries(repo, nodes, maxnumnodes=4):
2082 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2085 if len(nodes) <= maxnumnodes or repo.ui.verbose:
2083 return b' '.join(short(h) for h in nodes)
2086 return b' '.join(short(h) for h in nodes)
2084 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2087 first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2085 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2088 return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2086
2089
2087
2090
2088 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2091 def enforcesinglehead(repo, tr, desc, accountclosed=False):
2089 """check that no named branch has multiple heads"""
2092 """check that no named branch has multiple heads"""
2090 if desc in (b'strip', b'repair'):
2093 if desc in (b'strip', b'repair'):
2091 # skip the logic during strip
2094 # skip the logic during strip
2092 return
2095 return
2093 visible = repo.filtered(b'visible')
2096 visible = repo.filtered(b'visible')
2094 # possible improvement: we could restrict the check to affected branch
2097 # possible improvement: we could restrict the check to affected branch
2095 bm = visible.branchmap()
2098 bm = visible.branchmap()
2096 for name in bm:
2099 for name in bm:
2097 heads = bm.branchheads(name, closed=accountclosed)
2100 heads = bm.branchheads(name, closed=accountclosed)
2098 if len(heads) > 1:
2101 if len(heads) > 1:
2099 msg = _(b'rejecting multiple heads on branch "%s"')
2102 msg = _(b'rejecting multiple heads on branch "%s"')
2100 msg %= name
2103 msg %= name
2101 hint = _(b'%d heads: %s')
2104 hint = _(b'%d heads: %s')
2102 hint %= (len(heads), nodesummaries(repo, heads))
2105 hint %= (len(heads), nodesummaries(repo, heads))
2103 raise error.Abort(msg, hint=hint)
2106 raise error.Abort(msg, hint=hint)
2104
2107
2105
2108
2106 def wrapconvertsink(sink):
2109 def wrapconvertsink(sink):
2107 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2110 """Allow extensions to wrap the sink returned by convcmd.convertsink()
2108 before it is used, whether or not the convert extension was formally loaded.
2111 before it is used, whether or not the convert extension was formally loaded.
2109 """
2112 """
2110 return sink
2113 return sink
2111
2114
2112
2115
2113 def unhidehashlikerevs(repo, specs, hiddentype):
2116 def unhidehashlikerevs(repo, specs, hiddentype):
2114 """parse the user specs and unhide changesets whose hash or revision number
2117 """parse the user specs and unhide changesets whose hash or revision number
2115 is passed.
2118 is passed.
2116
2119
2117 hiddentype can be: 1) 'warn': warn while unhiding changesets
2120 hiddentype can be: 1) 'warn': warn while unhiding changesets
2118 2) 'nowarn': don't warn while unhiding changesets
2121 2) 'nowarn': don't warn while unhiding changesets
2119
2122
2120 returns a repo object with the required changesets unhidden
2123 returns a repo object with the required changesets unhidden
2121 """
2124 """
2122 if not repo.filtername or not repo.ui.configbool(
2125 if not repo.filtername or not repo.ui.configbool(
2123 b'experimental', b'directaccess'
2126 b'experimental', b'directaccess'
2124 ):
2127 ):
2125 return repo
2128 return repo
2126
2129
2127 if repo.filtername not in (b'visible', b'visible-hidden'):
2130 if repo.filtername not in (b'visible', b'visible-hidden'):
2128 return repo
2131 return repo
2129
2132
2130 symbols = set()
2133 symbols = set()
2131 for spec in specs:
2134 for spec in specs:
2132 try:
2135 try:
2133 tree = revsetlang.parse(spec)
2136 tree = revsetlang.parse(spec)
2134 except error.ParseError: # will be reported by scmutil.revrange()
2137 except error.ParseError: # will be reported by scmutil.revrange()
2135 continue
2138 continue
2136
2139
2137 symbols.update(revsetlang.gethashlikesymbols(tree))
2140 symbols.update(revsetlang.gethashlikesymbols(tree))
2138
2141
2139 if not symbols:
2142 if not symbols:
2140 return repo
2143 return repo
2141
2144
2142 revs = _getrevsfromsymbols(repo, symbols)
2145 revs = _getrevsfromsymbols(repo, symbols)
2143
2146
2144 if not revs:
2147 if not revs:
2145 return repo
2148 return repo
2146
2149
2147 if hiddentype == b'warn':
2150 if hiddentype == b'warn':
2148 unfi = repo.unfiltered()
2151 unfi = repo.unfiltered()
2149 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2152 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2150 repo.ui.warn(
2153 repo.ui.warn(
2151 _(
2154 _(
2152 b"warning: accessing hidden changesets for write "
2155 b"warning: accessing hidden changesets for write "
2153 b"operation: %s\n"
2156 b"operation: %s\n"
2154 )
2157 )
2155 % revstr
2158 % revstr
2156 )
2159 )
2157
2160
2158 # we have to use new filtername to separate branch/tags cache until we can
2161 # we have to use new filtername to separate branch/tags cache until we can
2159 # disbale these cache when revisions are dynamically pinned.
2162 # disbale these cache when revisions are dynamically pinned.
2160 return repo.filtered(b'visible-hidden', revs)
2163 return repo.filtered(b'visible-hidden', revs)
2161
2164
2162
2165
2163 def _getrevsfromsymbols(repo, symbols):
2166 def _getrevsfromsymbols(repo, symbols):
2164 """parse the list of symbols and returns a set of revision numbers of hidden
2167 """parse the list of symbols and returns a set of revision numbers of hidden
2165 changesets present in symbols"""
2168 changesets present in symbols"""
2166 revs = set()
2169 revs = set()
2167 unfi = repo.unfiltered()
2170 unfi = repo.unfiltered()
2168 unficl = unfi.changelog
2171 unficl = unfi.changelog
2169 cl = repo.changelog
2172 cl = repo.changelog
2170 tiprev = len(unficl)
2173 tiprev = len(unficl)
2171 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2174 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2172 for s in symbols:
2175 for s in symbols:
2173 try:
2176 try:
2174 n = int(s)
2177 n = int(s)
2175 if n <= tiprev:
2178 if n <= tiprev:
2176 if not allowrevnums:
2179 if not allowrevnums:
2177 continue
2180 continue
2178 else:
2181 else:
2179 if n not in cl:
2182 if n not in cl:
2180 revs.add(n)
2183 revs.add(n)
2181 continue
2184 continue
2182 except ValueError:
2185 except ValueError:
2183 pass
2186 pass
2184
2187
2185 try:
2188 try:
2186 s = resolvehexnodeidprefix(unfi, s)
2189 s = resolvehexnodeidprefix(unfi, s)
2187 except (error.LookupError, error.WdirUnsupported):
2190 except (error.LookupError, error.WdirUnsupported):
2188 s = None
2191 s = None
2189
2192
2190 if s is not None:
2193 if s is not None:
2191 rev = unficl.rev(s)
2194 rev = unficl.rev(s)
2192 if rev not in cl:
2195 if rev not in cl:
2193 revs.add(rev)
2196 revs.add(rev)
2194
2197
2195 return revs
2198 return revs
2196
2199
2197
2200
2198 def bookmarkrevs(repo, mark):
2201 def bookmarkrevs(repo, mark):
2199 """
2202 """
2200 Select revisions reachable by a given bookmark
2203 Select revisions reachable by a given bookmark
2201 """
2204 """
2202 return repo.revs(
2205 return repo.revs(
2203 b"ancestors(bookmark(%s)) - "
2206 b"ancestors(bookmark(%s)) - "
2204 b"ancestors(head() and not bookmark(%s)) - "
2207 b"ancestors(head() and not bookmark(%s)) - "
2205 b"ancestors(bookmark() and not bookmark(%s))",
2208 b"ancestors(bookmark() and not bookmark(%s))",
2206 mark,
2209 mark,
2207 mark,
2210 mark,
2208 mark,
2211 mark,
2209 )
2212 )
General Comments 0
You need to be logged in to leave comments. Login now