scmutil: convert status data object from a tuple to an attrs (API)...
Augie Fackler
r44053:c5548b0b default
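
For context, here is a small usage sketch (illustrative only, not part of the
commit) of what the change below means for callers: status stops being a
tuple subclass and becomes an attrs-decorated class whose __iter__ yields the
seven fields in their historical order, so tuple-style unpacking keeps
working while integer indexing like st[0] goes away. The file lists are
invented for the example.

    # Hypothetical example based on the class shape in the diff below;
    # the sample file lists are made up.
    from mercurial import scmutil

    st = scmutil.status(
        modified=[b'a.txt'],
        added=[b'b.txt'],
        removed=[],
        deleted=[],
        unknown=[],
        ignored=[],
        clean=[],
    )

    # New-style access by attribute name:
    assert st.added == [b'b.txt']

    # __iter__ preserves the old 7-tuple order, so existing unpacking
    # call sites continue to work unchanged:
    modified, added, removed, deleted, unknown, ignored, clean = st
    assert modified == [b'a.txt']
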
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -1,2221 +1,2195 @@
 # scmutil.py - Mercurial core utility functions
 #
 # Copyright Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 import glob
 import hashlib
 import os
 import posixpath
 import re
 import subprocess
 import weakref
 
 from .i18n import _
 from .node import (
     bin,
     hex,
     nullid,
     nullrev,
     short,
     wdirid,
     wdirrev,
 )
 from .pycompat import getattr
-
+from .thirdparty import attr
 from . import (
     copies as copiesmod,
     encoding,
     error,
     match as matchmod,
     obsolete,
     obsutil,
     pathutil,
     phases,
     policy,
     pycompat,
     revsetlang,
     similar,
     smartset,
     url,
     util,
     vfs,
 )
 
 from .utils import (
     procutil,
     stringutil,
 )
 
 if pycompat.iswindows:
     from . import scmwindows as scmplatform
 else:
     from . import scmposix as scmplatform
 
 parsers = policy.importmod('parsers')
 
 termsize = scmplatform.termsize
 
 
-class status(tuple):
-    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
-    and 'ignored' properties are only relevant to the working copy.
+@attr.s(slots=True, repr=False)
+class status(object):
+    '''Struct with a list of files per status.
+
+    The 'deleted', 'unknown' and 'ignored' properties are only
+    relevant to the working copy.
     '''
 
-    __slots__ = ()
-
-    def __new__(
-        cls, modified, added, removed, deleted, unknown, ignored, clean
-    ):
-        return tuple.__new__(
-            cls, (modified, added, removed, deleted, unknown, ignored, clean)
-        )
-
-    @property
-    def modified(self):
-        '''files that have been modified'''
-        return self[0]
-
-    @property
-    def added(self):
-        '''files that have been added'''
-        return self[1]
-
-    @property
-    def removed(self):
-        '''files that have been removed'''
-        return self[2]
-
-    @property
-    def deleted(self):
-        '''files that are in the dirstate, but have been deleted from the
-        working copy (aka "missing")
-        '''
-        return self[3]
-
-    @property
-    def unknown(self):
-        '''files not in the dirstate that are not ignored'''
-        return self[4]
-
-    @property
-    def ignored(self):
-        '''files not in the dirstate that are ignored (by _dirignore())'''
-        return self[5]
-
-    @property
-    def clean(self):
-        '''files that have not been modified'''
-        return self[6]
-
-    def __repr__(self, *args, **kwargs):
+    modified = attr.ib(default=list)
+    added = attr.ib(default=list)
+    removed = attr.ib(default=list)
+    deleted = attr.ib(default=list)
+    unknown = attr.ib(default=list)
+    ignored = attr.ib(default=list)
+    clean = attr.ib(default=list)
+
+    def __iter__(self):
+        yield self.modified
+        yield self.added
+        yield self.removed
+        yield self.deleted
+        yield self.unknown
+        yield self.ignored
+        yield self.clean
+
+    def __repr__(self):
         return (
             r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
             r'unknown=%s, ignored=%s, clean=%s>'
         ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
 
 
 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
     # Create a (subpath, ctx) mapping where we prefer subpaths from
     # ctx1. The subpaths from ctx2 are important when the .hgsub file
     # has been modified (in ctx2) but not yet committed (in ctx1).
     subpaths = dict.fromkeys(ctx2.substate, ctx2)
     subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
 
     missing = set()
 
     for subpath in ctx2.substate:
         if subpath not in ctx1.substate:
             del subpaths[subpath]
             missing.add(subpath)
 
     for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
         yield subpath, ctx.sub(subpath)
 
     # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
     # status and diff will have an accurate result when it does
     # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
     # against itself.
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)
 
 
 def nochangesfound(ui, repo, excluded=None):
     '''Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
     '''
     secretlist = []
     if excluded:
         for n in excluded:
             ctx = repo[n]
             if ctx.phase() >= phases.secret and not ctx.extinct():
                 secretlist.append(n)
 
     if secretlist:
         ui.status(
             _(b"no changes found (ignored %d secret changesets)\n")
             % len(secretlist)
         )
     else:
         ui.status(_(b"no changes found\n"))
 
 
 def callcatch(ui, func):
     """call func() with global exception handling
 
     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
     try:
         try:
             return func()
         except:  # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         if inst.errno == errno.ETIMEDOUT:
             reason = _(b'timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker)
             )
         else:
             reason = _(b'lock held by %r') % inst.locker
         ui.error(
             _(b"abort: %s: %s\n")
             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
         )
         if not inst.locker:
             ui.error(_(b"(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         ui.error(
             _(b"abort: could not lock %s: %s\n")
             % (
                 inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror),
             )
         )
     except error.OutOfBandError as inst:
         if inst.args:
             msg = _(b"abort: remote error:\n")
         else:
             msg = _(b"abort: remote error\n")
         ui.error(msg)
         if inst.args:
             ui.error(b''.join(inst.args))
         if inst.hint:
             ui.error(b'(%s)\n' % inst.hint)
     except error.RepoError as inst:
         ui.error(_(b"abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except error.ResponseError as inst:
         ui.error(_(b"abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if not isinstance(msg, bytes):
             ui.error(b" %r\n" % (msg,))
         elif not msg:
             ui.error(_(b" empty string\n"))
         else:
             ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
         ui.error(_(b"abort: file censored %s!\n") % inst)
     except error.StorageError as inst:
         ui.error(_(b"abort: %s!\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except error.InterventionRequired as inst:
         ui.error(b"%s\n" % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
         return 1
     except error.WdirUnsupported:
         ui.error(_(b"abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
         ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except ImportError as inst:
         ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in b"mpatch bdiff".split():
             ui.error(_(b"(did you forget to compile extensions?)\n"))
         elif m in b"zlib".split():
             ui.error(_(b"(is your Python install correct?)\n"))
     except (IOError, OSError) as inst:
         if util.safehasattr(inst, b"code"):  # HTTPError
             ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
         elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
             try:  # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
                 # it might be anything, for example a string
                 reason = inst.reason
             if isinstance(reason, pycompat.unicode):
                 # SSLError of Python 2.7.9 contains a unicode
                 reason = encoding.unitolocal(reason)
             ui.error(_(b"abort: error: %s\n") % reason)
         elif (
             util.safehasattr(inst, b"args")
             and inst.args
             and inst.args[0] == errno.EPIPE
         ):
             pass
         elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
                 ui.error(
                     _(b"abort: %s: '%s'\n")
                     % (
                         encoding.strtolocal(inst.strerror),
                         stringutil.forcebytestr(inst.filename),
                     )
                 )
             else:
                 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:  # suspicious IOError
             raise
     except MemoryError:
         ui.error(_(b"abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case catch this and and pass exit code to caller.
         return inst.code
 
     return -1
 
 
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in [b'tip', b'.', b'null']:
         raise error.Abort(_(b"the name '%s' is reserved") % lbl)
     for c in (b':', b'\0', b'\n', b'\r'):
         if c in lbl:
             raise error.Abort(
                 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
             )
     try:
         int(lbl)
         raise error.Abort(_(b"cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
         raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
 
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if b'\r' in f or b'\n' in f:
         raise error.Abort(
             _(b"'\\n' and '\\r' disallowed in filenames: %r")
             % pycompat.bytestr(f)
         )
 
 
 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
     checkfilename(f)
     abort, warn = checkportabilityalert(ui)
     if abort or warn:
         msg = util.checkwinfilename(f)
         if msg:
             msg = b"%s: %s" % (msg, procutil.shellquote(f))
             if abort:
                 raise error.Abort(msg)
             ui.warn(_(b"warning: %s\n") % msg)
 
 
 def checkportabilityalert(ui):
     '''check if the user's config requests nothing, a warning, or abort for
     non-portable filenames'''
     val = ui.config(b'ui', b'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
     abort = pycompat.iswindows or lval == b'abort'
     warn = bval or lval == b'warn'
     if bval is None and not (warn or abort or lval == b'ignore'):
         raise error.ConfigError(
             _(b"ui.portablefilenames value is invalid ('%s')") % val
         )
     return abort, warn
 
 
 class casecollisionauditor(object):
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
         self._abort = abort
         allfiles = b'\0'.join(dirstate)
         self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
         self._dirstate = dirstate
         # The purpose of _newfiles is so that we don't complain about
         # case collisions if someone were to call this object with the
         # same filename twice.
         self._newfiles = set()
 
     def __call__(self, f):
         if f in self._newfiles:
             return
         fl = encoding.lower(f)
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _(b'possible case-folding collision for %s') % f
             if self._abort:
                 raise error.Abort(msg)
             self._ui.warn(_(b"warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
 
 def filteredhash(repo, maxrev):
     """build hash of filtered revisions in the current repoview.
 
     Multiple caches perform up-to-date validation by checking that the
     tiprev and tipnode stored in the cache file match the current repository.
     However, this is not sufficient for validating repoviews because the set
     of revisions in the view may change without the repository tiprev and
     tipnode changing.
 
     This function hashes all the revs filtered from the view and returns
     that SHA-1 digest.
     """
     cl = repo.changelog
     if not cl.filteredrevs:
         return None
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
         s = hashlib.sha1()
         for rev in revs:
             s.update(b'%d;' % rev)
         key = s.digest()
     return key
 
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs'''
 
     def errhandler(err):
         if err.filename == path:
             raise err
 
     samestat = getattr(os.path, 'samestat', None)
     if followsym and samestat is not None:
 
         def adddir(dirlst, dirname):
             dirstat = os.stat(dirname)
             match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
             if not match:
                 dirlst.append(dirstat)
             return not match
 
     else:
         followsym = False
 
     if (seen_dirs is None) and followsym:
         seen_dirs = []
         adddir(seen_dirs, path)
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
         if b'.hg' in dirs:
             yield root  # found a repository
             qroot = os.path.join(root, b'.hg', b'patches')
             if os.path.isdir(os.path.join(qroot, b'.hg')):
                 yield qroot  # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
                 dirs.remove(b'.hg')
             else:
                 dirs[:] = []  # don't descend further
         elif followsym:
             newdirs = []
             for d in dirs:
                 fname = os.path.join(root, d)
                 if adddir(seen_dirs, fname):
                     if os.path.islink(fname):
                         for hgname in walkrepos(fname, True, seen_dirs):
                             yield hgname
                     else:
                         newdirs.append(d)
             dirs[:] = newdirs
 
 
 def binnode(ctx):
     """Return binary node id for a given basectx"""
     node = ctx.node()
     if node is None:
         return wdirid
     return node
 
 
 def intrev(ctx):
     """Return integer for a given basectx that can be used in comparison or
     arithmetic operation"""
     rev = ctx.rev()
     if rev is None:
         return wdirrev
     return rev
 
 
 def formatchangeid(ctx):
     """Format changectx as '{rev}:{node|formatnode}', which is the default
     template provided by logcmdutil.changesettemplater"""
     repo = ctx.repo()
     return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
 
 
 def formatrevnode(ui, rev, node):
     """Format given revision and node depending on the current verbosity"""
     if ui.debugflag:
         hexfunc = hex
     else:
         hexfunc = short
     return b'%d:%s' % (rev, hexfunc(node))
 
 
 def resolvehexnodeidprefix(repo, prefix):
     if prefix.startswith(b'x') and repo.ui.configbool(
         b'experimental', b'revisions.prefixhexnode'
     ):
         prefix = prefix[1:]
     try:
         # Uses unfiltered repo because it's faster when prefix is ambiguous/
         # This matches the shortesthexnodeidprefix() function below.
         node = repo.unfiltered().changelog._partialmatch(prefix)
     except error.AmbiguousPrefixLookupError:
         revset = repo.ui.config(
             b'experimental', b'revisions.disambiguatewithin'
         )
         if revset:
             # Clear config to avoid infinite recursion
             configoverrides = {
                 (b'experimental', b'revisions.disambiguatewithin'): None
             }
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
                 matches = []
                 for rev in revs:
                     node = repo.changelog.node(rev)
                     if hex(node).startswith(prefix):
                         matches.append(node)
                 if len(matches) == 1:
                     return matches[0]
         raise
     if node is None:
         return
     repo.changelog.rev(node)  # make sure node isn't filtered
     return node
 
 
 def mayberevnum(repo, prefix):
     """Checks if the given prefix may be mistaken for a revision number"""
     try:
         i = int(prefix)
         # if we are a pure int, then starting with zero will not be
         # confused as a rev; or, obviously, if the int is larger
         # than the value of the tip rev. We still need to disambiguate if
         # prefix == '0', since that *is* a valid revnum.
         if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
             return False
         return True
     except ValueError:
         return False
 
 
 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
     """Find the shortest unambiguous prefix that matches hexnode.
 
     If "cache" is not None, it must be a dictionary that can be used for
     caching between calls to this method.
     """
     # _partialmatch() of filtered changelog could take O(len(repo)) time,
     # which would be unacceptably slow. so we look for hash collision in
     # unfiltered space, which means some hashes may be slightly longer.
 
     minlength = max(minlength, 1)
 
     def disambiguate(prefix):
         """Disambiguate against revnums."""
         if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
             if mayberevnum(repo, prefix):
                 return b'x' + prefix
             else:
                 return prefix
 
         hexnode = hex(node)
         for length in range(len(prefix), len(hexnode) + 1):
             prefix = hexnode[:length]
             if not mayberevnum(repo, prefix):
                 return prefix
 
     cl = repo.unfiltered().changelog
     revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
     if revset:
         revs = None
         if cache is not None:
             revs = cache.get(b'disambiguationrevset')
         if revs is None:
             revs = repo.anyrevs([revset], user=True)
             if cache is not None:
                 cache[b'disambiguationrevset'] = revs
         if cl.rev(node) in revs:
             hexnode = hex(node)
             nodetree = None
             if cache is not None:
                 nodetree = cache.get(b'disambiguationnodetree')
             if not nodetree:
                 try:
                     nodetree = parsers.nodetree(cl.index, len(revs))
                 except AttributeError:
                     # no native nodetree
                     pass
                 else:
                     for r in revs:
                         nodetree.insert(r)
                     if cache is not None:
                         cache[b'disambiguationnodetree'] = nodetree
             if nodetree is not None:
                 length = max(nodetree.shortest(node), minlength)
                 prefix = hexnode[:length]
                 return disambiguate(prefix)
             for length in range(minlength, len(hexnode) + 1):
                 matches = []
                 prefix = hexnode[:length]
                 for rev in revs:
                     otherhexnode = repo[rev].hex()
                     if prefix == otherhexnode[:length]:
                         matches.append(otherhexnode)
                 if len(matches) == 1:
                     return disambiguate(prefix)
 
     try:
         return disambiguate(cl.shortest(node, minlength))
     except error.LookupError:
         raise error.RepoLookupError()
 
 
 def isrevsymbol(repo, symbol):
     """Checks if a symbol exists in the repo.
 
     See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
     symbol is an ambiguous nodeid prefix.
     """
     try:
         revsymbol(repo, symbol)
         return True
     except error.RepoLookupError:
         return False
 
 
 def revsymbol(repo, symbol):
     """Returns a context given a single revision symbol (as string).
 
     This is similar to revsingle(), but accepts only a single revision symbol,
     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
     not "max(public())".
     """
     if not isinstance(symbol, bytes):
         msg = (
             b"symbol (%s of type %s) was not a string, did you mean "
             b"repo[symbol]?" % (symbol, type(symbol))
         )
         raise error.ProgrammingError(msg)
     try:
         if symbol in (b'.', b'tip', b'null'):
             return repo[symbol]
 
         try:
             r = int(symbol)
             if b'%d' % r != symbol:
                 raise ValueError
             l = len(repo.changelog)
             if r < 0:
                 r += l
             if r < 0 or r >= l and r != wdirrev:
                 raise ValueError
             return repo[r]
         except error.FilteredIndexError:
             raise
         except (ValueError, OverflowError, IndexError):
             pass
 
         if len(symbol) == 40:
             try:
                 node = bin(symbol)
                 rev = repo.changelog.rev(node)
                 return repo[rev]
             except error.FilteredLookupError:
                 raise
             except (TypeError, LookupError):
                 pass
 
         # look up bookmarks through the name interface
         try:
             node = repo.names.singlenode(repo, symbol)
             rev = repo.changelog.rev(node)
             return repo[rev]
         except KeyError:
             pass
 
         node = resolvehexnodeidprefix(repo, symbol)
         if node is not None:
             rev = repo.changelog.rev(node)
             return repo[rev]
 
         raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
 
     except error.WdirUnsupported:
         return repo[None]
     except (
         error.FilteredIndexError,
         error.FilteredLookupError,
         error.FilteredRepoLookupError,
     ):
         raise _filterederror(repo, symbol)
 
 
 def _filterederror(repo, changeid):
     """build an exception to be raised about a filtered changeid
 
     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith(b'visible'):
 
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)
 
         # If the changeset is obsolete, enrich the message with the reason
         # that made this changeset not visible
         if ctx.obsolete():
             msg = obsutil._getfilteredreason(repo, changeid, ctx)
         else:
             msg = _(b"hidden revision '%s'") % changeid
 
         hint = _(b'use --hidden to access hidden revisions')
 
         return error.FilteredRepoLookupError(msg, hint=hint)
     msg = _(b"filtered revision '%s' (not in '%s' subset)")
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)
 
 
 def revsingle(repo, revspec, default=b'.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]
 
     l = revrange(repo, [revspec], localalias=localalias)
     if not l:
         raise error.Abort(_(b'empty revision set'))
     return repo[l.last()]
 
 
 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
     return tree and tree[0] in (
         b'range',
         b'rangepre',
         b'rangepost',
         b'rangeall',
     )
 
 
 def revpair(repo, revs):
     if not revs:
         return repo[b'.'], repo[None]
 
     l = revrange(repo, revs)
 
     if not l:
         raise error.Abort(_(b'empty revision range'))
 
     first = l.first()
     second = l.last()
 
     if (
         first == second
         and len(revs) >= 2
         and not all(revrange(repo, [r]) for r in revs)
     ):
         raise error.Abort(_(b'empty revision on one side of range'))
 
     # if top-level is range expression, the result must always be a pair
     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
         return repo[first], repo[None]
 
     return repo[first], repo[second]
 
 
 def revrange(repo, specs, localalias=None):
     """Execute 1 to many revsets and return the union.
 
     This is the preferred mechanism for executing revsets using user-specified
     config options, such as revset aliases.
 
     The revsets specified by ``specs`` will be executed via a chained ``OR``
     expression. If ``specs`` is empty, an empty result is returned.
 
     ``specs`` can contain integers, in which case they are assumed to be
     revision numbers.
 
     It is assumed the revsets are already formatted. If you have arguments
     that need to be expanded in the revset, call ``revsetlang.formatspec()``
     and pass the result as an element of ``specs``.
 
     Specifying a single revset is allowed.
 
     Returns a ``revset.abstractsmartset`` which is a list-like interface over
     integer revisions.
     """
     allspecs = []
     for spec in specs:
         if isinstance(spec, int):
             spec = revsetlang.formatspec(b'%d', spec)
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)
 
 
785 def meaningfulparents(repo, ctx):
759 def meaningfulparents(repo, ctx):
786 """Return list of meaningful (or all if debug) parentrevs for rev.
760 """Return list of meaningful (or all if debug) parentrevs for rev.
787
761
788 For merges (two non-nullrev revisions) both parents are meaningful.
762 For merges (two non-nullrev revisions) both parents are meaningful.
789 Otherwise the first parent revision is considered meaningful if it
763 Otherwise the first parent revision is considered meaningful if it
790 is not the preceding revision.
764 is not the preceding revision.
791 """
765 """
792 parents = ctx.parents()
766 parents = ctx.parents()
793 if len(parents) > 1:
767 if len(parents) > 1:
794 return parents
768 return parents
795 if repo.ui.debugflag:
769 if repo.ui.debugflag:
796 return [parents[0], repo[nullrev]]
770 return [parents[0], repo[nullrev]]
797 if parents[0].rev() >= intrev(ctx) - 1:
771 if parents[0].rev() >= intrev(ctx) - 1:
798 return []
772 return []
799 return parents
773 return parents
800
774
801
775
802 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
776 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
803 """Return a function that produced paths for presenting to the user.
777 """Return a function that produced paths for presenting to the user.
804
778
805 The returned function takes a repo-relative path and produces a path
779 The returned function takes a repo-relative path and produces a path
806 that can be presented in the UI.
780 that can be presented in the UI.
807
781
808 Depending on the value of ui.relative-paths, either a repo-relative or
782 Depending on the value of ui.relative-paths, either a repo-relative or
809 cwd-relative path will be produced.
783 cwd-relative path will be produced.
810
784
811 legacyrelativevalue is the value to use if ui.relative-paths=legacy
785 legacyrelativevalue is the value to use if ui.relative-paths=legacy
812
786
813 If forcerelativevalue is not None, then that value will be used regardless
787 If forcerelativevalue is not None, then that value will be used regardless
814 of what ui.relative-paths is set to.
788 of what ui.relative-paths is set to.
815 """
789 """
816 if forcerelativevalue is not None:
790 if forcerelativevalue is not None:
817 relative = forcerelativevalue
791 relative = forcerelativevalue
818 else:
792 else:
819 config = repo.ui.config(b'ui', b'relative-paths')
793 config = repo.ui.config(b'ui', b'relative-paths')
820 if config == b'legacy':
794 if config == b'legacy':
821 relative = legacyrelativevalue
795 relative = legacyrelativevalue
822 else:
796 else:
823 relative = stringutil.parsebool(config)
797 relative = stringutil.parsebool(config)
824 if relative is None:
798 if relative is None:
825 raise error.ConfigError(
799 raise error.ConfigError(
826 _(b"ui.relative-paths is not a boolean ('%s')") % config
800 _(b"ui.relative-paths is not a boolean ('%s')") % config
827 )
801 )
828
802
829 if relative:
803 if relative:
830 cwd = repo.getcwd()
804 cwd = repo.getcwd()
831 pathto = repo.pathto
805 pathto = repo.pathto
832 return lambda f: pathto(f, cwd)
806 return lambda f: pathto(f, cwd)
833 elif repo.ui.configbool(b'ui', b'slash'):
807 elif repo.ui.configbool(b'ui', b'slash'):
834 return lambda f: f
808 return lambda f: f
835 else:
809 else:
836 return util.localpath
810 return util.localpath
837
811
838
812
839 def subdiruipathfn(subpath, uipathfn):
813 def subdiruipathfn(subpath, uipathfn):
840 '''Create a new uipathfn that treats the file as relative to subpath.'''
814 '''Create a new uipathfn that treats the file as relative to subpath.'''
841 return lambda f: uipathfn(posixpath.join(subpath, f))
815 return lambda f: uipathfn(posixpath.join(subpath, f))
842
816
843
817
844 def anypats(pats, opts):
818 def anypats(pats, opts):
845 '''Checks if any patterns, including --include and --exclude were given.
819 '''Checks if any patterns, including --include and --exclude were given.
846
820
847 Some commands (e.g. addremove) use this condition for deciding whether to
821 Some commands (e.g. addremove) use this condition for deciding whether to
848 print absolute or relative paths.
822 print absolute or relative paths.
849 '''
823 '''
850 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
824 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
851
825
852
826
853 def expandpats(pats):
827 def expandpats(pats):
854 '''Expand bare globs when running on windows.
828 '''Expand bare globs when running on windows.
855 On posix we assume it already has already been done by sh.'''
829 On posix we assume it already has already been done by sh.'''
856 if not util.expandglobs:
830 if not util.expandglobs:
857 return list(pats)
831 return list(pats)
858 ret = []
832 ret = []
859 for kindpat in pats:
833 for kindpat in pats:
860 kind, pat = matchmod._patsplit(kindpat, None)
834 kind, pat = matchmod._patsplit(kindpat, None)
861 if kind is None:
835 if kind is None:
862 try:
836 try:
863 globbed = glob.glob(pat)
837 globbed = glob.glob(pat)
864 except re.error:
838 except re.error:
865 globbed = [pat]
839 globbed = [pat]
866 if globbed:
840 if globbed:
867 ret.extend(globbed)
841 ret.extend(globbed)
868 continue
842 continue
869 ret.append(kindpat)
843 ret.append(kindpat)
870 return ret
844 return ret
871
845
872
846
873 def matchandpats(
847 def matchandpats(
874 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
848 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
875 ):
849 ):
876 '''Return a matcher and the patterns that were used.
850 '''Return a matcher and the patterns that were used.
877 The matcher will warn about bad matches, unless an alternate badfn callback
851 The matcher will warn about bad matches, unless an alternate badfn callback
878 is provided.'''
852 is provided.'''
879 if opts is None:
853 if opts is None:
880 opts = {}
854 opts = {}
881 if not globbed and default == b'relpath':
855 if not globbed and default == b'relpath':
882 pats = expandpats(pats or [])
856 pats = expandpats(pats or [])
883
857
884 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
858 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
885
859
886 def bad(f, msg):
860 def bad(f, msg):
887 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
861 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
888
862
889 if badfn is None:
863 if badfn is None:
890 badfn = bad
864 badfn = bad
891
865
892 m = ctx.match(
866 m = ctx.match(
893 pats,
867 pats,
894 opts.get(b'include'),
868 opts.get(b'include'),
895 opts.get(b'exclude'),
869 opts.get(b'exclude'),
896 default,
870 default,
897 listsubrepos=opts.get(b'subrepos'),
871 listsubrepos=opts.get(b'subrepos'),
898 badfn=badfn,
872 badfn=badfn,
899 )
873 )
900
874
901 if m.always():
875 if m.always():
902 pats = []
876 pats = []
903 return m, pats
877 return m, pats
904
878
905
879
906 def match(
880 def match(
907 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
881 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
908 ):
882 ):
909 '''Return a matcher that will warn about bad matches.'''
883 '''Return a matcher that will warn about bad matches.'''
910 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
884 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
911
885
912
886
913 def matchall(repo):
887 def matchall(repo):
914 '''Return a matcher that will efficiently match everything.'''
888 '''Return a matcher that will efficiently match everything.'''
915 return matchmod.always()
889 return matchmod.always()
916
890
917
891
918 def matchfiles(repo, files, badfn=None):
892 def matchfiles(repo, files, badfn=None):
919 '''Return a matcher that will efficiently match exactly these files.'''
893 '''Return a matcher that will efficiently match exactly these files.'''
920 return matchmod.exact(files, badfn=badfn)
894 return matchmod.exact(files, badfn=badfn)
921
895
922
896
923 def parsefollowlinespattern(repo, rev, pat, msg):
897 def parsefollowlinespattern(repo, rev, pat, msg):
924 """Return a file name from `pat` pattern suitable for usage in followlines
898 """Return a file name from `pat` pattern suitable for usage in followlines
925 logic.
899 logic.
926 """
900 """
927 if not matchmod.patkind(pat):
901 if not matchmod.patkind(pat):
928 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
902 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
929 else:
903 else:
930 ctx = repo[rev]
904 ctx = repo[rev]
931 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
905 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
932 files = [f for f in ctx if m(f)]
906 files = [f for f in ctx if m(f)]
933 if len(files) != 1:
907 if len(files) != 1:
934 raise error.ParseError(msg)
908 raise error.ParseError(msg)
935 return files[0]
909 return files[0]
936
910
937
911
938 def getorigvfs(ui, repo):
912 def getorigvfs(ui, repo):
939 """return a vfs suitable to save 'orig' file
913 """return a vfs suitable to save 'orig' file
940
914
941 return None if no special directory is configured"""
915 return None if no special directory is configured"""
942 origbackuppath = ui.config(b'ui', b'origbackuppath')
916 origbackuppath = ui.config(b'ui', b'origbackuppath')
943 if not origbackuppath:
917 if not origbackuppath:
944 return None
918 return None
945 return vfs.vfs(repo.wvfs.join(origbackuppath))
919 return vfs.vfs(repo.wvfs.join(origbackuppath))
946
920
947
921
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)


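# Editor's illustrative sketch, not part of the original module: with a user
# configuration such as
#
#     [ui]
#     origbackuppath = .hg/origbackups
#
# backuppath() would resolve a backup location for a repo-relative file
# (b'dir/file' here is hypothetical) roughly as:
#
#     backup = backuppath(repo.ui, repo, b'dir/file')
#     # -> absolute path under <repo>/.hg/origbackups/dir/file, after any
#     #    conflicting files or directories on that path are removed

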
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )


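# Editor's illustrative sketch, not part of the original module: after the
# normalization above, 'replacements' maps tuples of old nodes to tuples of
# successors. A rebase-like caller (all node ids hypothetical) might do:
#
#     replacements = {
#         (oldnode1,): (newnode1,),           # simple rewrite
#         (oldnode2, oldnode3): (newnode2,),  # fold of two nodes
#         (oldnode4,): (),                    # prune, no successor
#     }
#     cleanupnodes(repo, replacements, operation=b'rebase')

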
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret


def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


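# For reference, the decision table implemented by _interestingfiles() above
# ('st' is the stat result from dirstate.walk; '?'/'r'/'a' are dirstate
# codes for unknown/removed/added):
#
#     dirstate    on disk     -> bucket
#     '?'         (audited)      unknown
#     not 'r'     missing        deleted
#     'r'         present        forgotten
#     'r'         missing        removed
#     'a'         any            added

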
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


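# Editor's illustrative sketch, not part of the original module: resolving
# the copy source of one file in one revision (b'copied.txt' and rev are
# hypothetical):
#
#     getrenamed = getrenamedfn(repo)
#     src = getrenamed(b'copied.txt', rev)
#     # src is the source path (bytes) if the file was copied or renamed
#     # in that revision, else None

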
def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    copies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = dict(
        (dst, oldcopies.get(src, src))
        for dst, src in pycompat.iteritems(oldcopies)
    )
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)


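# Editor's illustrative sketch, not part of the original module: a history
# rewriting command that has just created 'newctx' from the old working-copy
# parent might repoint the dirstate with (assuming dirstate.parentchange()
# is available, as in contemporary Mercurial):
#
#     with repo.wlock(), repo.dirstate.parentchange():
#         movedirstate(repo, newctx)

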
def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x


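# Editor's illustrative sketch, not part of the original module: a typical
# subclass joins tracked paths against the repository vfs and decorates a
# property-style method; 'repofilecache' and 'parsebookmarks' are
# hypothetical names here (localrepo defines a similar subclass), and the
# instance must carry a '_filecache' dict:
#
#     class repofilecache(filecache):
#         def join(self, obj, fname):
#             return obj.vfs.join(fname)
#
#     class repo(object):
#         _filecache = {}
#
#         @repofilecache(b'bookmarks')
#         def _bookmarks(self):
#             return parsebookmarks(self.vfs.read(b'bookmarks'))

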
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data


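# Editor's illustrative sketch, not part of the original module: with a
# configuration such as
#
#     [extdata]
#     bugzilla = shell:cat .hg/bugdata
#
# where .hg/bugdata contains newline-separated "<revspec> <value>" records,
# extdatasource(repo, b"bugzilla") returns a {rev: value} dict covering the
# revisions that exist locally.

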
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            b'lock can only be inherited while held'
        )
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)


def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(
        repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
    )


class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # initialize 'unit' so it is defined even when self.unit is empty
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))


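# Editor's illustrative sketch, not part of the original module: callers
# normally obtain a progress object via ui.makeprogress() and drive it as a
# context manager ('files' is hypothetical):
#
#     with ui.makeprogress(b'scanning', unit=b'files',
#                          total=len(files)) as p:
#         for f in files:
#             p.increment(item=f)
#     # __exit__ calls complete(), which clears the progress bar

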
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))


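# Editor's illustrative sketch, not part of the original module: a round
# trip through a state file (the b'mystate' path is hypothetical):
#
#     skv = simplekeyvaluefile(repo.vfs, b'mystate')
#     skv.write({b'version': b'1', b'node': hex(node)})
#     data = skv.read()
#     # data == {b'version': b'1', b'node': ...}

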
_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()
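
# A minimal sketch of how an extension might register a prefetch function via
# the hooks container above (the extension and function names are
# illustrative; real extensions call fileprefetchhooks.add() at load time):
#
#     def exampleprefetch(repo, revs, match):
#         # fetch the matched files for `revs` from a remote store here,
#         # so later file accesses do not stall mid-command
#         pass
#
#     fileprefetchhooks.add('exampleext', exampleprefetch)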

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


def registersummarycallback(repo, otr, txnname=b''):
    """register a callback to issue a summary after the transaction is closed
    """

    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository, so the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))

        if obsolete.isenabled(
            repo, obsolete.createmarkersopt
        ) and repo.ui.configbool(
            b'experimental', b'evolution.report-instabilities'
        ):
            instabilitytypes = [
                (b'orphan', b'orphan'),
                (b'phase-divergent', b'phasedivergent'),
                (b'content-divergent', b'contentdivergent'),
            ]

            def getinstabilitycounts(repo):
                filtered = repo.changelog.filteredrevs
                counts = {}
                for instability, revset in instabilitytypes:
                    counts[instability] = len(
                        set(obsolete.getrevs(repo, revset)) - filtered
                    )
                return counts

            oldinstabilitycounts = getinstabilitycounts(repo)

            @reportsummary
            def reportnewinstabilities(repo, tr):
                newinstabilitycounts = getinstabilitycounts(repo)
                for instability, revset in instabilitytypes:
                    delta = (
                        newinstabilitycounts[instability]
                        - oldinstabilitycounts[instability]
                    )
                    msg = getinstabilitymessage(delta, instability)
                    if msg:
                        repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of the range of new visible revisions.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search for new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally, but the term has not
                # been exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            phasetracking = tr.changes.get(b'phases', {})
            if not phasetracking:
                return
            published = [
                rev
                for rev, (old, new) in pycompat.iteritems(phasetracking)
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(
                _(b'%d local changesets published\n') % len(published)
            )
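
# A minimal sketch of the call site for registersummarycallback (transaction
# handling elided; core performs this wiring when opening a transaction for
# e.g. pull or unbundle):
#
#     tr = repo.transaction(b'pull')
#     registersummarycallback(repo, tr, txnname=b'pull')
#     # ... apply the incoming changes ...
#     tr.close()  # the registered report callbacks run post-close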


def getinstabilitymessage(delta, instability):
    """return the warning message to show about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)
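
# Because getinstabilitymessage() is a standalone function, an extension can
# wrap it to append guidance; a hedged sketch (the wrapper name and extra
# message text are illustrative, assuming the standard
# extensions.wrapfunction() API):
#
#     from mercurial import extensions, scmutil
#
#     def _verboseinstabilitymsg(orig, delta, instability):
#         msg = orig(delta, instability)
#         if msg:
#             msg += _(b"(see 'hg help evolution' for details)\n")
#         return msg
#
#     extensions.wrapfunction(scmutil, 'getinstabilitymessage',
#                             _verboseinstabilitymsg)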


def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
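
# For example (hashes hypothetical): with six nodes, the default
# maxnumnodes=4 and a non-verbose ui, nodesummaries() returns something like
# b'1f0dee641bb7 8580ff50825a 2dc09a01254d c7c197af27ed and 2 others'.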


def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
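
# A hedged sketch of wiring this check into a transaction (the config knob
# and validator category below are illustrative; core arranges an equivalent
# validator when a single-head policy is enabled):
#
#     if repo.ui.configbool(b'experimental', b'single-head-per-branch'):
#         tr.addvalidator(
#             b'zz-singlehead', lambda tr2: enforcesinglehead(repo, tr2, desc)
#         )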


def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink
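
# An extension can replace this no-op to decorate every convert sink; a
# hedged sketch (the proxy class name is hypothetical, assuming the standard
# extensions.wrapfunction() API):
#
#     def _wrappedconvertsink(orig, sink):
#         return examplesinkproxy(orig(sink))
#
#     extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrappedconvertsink)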


def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)
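
# A minimal usage sketch (the hash prefix is hypothetical): with
# experimental.directaccess enabled, a command that accepts user-supplied
# revisions can pin hidden changesets before resolving them:
#
#     repo = unhidehashlikerevs(repo, [b'ff2c9fa2'], hiddentype=b'warn')
#     # subsequent revision resolution against `repo` now sees the hidden
#     # changeset named by b'ff2c9fa2', after emitting the warning above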


def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of the
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
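
# A minimal usage sketch (the bookmark name is illustrative): operations in
# the style of 'hg strip -B feature' can compute the bookmark-only revisions
# as
#
#     revs = bookmarkrevs(repo, b'feature')
#
# i.e. ancestors of the bookmark that are reachable neither from other heads
# nor from other bookmarks.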