##// END OF EJS Templates
nodetree: simplify a conditional in shortesthexnodeidprefix...
marmoute -
r44363:f2de8dc9 default
parent child Browse files
Show More
@@ -1,2197 +1,2195 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .pycompat import getattr
29 from .pycompat import getattr
30 from .thirdparty import attr
30 from .thirdparty import attr
31 from . import (
31 from . import (
32 copies as copiesmod,
32 copies as copiesmod,
33 encoding,
33 encoding,
34 error,
34 error,
35 match as matchmod,
35 match as matchmod,
36 obsolete,
36 obsolete,
37 obsutil,
37 obsutil,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 policy,
40 policy,
41 pycompat,
41 pycompat,
42 revsetlang,
42 revsetlang,
43 similar,
43 similar,
44 smartset,
44 smartset,
45 url,
45 url,
46 util,
46 util,
47 vfs,
47 vfs,
48 )
48 )
49
49
50 from .utils import (
50 from .utils import (
51 procutil,
51 procutil,
52 stringutil,
52 stringutil,
53 )
53 )
54
54
55 if pycompat.iswindows:
55 if pycompat.iswindows:
56 from . import scmwindows as scmplatform
56 from . import scmwindows as scmplatform
57 else:
57 else:
58 from . import scmposix as scmplatform
58 from . import scmposix as scmplatform
59
59
60 parsers = policy.importmod('parsers')
60 parsers = policy.importmod('parsers')
61
61
62 termsize = scmplatform.termsize
62 termsize = scmplatform.termsize
63
63
64
64
@attr.s(slots=True, repr=False)
class status(object):
    '''Struct with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only
    relevant to the working copy.
    '''

    # NOTE: field order defines both the generated __init__ signature and
    # the iteration order below, so it must not be rearranged.
    modified = attr.ib(default=attr.Factory(list))
    added = attr.ib(default=attr.Factory(list))
    removed = attr.ib(default=attr.Factory(list))
    deleted = attr.ib(default=attr.Factory(list))
    unknown = attr.ib(default=attr.Factory(list))
    ignored = attr.ib(default=attr.Factory(list))
    clean = attr.ib(default=attr.Factory(list))

    def __iter__(self):
        # Yield the per-status file lists in the canonical field order.
        for filelist in (
            self.modified,
            self.added,
            self.removed,
            self.deleted,
            self.unknown,
            self.ignored,
            self.clean,
        ):
            yield filelist

    def __repr__(self):
        template = (
            r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
            r'unknown=%s, ignored=%s, clean=%s>'
        )
        rendered = tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
        return template % rendered
95
95
96
96
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath -> ctx) mapping where ctx1's entries win: the
    # subpaths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but not yet committed (in ctx1).
    subpaths = {}
    for subpath in ctx2.substate:
        subpaths[subpath] = ctx2
    for subpath in ctx1.substate:
        subpaths[subpath] = ctx1

    # Subrepos present only in ctx2 are handled separately below.
    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
121
121
122
122
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Collect excluded nodes that are secret (and still alive) so the user
    # can be told why "nothing" was exchanged.
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_(b"no changes found\n"))
    else:
        ui.status(
            _(b"no changes found (ignored %d secret changesets)\n")
            % len(secretlist)
        )
141
141
142
142
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except:  # re-raises
            # Print the traceback (when enabled) before any handler below
            # turns the exception into a user-facing message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    # NOTE: handler order matters -- the first matching clause wins.
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _(b'timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker)
            )
        else:
            reason = _(b'lock held by %r') % inst.locker
        ui.error(
            _(b"abort: %s: %s\n")
            % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
        )
        if not inst.locker:
            ui.error(_(b"(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(
            _(b"abort: could not lock %s: %s\n")
            % (
                inst.desc or stringutil.forcebytestr(inst.filename),
                encoding.strtolocal(inst.strerror),
            )
        )
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _(b"abort: remote error:\n")
        else:
            msg = _(b"abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(b''.join(inst.args))
        if inst.hint:
            ui.error(b'(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_(b"abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode, bytes, or anything else; normalize
        # and render it as safely as possible.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(b" %r\n" % (msg,))
        elif not msg:
            ui.error(_(b" empty string\n"))
        else:
            ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_(b"abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error(b"%s\n" % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
        # Intervention required is reported with exit code 1, not -1.
        return 1
    except error.WdirUnsupported:
        ui.error(_(b"abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_(b"abort: %s\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
        # The last word of the message is the module that failed to import.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in b"mpatch bdiff".split():
            ui.error(_(b"(did you forget to compile extensions?)\n"))
        elif m in b"zlib".split():
            ui.error(_(b"(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, b"code"):  # HTTPError
            ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
            try:  # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_(b"abort: error: %s\n") % reason)
        elif (
            util.safehasattr(inst, b"args")
            and inst.args
            and inst.args[0] == errno.EPIPE
        ):
            # Broken pipe (e.g. output piped to a pager that exited) is
            # deliberately silent.
            pass
        elif getattr(inst, "strerror", None):  # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(
                    _(b"abort: %s: '%s'\n")
                    % (
                        encoding.strtolocal(inst.strerror),
                        stringutil.forcebytestr(inst.filename),
                    )
                )
            else:
                ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:  # suspicious IOError
            raise
    except MemoryError:
        ui.error(_(b"abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code

    return -1
267
267
268
268
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in (b'tip', b'.', b'null'):
        raise error.Abort(_(b"the name '%s' is reserved") % lbl)
    # Characters that would break revset/label parsing are forbidden.
    for forbidden in (b':', b'\0', b'\n', b'\r'):
        if forbidden in lbl:
            raise error.Abort(
                _(b"%r cannot be used in a name") % pycompat.bytestr(forbidden)
            )
    # A label that parses as an integer would shadow revision numbers.
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_(b"cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
286
286
287
287
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the manifest encoding.
    if any(banned in f for banned in (b'\r', b'\n')):
        raise error.Abort(
            _(b"'\\n' and '\\r' disallowed in filenames: %r")
            % pycompat.bytestr(f)
        )
295
295
296
296
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        # Config requests neither warning nor aborting: nothing more to do.
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = b"%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_(b"warning: %s\n") % msg)
308
308
309
309
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config(b'ui', b'portablefilenames')
    lowered = raw.lower()
    parsed = stringutil.parsebool(raw)
    # Windows always aborts on non-portable names, whatever the config says.
    abort = pycompat.iswindows or lowered == b'abort'
    warn = parsed or lowered == b'warn'
    # Anything that is not a boolean, 'abort', 'warn' or 'ignore' is a
    # configuration error.
    recognized = parsed is not None or warn or abort or lowered == b'ignore'
    if not recognized:
        raise error.ConfigError(
            _(b"ui.portablefilenames value is invalid ('%s')") % raw
        )
    return abort, warn
323
323
324
324
class casecollisionauditor(object):
    '''Detect case-folding collisions between new filenames and tracked ones,
    either warning about them or aborting depending on configuration.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lowering all tracked names in a single pass (joined with NUL) is
        # cheaper than lowering each name individually.
        joined = b'\0'.join(dirstate)
        self._loweredfiles = set(encoding.lower(joined).split(b'\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _(b'possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_(b"warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
348
348
349
349
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # Only revisions at or below maxrev participate, in sorted order so the
    # digest is deterministic.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    hasher = hashlib.sha1()
    for rev in revs:
        hasher.update(b'%d;' % rev)
    return hasher.digest()
373
373
374
374
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''

    def errhandler(err):
        # Only errors on the starting path itself are fatal; errors deeper
        # in the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err

    # samestat is needed for symlink-cycle detection; it is not available
    # on every platform (hence the getattr), so followsym is disabled below
    # when it is missing.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst; return True only if it was
            # not already present (i.e. this directory is new to the walk).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # Sort for deterministic yield order.
        dirs.sort()
        if b'.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, b'.hg', b'patches')
            if os.path.isdir(os.path.join(qroot, b'.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(b'.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk through the symlink explicitly (os.walk does
                        # not follow links), reusing seen_dirs to avoid
                        # revisiting directories through link cycles.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Prune already-seen directories and handled symlinks in place.
            dirs[:] = newdirs
422
422
423
423
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory has no real node id; substitute the magic wdirid.
    node = ctx.node()
    return wdirid if node is None else node
430
430
431
431
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory has no integer revision; substitute wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
439
439
440
440
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
446
446
447
447
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug mode shows the full hash, normal mode the short form.
    hexfunc = hex if ui.debugflag else short
    return b'%d:%s' % (rev, hexfunc(node))
455
455
456
456
def resolvehexnodeidprefix(repo, prefix):
    # Resolve a (possibly abbreviated) hex nodeid prefix to a full binary
    # node.  Returns None when nothing matches; re-raises
    # AmbiguousPrefixLookupError when the prefix cannot be disambiguated.
    if prefix.startswith(b'x') and repo.ui.configbool(
        b'experimental', b'revisions.prefixhexnode'
    ):
        # Strip the experimental literal 'x' marker that forces the rest of
        # the string to be read as a hex node rather than a revnum.
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        # Optionally retry the match restricted to a user-configured revset.
        revset = repo.ui.config(
            b'experimental', b'revisions.disambiguatewithin'
        )
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {
                (b'experimental', b'revisions.disambiguatewithin'): None
            }
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                # Unique within the revset: the ambiguity is resolved.
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node)  # make sure node isn't filtered
    return node
489
489
490
490
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        value = int(prefix)
    except ValueError:
        # Not numeric at all, so it cannot be read as a revnum.
        return False
    # A pure int starting with zero (other than '0' itself) is never parsed
    # as a revnum, nor is anything past the tip rev.  '0' *is* a valid
    # revnum, so it still needs disambiguation.
    if prefix != b'0' and prefix[0:1] == b'0':
        return False
    if value >= len(repo):
        return False
    return True
504
504
505
505
506 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
506 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
507 """Find the shortest unambiguous prefix that matches hexnode.
507 """Find the shortest unambiguous prefix that matches hexnode.
508
508
509 If "cache" is not None, it must be a dictionary that can be used for
509 If "cache" is not None, it must be a dictionary that can be used for
510 caching between calls to this method.
510 caching between calls to this method.
511 """
511 """
512 # _partialmatch() of filtered changelog could take O(len(repo)) time,
512 # _partialmatch() of filtered changelog could take O(len(repo)) time,
513 # which would be unacceptably slow. so we look for hash collision in
513 # which would be unacceptably slow. so we look for hash collision in
514 # unfiltered space, which means some hashes may be slightly longer.
514 # unfiltered space, which means some hashes may be slightly longer.
515
515
516 minlength = max(minlength, 1)
516 minlength = max(minlength, 1)
517
517
518 def disambiguate(prefix):
518 def disambiguate(prefix):
519 """Disambiguate against revnums."""
519 """Disambiguate against revnums."""
520 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
520 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
521 if mayberevnum(repo, prefix):
521 if mayberevnum(repo, prefix):
522 return b'x' + prefix
522 return b'x' + prefix
523 else:
523 else:
524 return prefix
524 return prefix
525
525
526 hexnode = hex(node)
526 hexnode = hex(node)
527 for length in range(len(prefix), len(hexnode) + 1):
527 for length in range(len(prefix), len(hexnode) + 1):
528 prefix = hexnode[:length]
528 prefix = hexnode[:length]
529 if not mayberevnum(repo, prefix):
529 if not mayberevnum(repo, prefix):
530 return prefix
530 return prefix
531
531
532 cl = repo.unfiltered().changelog
532 cl = repo.unfiltered().changelog
533 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
533 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
534 if revset:
534 if revset:
535 revs = None
535 revs = None
536 if cache is not None:
536 if cache is not None:
537 revs = cache.get(b'disambiguationrevset')
537 revs = cache.get(b'disambiguationrevset')
538 if revs is None:
538 if revs is None:
539 revs = repo.anyrevs([revset], user=True)
539 revs = repo.anyrevs([revset], user=True)
540 if cache is not None:
540 if cache is not None:
541 cache[b'disambiguationrevset'] = revs
541 cache[b'disambiguationrevset'] = revs
542 if cl.rev(node) in revs:
542 if cl.rev(node) in revs:
543 hexnode = hex(node)
543 hexnode = hex(node)
544 nodetree = None
544 nodetree = None
545 if cache is not None:
545 if cache is not None:
546 nodetree = cache.get(b'disambiguationnodetree')
546 nodetree = cache.get(b'disambiguationnodetree')
547 if not nodetree:
547 if not nodetree:
548 try:
548 if util.safehasattr(parsers, 'nodetree'):
549 # The CExt is the only implementation to provide a nodetree
550 # class so far.
549 nodetree = parsers.nodetree(cl.index, len(revs))
551 nodetree = parsers.nodetree(cl.index, len(revs))
550 except AttributeError:
551 # no native nodetree
552 pass
553 else:
554 for r in revs:
552 for r in revs:
555 nodetree.insert(r)
553 nodetree.insert(r)
556 if cache is not None:
554 if cache is not None:
557 cache[b'disambiguationnodetree'] = nodetree
555 cache[b'disambiguationnodetree'] = nodetree
558 if nodetree is not None:
556 if nodetree is not None:
559 length = max(nodetree.shortest(node), minlength)
557 length = max(nodetree.shortest(node), minlength)
560 prefix = hexnode[:length]
558 prefix = hexnode[:length]
561 return disambiguate(prefix)
559 return disambiguate(prefix)
562 for length in range(minlength, len(hexnode) + 1):
560 for length in range(minlength, len(hexnode) + 1):
563 matches = []
561 matches = []
564 prefix = hexnode[:length]
562 prefix = hexnode[:length]
565 for rev in revs:
563 for rev in revs:
566 otherhexnode = repo[rev].hex()
564 otherhexnode = repo[rev].hex()
567 if prefix == otherhexnode[:length]:
565 if prefix == otherhexnode[:length]:
568 matches.append(otherhexnode)
566 matches.append(otherhexnode)
569 if len(matches) == 1:
567 if len(matches) == 1:
570 return disambiguate(prefix)
568 return disambiguate(prefix)
571
569
572 try:
570 try:
573 return disambiguate(cl.shortest(node, minlength))
571 return disambiguate(cl.shortest(node, minlength))
574 except error.LookupError:
572 except error.LookupError:
575 raise error.RepoLookupError()
573 raise error.RepoLookupError()
576
574
577
575
578 def isrevsymbol(repo, symbol):
576 def isrevsymbol(repo, symbol):
579 """Checks if a symbol exists in the repo.
577 """Checks if a symbol exists in the repo.
580
578
581 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
579 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
582 symbol is an ambiguous nodeid prefix.
580 symbol is an ambiguous nodeid prefix.
583 """
581 """
584 try:
582 try:
585 revsymbol(repo, symbol)
583 revsymbol(repo, symbol)
586 return True
584 return True
587 except error.RepoLookupError:
585 except error.RepoLookupError:
588 return False
586 return False
589
587
590
588
591 def revsymbol(repo, symbol):
589 def revsymbol(repo, symbol):
592 """Returns a context given a single revision symbol (as string).
590 """Returns a context given a single revision symbol (as string).
593
591
594 This is similar to revsingle(), but accepts only a single revision symbol,
592 This is similar to revsingle(), but accepts only a single revision symbol,
595 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
593 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
596 not "max(public())".
594 not "max(public())".
597 """
595 """
598 if not isinstance(symbol, bytes):
596 if not isinstance(symbol, bytes):
599 msg = (
597 msg = (
600 b"symbol (%s of type %s) was not a string, did you mean "
598 b"symbol (%s of type %s) was not a string, did you mean "
601 b"repo[symbol]?" % (symbol, type(symbol))
599 b"repo[symbol]?" % (symbol, type(symbol))
602 )
600 )
603 raise error.ProgrammingError(msg)
601 raise error.ProgrammingError(msg)
604 try:
602 try:
605 if symbol in (b'.', b'tip', b'null'):
603 if symbol in (b'.', b'tip', b'null'):
606 return repo[symbol]
604 return repo[symbol]
607
605
608 try:
606 try:
609 r = int(symbol)
607 r = int(symbol)
610 if b'%d' % r != symbol:
608 if b'%d' % r != symbol:
611 raise ValueError
609 raise ValueError
612 l = len(repo.changelog)
610 l = len(repo.changelog)
613 if r < 0:
611 if r < 0:
614 r += l
612 r += l
615 if r < 0 or r >= l and r != wdirrev:
613 if r < 0 or r >= l and r != wdirrev:
616 raise ValueError
614 raise ValueError
617 return repo[r]
615 return repo[r]
618 except error.FilteredIndexError:
616 except error.FilteredIndexError:
619 raise
617 raise
620 except (ValueError, OverflowError, IndexError):
618 except (ValueError, OverflowError, IndexError):
621 pass
619 pass
622
620
623 if len(symbol) == 40:
621 if len(symbol) == 40:
624 try:
622 try:
625 node = bin(symbol)
623 node = bin(symbol)
626 rev = repo.changelog.rev(node)
624 rev = repo.changelog.rev(node)
627 return repo[rev]
625 return repo[rev]
628 except error.FilteredLookupError:
626 except error.FilteredLookupError:
629 raise
627 raise
630 except (TypeError, LookupError):
628 except (TypeError, LookupError):
631 pass
629 pass
632
630
633 # look up bookmarks through the name interface
631 # look up bookmarks through the name interface
634 try:
632 try:
635 node = repo.names.singlenode(repo, symbol)
633 node = repo.names.singlenode(repo, symbol)
636 rev = repo.changelog.rev(node)
634 rev = repo.changelog.rev(node)
637 return repo[rev]
635 return repo[rev]
638 except KeyError:
636 except KeyError:
639 pass
637 pass
640
638
641 node = resolvehexnodeidprefix(repo, symbol)
639 node = resolvehexnodeidprefix(repo, symbol)
642 if node is not None:
640 if node is not None:
643 rev = repo.changelog.rev(node)
641 rev = repo.changelog.rev(node)
644 return repo[rev]
642 return repo[rev]
645
643
646 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
644 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
647
645
648 except error.WdirUnsupported:
646 except error.WdirUnsupported:
649 return repo[None]
647 return repo[None]
650 except (
648 except (
651 error.FilteredIndexError,
649 error.FilteredIndexError,
652 error.FilteredLookupError,
650 error.FilteredLookupError,
653 error.FilteredRepoLookupError,
651 error.FilteredRepoLookupError,
654 ):
652 ):
655 raise _filterederror(repo, symbol)
653 raise _filterederror(repo, symbol)
656
654
657
655
658 def _filterederror(repo, changeid):
656 def _filterederror(repo, changeid):
659 """build an exception to be raised about a filtered changeid
657 """build an exception to be raised about a filtered changeid
660
658
661 This is extracted in a function to help extensions (eg: evolve) to
659 This is extracted in a function to help extensions (eg: evolve) to
662 experiment with various message variants."""
660 experiment with various message variants."""
663 if repo.filtername.startswith(b'visible'):
661 if repo.filtername.startswith(b'visible'):
664
662
665 # Check if the changeset is obsolete
663 # Check if the changeset is obsolete
666 unfilteredrepo = repo.unfiltered()
664 unfilteredrepo = repo.unfiltered()
667 ctx = revsymbol(unfilteredrepo, changeid)
665 ctx = revsymbol(unfilteredrepo, changeid)
668
666
669 # If the changeset is obsolete, enrich the message with the reason
667 # If the changeset is obsolete, enrich the message with the reason
670 # that made this changeset not visible
668 # that made this changeset not visible
671 if ctx.obsolete():
669 if ctx.obsolete():
672 msg = obsutil._getfilteredreason(repo, changeid, ctx)
670 msg = obsutil._getfilteredreason(repo, changeid, ctx)
673 else:
671 else:
674 msg = _(b"hidden revision '%s'") % changeid
672 msg = _(b"hidden revision '%s'") % changeid
675
673
676 hint = _(b'use --hidden to access hidden revisions')
674 hint = _(b'use --hidden to access hidden revisions')
677
675
678 return error.FilteredRepoLookupError(msg, hint=hint)
676 return error.FilteredRepoLookupError(msg, hint=hint)
679 msg = _(b"filtered revision '%s' (not in '%s' subset)")
677 msg = _(b"filtered revision '%s' (not in '%s' subset)")
680 msg %= (changeid, repo.filtername)
678 msg %= (changeid, repo.filtername)
681 return error.FilteredRepoLookupError(msg)
679 return error.FilteredRepoLookupError(msg)
682
680
683
681
684 def revsingle(repo, revspec, default=b'.', localalias=None):
682 def revsingle(repo, revspec, default=b'.', localalias=None):
685 if not revspec and revspec != 0:
683 if not revspec and revspec != 0:
686 return repo[default]
684 return repo[default]
687
685
688 l = revrange(repo, [revspec], localalias=localalias)
686 l = revrange(repo, [revspec], localalias=localalias)
689 if not l:
687 if not l:
690 raise error.Abort(_(b'empty revision set'))
688 raise error.Abort(_(b'empty revision set'))
691 return repo[l.last()]
689 return repo[l.last()]
692
690
693
691
694 def _pairspec(revspec):
692 def _pairspec(revspec):
695 tree = revsetlang.parse(revspec)
693 tree = revsetlang.parse(revspec)
696 return tree and tree[0] in (
694 return tree and tree[0] in (
697 b'range',
695 b'range',
698 b'rangepre',
696 b'rangepre',
699 b'rangepost',
697 b'rangepost',
700 b'rangeall',
698 b'rangeall',
701 )
699 )
702
700
703
701
704 def revpair(repo, revs):
702 def revpair(repo, revs):
705 if not revs:
703 if not revs:
706 return repo[b'.'], repo[None]
704 return repo[b'.'], repo[None]
707
705
708 l = revrange(repo, revs)
706 l = revrange(repo, revs)
709
707
710 if not l:
708 if not l:
711 raise error.Abort(_(b'empty revision range'))
709 raise error.Abort(_(b'empty revision range'))
712
710
713 first = l.first()
711 first = l.first()
714 second = l.last()
712 second = l.last()
715
713
716 if (
714 if (
717 first == second
715 first == second
718 and len(revs) >= 2
716 and len(revs) >= 2
719 and not all(revrange(repo, [r]) for r in revs)
717 and not all(revrange(repo, [r]) for r in revs)
720 ):
718 ):
721 raise error.Abort(_(b'empty revision on one side of range'))
719 raise error.Abort(_(b'empty revision on one side of range'))
722
720
723 # if top-level is range expression, the result must always be a pair
721 # if top-level is range expression, the result must always be a pair
724 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
722 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
725 return repo[first], repo[None]
723 return repo[first], repo[None]
726
724
727 return repo[first], repo[second]
725 return repo[first], repo[second]
728
726
729
727
730 def revrange(repo, specs, localalias=None):
728 def revrange(repo, specs, localalias=None):
731 """Execute 1 to many revsets and return the union.
729 """Execute 1 to many revsets and return the union.
732
730
733 This is the preferred mechanism for executing revsets using user-specified
731 This is the preferred mechanism for executing revsets using user-specified
734 config options, such as revset aliases.
732 config options, such as revset aliases.
735
733
736 The revsets specified by ``specs`` will be executed via a chained ``OR``
734 The revsets specified by ``specs`` will be executed via a chained ``OR``
737 expression. If ``specs`` is empty, an empty result is returned.
735 expression. If ``specs`` is empty, an empty result is returned.
738
736
739 ``specs`` can contain integers, in which case they are assumed to be
737 ``specs`` can contain integers, in which case they are assumed to be
740 revision numbers.
738 revision numbers.
741
739
742 It is assumed the revsets are already formatted. If you have arguments
740 It is assumed the revsets are already formatted. If you have arguments
743 that need to be expanded in the revset, call ``revsetlang.formatspec()``
741 that need to be expanded in the revset, call ``revsetlang.formatspec()``
744 and pass the result as an element of ``specs``.
742 and pass the result as an element of ``specs``.
745
743
746 Specifying a single revset is allowed.
744 Specifying a single revset is allowed.
747
745
748 Returns a ``revset.abstractsmartset`` which is a list-like interface over
746 Returns a ``revset.abstractsmartset`` which is a list-like interface over
749 integer revisions.
747 integer revisions.
750 """
748 """
751 allspecs = []
749 allspecs = []
752 for spec in specs:
750 for spec in specs:
753 if isinstance(spec, int):
751 if isinstance(spec, int):
754 spec = revsetlang.formatspec(b'%d', spec)
752 spec = revsetlang.formatspec(b'%d', spec)
755 allspecs.append(spec)
753 allspecs.append(spec)
756 return repo.anyrevs(allspecs, user=True, localalias=localalias)
754 return repo.anyrevs(allspecs, user=True, localalias=localalias)
757
755
758
756
759 def meaningfulparents(repo, ctx):
757 def meaningfulparents(repo, ctx):
760 """Return list of meaningful (or all if debug) parentrevs for rev.
758 """Return list of meaningful (or all if debug) parentrevs for rev.
761
759
762 For merges (two non-nullrev revisions) both parents are meaningful.
760 For merges (two non-nullrev revisions) both parents are meaningful.
763 Otherwise the first parent revision is considered meaningful if it
761 Otherwise the first parent revision is considered meaningful if it
764 is not the preceding revision.
762 is not the preceding revision.
765 """
763 """
766 parents = ctx.parents()
764 parents = ctx.parents()
767 if len(parents) > 1:
765 if len(parents) > 1:
768 return parents
766 return parents
769 if repo.ui.debugflag:
767 if repo.ui.debugflag:
770 return [parents[0], repo[nullrev]]
768 return [parents[0], repo[nullrev]]
771 if parents[0].rev() >= intrev(ctx) - 1:
769 if parents[0].rev() >= intrev(ctx) - 1:
772 return []
770 return []
773 return parents
771 return parents
774
772
775
773
776 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
774 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
777 """Return a function that produced paths for presenting to the user.
775 """Return a function that produced paths for presenting to the user.
778
776
779 The returned function takes a repo-relative path and produces a path
777 The returned function takes a repo-relative path and produces a path
780 that can be presented in the UI.
778 that can be presented in the UI.
781
779
782 Depending on the value of ui.relative-paths, either a repo-relative or
780 Depending on the value of ui.relative-paths, either a repo-relative or
783 cwd-relative path will be produced.
781 cwd-relative path will be produced.
784
782
785 legacyrelativevalue is the value to use if ui.relative-paths=legacy
783 legacyrelativevalue is the value to use if ui.relative-paths=legacy
786
784
787 If forcerelativevalue is not None, then that value will be used regardless
785 If forcerelativevalue is not None, then that value will be used regardless
788 of what ui.relative-paths is set to.
786 of what ui.relative-paths is set to.
789 """
787 """
790 if forcerelativevalue is not None:
788 if forcerelativevalue is not None:
791 relative = forcerelativevalue
789 relative = forcerelativevalue
792 else:
790 else:
793 config = repo.ui.config(b'ui', b'relative-paths')
791 config = repo.ui.config(b'ui', b'relative-paths')
794 if config == b'legacy':
792 if config == b'legacy':
795 relative = legacyrelativevalue
793 relative = legacyrelativevalue
796 else:
794 else:
797 relative = stringutil.parsebool(config)
795 relative = stringutil.parsebool(config)
798 if relative is None:
796 if relative is None:
799 raise error.ConfigError(
797 raise error.ConfigError(
800 _(b"ui.relative-paths is not a boolean ('%s')") % config
798 _(b"ui.relative-paths is not a boolean ('%s')") % config
801 )
799 )
802
800
803 if relative:
801 if relative:
804 cwd = repo.getcwd()
802 cwd = repo.getcwd()
805 pathto = repo.pathto
803 pathto = repo.pathto
806 return lambda f: pathto(f, cwd)
804 return lambda f: pathto(f, cwd)
807 elif repo.ui.configbool(b'ui', b'slash'):
805 elif repo.ui.configbool(b'ui', b'slash'):
808 return lambda f: f
806 return lambda f: f
809 else:
807 else:
810 return util.localpath
808 return util.localpath
811
809
812
810
813 def subdiruipathfn(subpath, uipathfn):
811 def subdiruipathfn(subpath, uipathfn):
814 '''Create a new uipathfn that treats the file as relative to subpath.'''
812 '''Create a new uipathfn that treats the file as relative to subpath.'''
815 return lambda f: uipathfn(posixpath.join(subpath, f))
813 return lambda f: uipathfn(posixpath.join(subpath, f))
816
814
817
815
818 def anypats(pats, opts):
816 def anypats(pats, opts):
819 '''Checks if any patterns, including --include and --exclude were given.
817 '''Checks if any patterns, including --include and --exclude were given.
820
818
821 Some commands (e.g. addremove) use this condition for deciding whether to
819 Some commands (e.g. addremove) use this condition for deciding whether to
822 print absolute or relative paths.
820 print absolute or relative paths.
823 '''
821 '''
824 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
822 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
825
823
826
824
827 def expandpats(pats):
825 def expandpats(pats):
828 '''Expand bare globs when running on windows.
826 '''Expand bare globs when running on windows.
829 On posix we assume it already has already been done by sh.'''
827 On posix we assume it already has already been done by sh.'''
830 if not util.expandglobs:
828 if not util.expandglobs:
831 return list(pats)
829 return list(pats)
832 ret = []
830 ret = []
833 for kindpat in pats:
831 for kindpat in pats:
834 kind, pat = matchmod._patsplit(kindpat, None)
832 kind, pat = matchmod._patsplit(kindpat, None)
835 if kind is None:
833 if kind is None:
836 try:
834 try:
837 globbed = glob.glob(pat)
835 globbed = glob.glob(pat)
838 except re.error:
836 except re.error:
839 globbed = [pat]
837 globbed = [pat]
840 if globbed:
838 if globbed:
841 ret.extend(globbed)
839 ret.extend(globbed)
842 continue
840 continue
843 ret.append(kindpat)
841 ret.append(kindpat)
844 return ret
842 return ret
845
843
846
844
847 def matchandpats(
845 def matchandpats(
848 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
846 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
849 ):
847 ):
850 '''Return a matcher and the patterns that were used.
848 '''Return a matcher and the patterns that were used.
851 The matcher will warn about bad matches, unless an alternate badfn callback
849 The matcher will warn about bad matches, unless an alternate badfn callback
852 is provided.'''
850 is provided.'''
853 if opts is None:
851 if opts is None:
854 opts = {}
852 opts = {}
855 if not globbed and default == b'relpath':
853 if not globbed and default == b'relpath':
856 pats = expandpats(pats or [])
854 pats = expandpats(pats or [])
857
855
858 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
856 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
859
857
860 def bad(f, msg):
858 def bad(f, msg):
861 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
859 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
862
860
863 if badfn is None:
861 if badfn is None:
864 badfn = bad
862 badfn = bad
865
863
866 m = ctx.match(
864 m = ctx.match(
867 pats,
865 pats,
868 opts.get(b'include'),
866 opts.get(b'include'),
869 opts.get(b'exclude'),
867 opts.get(b'exclude'),
870 default,
868 default,
871 listsubrepos=opts.get(b'subrepos'),
869 listsubrepos=opts.get(b'subrepos'),
872 badfn=badfn,
870 badfn=badfn,
873 )
871 )
874
872
875 if m.always():
873 if m.always():
876 pats = []
874 pats = []
877 return m, pats
875 return m, pats
878
876
879
877
880 def match(
878 def match(
881 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
879 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
882 ):
880 ):
883 '''Return a matcher that will warn about bad matches.'''
881 '''Return a matcher that will warn about bad matches.'''
884 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
882 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
885
883
886
884
887 def matchall(repo):
885 def matchall(repo):
888 '''Return a matcher that will efficiently match everything.'''
886 '''Return a matcher that will efficiently match everything.'''
889 return matchmod.always()
887 return matchmod.always()
890
888
891
889
892 def matchfiles(repo, files, badfn=None):
890 def matchfiles(repo, files, badfn=None):
893 '''Return a matcher that will efficiently match exactly these files.'''
891 '''Return a matcher that will efficiently match exactly these files.'''
894 return matchmod.exact(files, badfn=badfn)
892 return matchmod.exact(files, badfn=badfn)
895
893
896
894
897 def parsefollowlinespattern(repo, rev, pat, msg):
895 def parsefollowlinespattern(repo, rev, pat, msg):
898 """Return a file name from `pat` pattern suitable for usage in followlines
896 """Return a file name from `pat` pattern suitable for usage in followlines
899 logic.
897 logic.
900 """
898 """
901 if not matchmod.patkind(pat):
899 if not matchmod.patkind(pat):
902 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
900 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
903 else:
901 else:
904 ctx = repo[rev]
902 ctx = repo[rev]
905 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
903 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
906 files = [f for f in ctx if m(f)]
904 files = [f for f in ctx if m(f)]
907 if len(files) != 1:
905 if len(files) != 1:
908 raise error.ParseError(msg)
906 raise error.ParseError(msg)
909 return files[0]
907 return files[0]
910
908
911
909
912 def getorigvfs(ui, repo):
910 def getorigvfs(ui, repo):
913 """return a vfs suitable to save 'orig' file
911 """return a vfs suitable to save 'orig' file
914
912
915 return None if no special directory is configured"""
913 return None if no special directory is configured"""
916 origbackuppath = ui.config(b'ui', b'origbackuppath')
914 origbackuppath = ui.config(b'ui', b'origbackuppath')
917 if not origbackuppath:
915 if not origbackuppath:
918 return None
916 return None
919 return vfs.vfs(repo.wvfs.join(origbackuppath))
917 return vfs.vfs(repo.wvfs.join(origbackuppath))
920
918
921
919
922 def backuppath(ui, repo, filepath):
920 def backuppath(ui, repo, filepath):
923 '''customize where working copy backup files (.orig files) are created
921 '''customize where working copy backup files (.orig files) are created
924
922
925 Fetch user defined path from config file: [ui] origbackuppath = <path>
923 Fetch user defined path from config file: [ui] origbackuppath = <path>
926 Fall back to default (filepath with .orig suffix) if not specified
924 Fall back to default (filepath with .orig suffix) if not specified
927
925
928 filepath is repo-relative
926 filepath is repo-relative
929
927
930 Returns an absolute path
928 Returns an absolute path
931 '''
929 '''
932 origvfs = getorigvfs(ui, repo)
930 origvfs = getorigvfs(ui, repo)
933 if origvfs is None:
931 if origvfs is None:
934 return repo.wjoin(filepath + b".orig")
932 return repo.wjoin(filepath + b".orig")
935
933
936 origbackupdir = origvfs.dirname(filepath)
934 origbackupdir = origvfs.dirname(filepath)
937 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
935 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
938 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
936 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
939
937
940 # Remove any files that conflict with the backup file's path
938 # Remove any files that conflict with the backup file's path
941 for f in reversed(list(pathutil.finddirs(filepath))):
939 for f in reversed(list(pathutil.finddirs(filepath))):
942 if origvfs.isfileorlink(f):
940 if origvfs.isfileorlink(f):
943 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
941 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
944 origvfs.unlink(f)
942 origvfs.unlink(f)
945 break
943 break
946
944
947 origvfs.makedirs(origbackupdir)
945 origvfs.makedirs(origbackupdir)
948
946
949 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
947 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
950 ui.note(
948 ui.note(
951 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
949 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
952 )
950 )
953 origvfs.rmtree(filepath, forcibly=True)
951 origvfs.rmtree(filepath, forcibly=True)
954
952
955 return origvfs.join(filepath)
953 return origvfs.join(filepath)
956
954
957
955
958 class _containsnode(object):
956 class _containsnode(object):
959 """proxy __contains__(node) to container.__contains__ which accepts revs"""
957 """proxy __contains__(node) to container.__contains__ which accepts revs"""
960
958
961 def __init__(self, repo, revcontainer):
959 def __init__(self, repo, revcontainer):
962 self._torev = repo.changelog.rev
960 self._torev = repo.changelog.rev
963 self._revcontains = revcontainer.__contains__
961 self._revcontains = revcontainer.__contains__
964
962
965 def __contains__(self, node):
963 def __contains__(self, node):
966 return self._revcontains(self._torev(node))
964 return self._revcontains(self._torev(node))
967
965
968
966
969 def cleanupnodes(
967 def cleanupnodes(
970 repo,
968 repo,
971 replacements,
969 replacements,
972 operation,
970 operation,
973 moves=None,
971 moves=None,
974 metadata=None,
972 metadata=None,
975 fixphase=False,
973 fixphase=False,
976 targetphase=None,
974 targetphase=None,
977 backup=True,
975 backup=True,
978 ):
976 ):
979 """do common cleanups when old nodes are replaced by new nodes
977 """do common cleanups when old nodes are replaced by new nodes
980
978
981 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
979 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
982 (we might also want to move working directory parent in the future)
980 (we might also want to move working directory parent in the future)
983
981
984 By default, bookmark moves are calculated automatically from 'replacements',
982 By default, bookmark moves are calculated automatically from 'replacements',
985 but 'moves' can be used to override that. Also, 'moves' may include
983 but 'moves' can be used to override that. Also, 'moves' may include
986 additional bookmark moves that should not have associated obsmarkers.
984 additional bookmark moves that should not have associated obsmarkers.
987
985
988 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
986 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
989 have replacements. operation is a string, like "rebase".
987 have replacements. operation is a string, like "rebase".
990
988
991 metadata is dictionary containing metadata to be stored in obsmarker if
989 metadata is dictionary containing metadata to be stored in obsmarker if
992 obsolescence is enabled.
990 obsolescence is enabled.
993 """
991 """
994 assert fixphase or targetphase is None
992 assert fixphase or targetphase is None
995 if not replacements and not moves:
993 if not replacements and not moves:
996 return
994 return
997
995
998 # translate mapping's other forms
996 # translate mapping's other forms
999 if not util.safehasattr(replacements, b'items'):
997 if not util.safehasattr(replacements, b'items'):
1000 replacements = {(n,): () for n in replacements}
998 replacements = {(n,): () for n in replacements}
1001 else:
999 else:
1002 # upgrading non tuple "source" to tuple ones for BC
1000 # upgrading non tuple "source" to tuple ones for BC
1003 repls = {}
1001 repls = {}
1004 for key, value in replacements.items():
1002 for key, value in replacements.items():
1005 if not isinstance(key, tuple):
1003 if not isinstance(key, tuple):
1006 key = (key,)
1004 key = (key,)
1007 repls[key] = value
1005 repls[key] = value
1008 replacements = repls
1006 replacements = repls
1009
1007
1010 # Unfiltered repo is needed since nodes in replacements might be hidden.
1008 # Unfiltered repo is needed since nodes in replacements might be hidden.
1011 unfi = repo.unfiltered()
1009 unfi = repo.unfiltered()
1012
1010
1013 # Calculate bookmark movements
1011 # Calculate bookmark movements
1014 if moves is None:
1012 if moves is None:
1015 moves = {}
1013 moves = {}
1016 for oldnodes, newnodes in replacements.items():
1014 for oldnodes, newnodes in replacements.items():
1017 for oldnode in oldnodes:
1015 for oldnode in oldnodes:
1018 if oldnode in moves:
1016 if oldnode in moves:
1019 continue
1017 continue
1020 if len(newnodes) > 1:
1018 if len(newnodes) > 1:
1021 # usually a split, take the one with biggest rev number
1019 # usually a split, take the one with biggest rev number
1022 newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1020 newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1023 elif len(newnodes) == 0:
1021 elif len(newnodes) == 0:
1024 # move bookmark backwards
1022 # move bookmark backwards
1025 allreplaced = []
1023 allreplaced = []
1026 for rep in replacements:
1024 for rep in replacements:
1027 allreplaced.extend(rep)
1025 allreplaced.extend(rep)
1028 roots = list(
1026 roots = list(
1029 unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1027 unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1030 )
1028 )
1031 if roots:
1029 if roots:
1032 newnode = roots[0].node()
1030 newnode = roots[0].node()
1033 else:
1031 else:
1034 newnode = nullid
1032 newnode = nullid
1035 else:
1033 else:
1036 newnode = newnodes[0]
1034 newnode = newnodes[0]
1037 moves[oldnode] = newnode
1035 moves[oldnode] = newnode
1038
1036
1039 allnewnodes = [n for ns in replacements.values() for n in ns]
1037 allnewnodes = [n for ns in replacements.values() for n in ns]
1040 toretract = {}
1038 toretract = {}
1041 toadvance = {}
1039 toadvance = {}
1042 if fixphase:
1040 if fixphase:
1043 precursors = {}
1041 precursors = {}
1044 for oldnodes, newnodes in replacements.items():
1042 for oldnodes, newnodes in replacements.items():
1045 for oldnode in oldnodes:
1043 for oldnode in oldnodes:
1046 for newnode in newnodes:
1044 for newnode in newnodes:
1047 precursors.setdefault(newnode, []).append(oldnode)
1045 precursors.setdefault(newnode, []).append(oldnode)
1048
1046
1049 allnewnodes.sort(key=lambda n: unfi[n].rev())
1047 allnewnodes.sort(key=lambda n: unfi[n].rev())
1050 newphases = {}
1048 newphases = {}
1051
1049
1052 def phase(ctx):
1050 def phase(ctx):
1053 return newphases.get(ctx.node(), ctx.phase())
1051 return newphases.get(ctx.node(), ctx.phase())
1054
1052
1055 for newnode in allnewnodes:
1053 for newnode in allnewnodes:
1056 ctx = unfi[newnode]
1054 ctx = unfi[newnode]
1057 parentphase = max(phase(p) for p in ctx.parents())
1055 parentphase = max(phase(p) for p in ctx.parents())
1058 if targetphase is None:
1056 if targetphase is None:
1059 oldphase = max(
1057 oldphase = max(
1060 unfi[oldnode].phase() for oldnode in precursors[newnode]
1058 unfi[oldnode].phase() for oldnode in precursors[newnode]
1061 )
1059 )
1062 newphase = max(oldphase, parentphase)
1060 newphase = max(oldphase, parentphase)
1063 else:
1061 else:
1064 newphase = max(targetphase, parentphase)
1062 newphase = max(targetphase, parentphase)
1065 newphases[newnode] = newphase
1063 newphases[newnode] = newphase
1066 if newphase > ctx.phase():
1064 if newphase > ctx.phase():
1067 toretract.setdefault(newphase, []).append(newnode)
1065 toretract.setdefault(newphase, []).append(newnode)
1068 elif newphase < ctx.phase():
1066 elif newphase < ctx.phase():
1069 toadvance.setdefault(newphase, []).append(newnode)
1067 toadvance.setdefault(newphase, []).append(newnode)
1070
1068
1071 with repo.transaction(b'cleanup') as tr:
1069 with repo.transaction(b'cleanup') as tr:
1072 # Move bookmarks
1070 # Move bookmarks
1073 bmarks = repo._bookmarks
1071 bmarks = repo._bookmarks
1074 bmarkchanges = []
1072 bmarkchanges = []
1075 for oldnode, newnode in moves.items():
1073 for oldnode, newnode in moves.items():
1076 oldbmarks = repo.nodebookmarks(oldnode)
1074 oldbmarks = repo.nodebookmarks(oldnode)
1077 if not oldbmarks:
1075 if not oldbmarks:
1078 continue
1076 continue
1079 from . import bookmarks # avoid import cycle
1077 from . import bookmarks # avoid import cycle
1080
1078
1081 repo.ui.debug(
1079 repo.ui.debug(
1082 b'moving bookmarks %r from %s to %s\n'
1080 b'moving bookmarks %r from %s to %s\n'
1083 % (
1081 % (
1084 pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1082 pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1085 hex(oldnode),
1083 hex(oldnode),
1086 hex(newnode),
1084 hex(newnode),
1087 )
1085 )
1088 )
1086 )
1089 # Delete divergent bookmarks being parents of related newnodes
1087 # Delete divergent bookmarks being parents of related newnodes
1090 deleterevs = repo.revs(
1088 deleterevs = repo.revs(
1091 b'parents(roots(%ln & (::%n))) - parents(%n)',
1089 b'parents(roots(%ln & (::%n))) - parents(%n)',
1092 allnewnodes,
1090 allnewnodes,
1093 newnode,
1091 newnode,
1094 oldnode,
1092 oldnode,
1095 )
1093 )
1096 deletenodes = _containsnode(repo, deleterevs)
1094 deletenodes = _containsnode(repo, deleterevs)
1097 for name in oldbmarks:
1095 for name in oldbmarks:
1098 bmarkchanges.append((name, newnode))
1096 bmarkchanges.append((name, newnode))
1099 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1097 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1100 bmarkchanges.append((b, None))
1098 bmarkchanges.append((b, None))
1101
1099
1102 if bmarkchanges:
1100 if bmarkchanges:
1103 bmarks.applychanges(repo, tr, bmarkchanges)
1101 bmarks.applychanges(repo, tr, bmarkchanges)
1104
1102
1105 for phase, nodes in toretract.items():
1103 for phase, nodes in toretract.items():
1106 phases.retractboundary(repo, tr, phase, nodes)
1104 phases.retractboundary(repo, tr, phase, nodes)
1107 for phase, nodes in toadvance.items():
1105 for phase, nodes in toadvance.items():
1108 phases.advanceboundary(repo, tr, phase, nodes)
1106 phases.advanceboundary(repo, tr, phase, nodes)
1109
1107
1110 mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1108 mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1111 # Obsolete or strip nodes
1109 # Obsolete or strip nodes
1112 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1110 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1113 # If a node is already obsoleted, and we want to obsolete it
1111 # If a node is already obsoleted, and we want to obsolete it
1114 # without a successor, skip that obssolete request since it's
1112 # without a successor, skip that obssolete request since it's
1115 # unnecessary. That's the "if s or not isobs(n)" check below.
1113 # unnecessary. That's the "if s or not isobs(n)" check below.
1116 # Also sort the node in topology order, that might be useful for
1114 # Also sort the node in topology order, that might be useful for
1117 # some obsstore logic.
1115 # some obsstore logic.
1118 # NOTE: the sorting might belong to createmarkers.
1116 # NOTE: the sorting might belong to createmarkers.
1119 torev = unfi.changelog.rev
1117 torev = unfi.changelog.rev
1120 sortfunc = lambda ns: torev(ns[0][0])
1118 sortfunc = lambda ns: torev(ns[0][0])
1121 rels = []
1119 rels = []
1122 for ns, s in sorted(replacements.items(), key=sortfunc):
1120 for ns, s in sorted(replacements.items(), key=sortfunc):
1123 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1121 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1124 rels.append(rel)
1122 rels.append(rel)
1125 if rels:
1123 if rels:
1126 obsolete.createmarkers(
1124 obsolete.createmarkers(
1127 repo, rels, operation=operation, metadata=metadata
1125 repo, rels, operation=operation, metadata=metadata
1128 )
1126 )
1129 elif phases.supportinternal(repo) and mayusearchived:
1127 elif phases.supportinternal(repo) and mayusearchived:
1130 # this assume we do not have "unstable" nodes above the cleaned ones
1128 # this assume we do not have "unstable" nodes above the cleaned ones
1131 allreplaced = set()
1129 allreplaced = set()
1132 for ns in replacements.keys():
1130 for ns in replacements.keys():
1133 allreplaced.update(ns)
1131 allreplaced.update(ns)
1134 if backup:
1132 if backup:
1135 from . import repair # avoid import cycle
1133 from . import repair # avoid import cycle
1136
1134
1137 node = min(allreplaced, key=repo.changelog.rev)
1135 node = min(allreplaced, key=repo.changelog.rev)
1138 repair.backupbundle(
1136 repair.backupbundle(
1139 repo, allreplaced, allreplaced, node, operation
1137 repo, allreplaced, allreplaced, node, operation
1140 )
1138 )
1141 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1139 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1142 else:
1140 else:
1143 from . import repair # avoid import cycle
1141 from . import repair # avoid import cycle
1144
1142
1145 tostrip = list(n for ns in replacements for n in ns)
1143 tostrip = list(n for ns in replacements for n in ns)
1146 if tostrip:
1144 if tostrip:
1147 repair.delayedstrip(
1145 repair.delayedstrip(
1148 repo.ui, repo, tostrip, operation, backup=backup
1146 repo.ui, repo, tostrip, operation, backup=backup
1149 )
1147 )
1150
1148
1151
1149
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    """Add new files and remove missing files, as `hg addremove` does.

    Walks the working directory with ``matcher``: unknown files are
    scheduled for addition, missing tracked files for removal, and removed
    files that match an added one at or above the ``similarity`` threshold
    (from ``opts``, in percent) are recorded as renames.  Recurses into
    subrepositories when ``opts['subrepos']`` is set or the matcher names
    them explicitly.

    Returns 1 if any explicitly-listed path was rejected or a subrepo
    addremove reported failure, 0 otherwise.  With ``opts['dry_run']`` no
    dirstate change is recorded.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    # user-facing value is a percentage; internal code expects [0.0, 1.0]
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                # the subrepo is referenced but not actually present
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        # report the error only for explicitly listed files, but remember
        # every rejected path so we can compute the return code below
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        # exact matches are only announced in verbose mode
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected path that the user named explicitly is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1219
1217
1220
1218
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []

    def badfn(path, msg):
        rejected.append(path)

    m = matchfiles(repo, files, badfn=badfn)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                repo.ui.status(_(b'adding %s\n') % abs)
            else:
                repo.ui.status(_(b'removing %s\n') % abs)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # any rejected path that was explicitly requested means failure
    if any(f in m.files() for f in rejected):
        return 1
    return 0
1254
1252
1255
1253
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed,
    forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # guards against symlink traversal / nested-repo escapes for unknown files
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        # dirstate states: b'?' untracked, b'r' removed, b'a' added;
        # `st` is presumably the stat-like walk result — falsy when the
        # file is absent from disk (TODO confirm against dirstate.walk)
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            # tracked but gone from disk
            deleted.append(abs)
        elif dstate == b'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1290
1288
1291
1289
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.

    Returns a {new_name: old_name} mapping; empty when similarity is 0.
    '''
    renames = {}
    if similarity <= 0:
        return renames

    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        # only announce renames involving non-exact matches unless verbose
        bothexact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not bothexact:
            msg = _(
                b'recording removal of %s as rename to %s '
                b'(%d%% similar)\n'
            )
            repo.ui.status(
                msg % (uipathfn(old), uipathfn(new), score * 100)
            )
        renames[new] = old
    return renames
1313
1311
1314
1312
def _markchanges(repo, unknown, deleted, renames):
    '''Record pending working-copy changes in the dirstate: files in
    `unknown` are marked as added, files in `deleted` as removed, and each
    destination in `renames` as copied from its source.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dest, source in pycompat.iteritems(renames):
            workingctx.copy(source, dest)
1324
1322
1325
1323
def getrenamedfn(repo, endrev=None):
    """Return a callable that resolves a file's rename source.

    The returned function takes ``(fn, rev)`` and returns the path the file
    was copied/renamed from in that revision, or None.  When the repo uses
    changeset-centric copy data, the answer comes straight from the
    changectx; otherwise a per-file linkrev cache (bounded by ``endrev``)
    is built lazily.
    """
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            # copy metadata is stored on the changeset itself
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    # rcache maps filename -> {linkrev: rename-source-or-False}
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # populate the cache for fn from its filelog in one pass
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                # store False (not None) for "known, not renamed"
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1370
1368
1371
1369
def getcopiesfn(repo, endrev=None):
    """Return a callable mapping a changectx to its sorted copy records.

    The returned function yields a sorted list of (destination, source)
    pairs for the given context.
    """
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            p2copies = ctx.p2copies()
            if not p2copies:
                return sorted(ctx.p1copies().items())
            # merge copies from both parents; there should be no overlap
            merged = ctx.p1copies().copy()
            merged.update(p2copies)
            return sorted(merged.items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            found = []
            rev = ctx.rev()
            for fn in ctx.files():
                source = getrenamed(fn, rev)
                if source:
                    found.append((fn, source))
            return found

    return copiesfn
1396
1394
1397
1395
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record back so chained copies collapse to
    # the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        # unless dst is already tracked normally ('m' merged / 'n' normal),
        # make sure it gets picked up again
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            # source is only added, not committed: no copy metadata can be
            # recorded, so just warn and add the destination
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            # '?' unknown / 'r' removed: (re)add the destination
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1420
1418
1421
1419
def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    # snapshot working-dir copies before the status-driven adjustments below
    copies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    # chain each copy through the old-parent copies back to its source
    copies = dict(
        (dst, oldcopies.get(src, src))
        for dst, src in pycompat.iteritems(oldcopies)
    )
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        # drop the copy record when the source is gone, the destination
        # already exists in newctx, or the destination is no longer added
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)
1466
1464
1467
1465
def writerequires(opener, requirements):
    """Atomically write `requirements` to the 'requires' file, one sorted
    entry per line."""
    entries = sorted(requirements)
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for entry in entries:
            fp.write(b"%s\n" % entry)
1472
1470
1473
1471
class filecachesubentry(object):
    """Cached stat information for a single file path.

    Tracks whether the file has changed since the last stat, and whether
    it is usable for caching at all (``_cacheable`` is None while that is
    still unknown, e.g. when the file did not exist yet).
    """

    def __init__(self, path, stat):
        # path: file to track; stat: whether to stat it immediately
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat so subsequent changed() calls compare against now
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file changed (or can't be cached) since the
        last stat; updates the stored stat as a side effect."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1528
1526
1529
1527
class filecacheentry(object):
    """Aggregates stat tracking for a group of file paths.

    Wraps one filecachesubentry per path and reports change/refresh over
    the whole group.
    """

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1546
1544
1547
1545
1548 class filecache(object):
1546 class filecache(object):
1549 """A property like decorator that tracks files under .hg/ for updates.
1547 """A property like decorator that tracks files under .hg/ for updates.
1550
1548
1551 On first access, the files defined as arguments are stat()ed and the
1549 On first access, the files defined as arguments are stat()ed and the
1552 results cached. The decorated function is called. The results are stashed
1550 results cached. The decorated function is called. The results are stashed
1553 away in a ``_filecache`` dict on the object whose method is decorated.
1551 away in a ``_filecache`` dict on the object whose method is decorated.
1554
1552
1555 On subsequent access, the cached result is used as it is set to the
1553 On subsequent access, the cached result is used as it is set to the
1556 instance dictionary.
1554 instance dictionary.
1557
1555
1558 On external property set/delete operations, the caller must update the
1556 On external property set/delete operations, the caller must update the
1559 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1557 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1560 instead of directly setting <attr>.
1558 instead of directly setting <attr>.
1561
1559
1562 When using the property API, the cached data is always used if available.
1560 When using the property API, the cached data is always used if available.
1563 No stat() is performed to check if the file has changed.
1561 No stat() is performed to check if the file has changed.
1564
1562
1565 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1563 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1566 can populate an entry before the property's getter is called. In this case,
1564 can populate an entry before the property's getter is called. In this case,
1567 entries in ``_filecache`` will be used during property operations,
1565 entries in ``_filecache`` will be used during property operations,
1568 if available. If the underlying file changes, it is up to external callers
1566 if available. If the underlying file changes, it is up to external callers
1569 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1567 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1570 method result as well as possibly calling ``del obj._filecache[attr]`` to
1568 method result as well as possibly calling ``del obj._filecache[attr]`` to
1571 remove the ``filecacheentry``.
1569 remove the ``filecacheentry``.
1572 """
1570 """
1573
1571
1574 def __init__(self, *paths):
1572 def __init__(self, *paths):
1575 self.paths = paths
1573 self.paths = paths
1576
1574
1577 def join(self, obj, fname):
1575 def join(self, obj, fname):
1578 """Used to compute the runtime path of a cached file.
1576 """Used to compute the runtime path of a cached file.
1579
1577
1580 Users should subclass filecache and provide their own version of this
1578 Users should subclass filecache and provide their own version of this
1581 function to call the appropriate join function on 'obj' (an instance
1579 function to call the appropriate join function on 'obj' (an instance
1582 of the class that its member function was decorated).
1580 of the class that its member function was decorated).
1583 """
1581 """
1584 raise NotImplementedError
1582 raise NotImplementedError
1585
1583
def __call__(self, func):
    # Record the decorated function plus both the str and bytes forms of
    # its name; the descriptor itself is returned so it replaces 'func'.
    self.sname = func.__name__
    self.name = pycompat.sysbytes(self.sname)
    self.func = func
    return self
1591
1589
def __get__(self, obj, type=None):
    # if accessed on the class, return the descriptor itself.
    if obj is None:
        return self

    # cache hits go straight through __dict__, never through __get__
    assert self.sname not in obj.__dict__

    entry = obj._filecache.get(self.name)

    if entry:
        if entry.changed():
            entry.obj = self.func(obj)
    else:
        paths = [self.join(obj, path) for path in self.paths]

        # We stat -before- creating the object so our cache doesn't lie if
        # a writer modified between the time we read and stat
        entry = filecacheentry(paths, True)
        entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

    obj.__dict__[self.sname] = entry.obj
    return entry.obj
1616
1614
# don't implement __set__(), which would make __dict__ lookup as slow as
# function call.

def set(self, obj, value):
    """Directly install 'value' as the cached result for this property."""
    if self.name in obj._filecache:
        ce = obj._filecache[self.name]
    else:
        # register an entry for the missing value: X in __dict__ must
        # imply X in _filecache
        paths = [self.join(obj, path) for path in self.paths]
        ce = filecacheentry(paths, False)
        obj._filecache[self.name] = ce

    ce.obj = value  # update cached copy
    obj.__dict__[self.sname] = value  # update copy returned by obj.x
1632
1630
1633
1631
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
1700
1698
1701
1699
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' through ui.system() with 'envvar' carrying a lock token.

    'lock' must currently be held; otherwise a
    LockInheritanceContractViolation is raised.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            b'lock can only be inherited while held'
        )
    if environ is None:
        environ = {}
    # expose the inheritance token to the child for the duration of the call
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1712
1710
1713
1711
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(
        repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
    )
1723
1721
1724
1722
class progress(object):
    """Progress reporting helper driving an 'updatebar' callback.

    Tracks position/total for 'topic' and forwards every change to
    'updatebar(topic, pos, item, unit, total)'.  When the
    ``progress.debug`` config knob is set, updates are mirrored to
    ``ui.debug()``.  Usable as a context manager: the bar is completed
    on exit.
    """

    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        """Move the bar to 'pos'; optionally updating 'total' and 'item'."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        """Advance the bar by 'step' positions (default 1)."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Signal completion (pos=None) to the update callback."""
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # BUGFIX: 'unit' must always be bound before use below; previously
        # it was only assigned when self.unit was non-empty, so debug mode
        # with an empty unit raised NameError.
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1773
1771
1774
1772
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool(b'format', b'generaldelta'):
        return True
    return ui.configbool(b'format', b'usegeneraldelta')
1782
1780
1783
1781
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')
1789
1787
1790
1788
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # strip the trailing '\n' from the stored first line
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' filter keeps lines that are only '\n'
            # (which are truthy, so 'if line' would not skip them) from
            # breaking the split below
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))
1861
1859
1862
1860
# transaction-name prefixes (matched by registersummarycallback's txmatch)
# for which obsolescence summaries are reported
_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

# transaction-name prefixes for which new-changeset summaries are reported
_reportnewcssource = [
    b'pull',
    b'unbundle',
]
1875
1873
1876
1874
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # silence the matcher's bad-file callback to avoid duplicating the
        # message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1889
1887
1890
1888
# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1896
1894
1897
1895
def registersummarycallback(repo, otr, txnname=b''):
    """register a callback to issue a summary after the transaction is closed
    """

    def txmatch(sources):
        # does the transaction name start with any of the given prefixes?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))

        if obsolete.isenabled(
            repo, obsolete.createmarkersopt
        ) and repo.ui.configbool(
            b'experimental', b'evolution.report-instabilities'
        ):
            instabilitytypes = [
                (b'orphan', b'orphan'),
                (b'phase-divergent', b'phasedivergent'),
                (b'content-divergent', b'contentdivergent'),
            ]

            def getinstabilitycounts(repo):
                filtered = repo.changelog.filteredrevs
                counts = {}
                for instability, revset in instabilitytypes:
                    counts[instability] = len(
                        set(obsolete.getrevs(repo, revset)) - filtered
                    )
                return counts

            oldinstabilitycounts = getinstabilitycounts(repo)

            @reportsummary
            def reportnewinstabilities(repo, tr):
                newinstabilitycounts = getinstabilitycounts(repo)
                for instability, revset in instabilitytypes:
                    delta = (
                        newinstabilitycounts[instability]
                        - oldinstabilitycounts[instability]
                    )
                    msg = getinstabilitymessage(delta, instability)
                    if msg:
                        repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            phasetracking = tr.changes.get(b'phases', {})
            if not phasetracking:
                return
            published = [
                rev
                for rev, (old, new) in pycompat.iteritems(phasetracking)
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(
                _(b'%d local changesets published\n') % len(published)
            )
2058
2056
2059
2057
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        # no net-new instabilities: nothing to warn about
        return None
    return _(b'%i new %s changesets\n') % (delta, instability)
2067
2065
2068
2066
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of 'nodes', abbreviated when long.

    In verbose mode, or when at most 'maxnumnodes' nodes are given, every
    node is listed; otherwise only the first 'maxnumnodes' appear followed
    by a count of the remainder.
    """
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2074
2072
2075
2073
def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branch
    branches = visible.branchmap()
    for name in branches:
        heads = branches.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
2092
2090
2093
2091
def wrapconvertsink(sink):
    """Extension hook around the sink from ``convcmd.convertsink()``.

    The core implementation is the identity function; extensions wrap this
    to intercept the sink before it is used, whether or not the convert
    extension was formally loaded.
    """
    return sink
def unhidehashlikerevs(repo, specs, hiddentype):
    """Unhide changesets referenced by hash or revision number in *specs*.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    Returns a repo object with the required changesets unhidden, or the
    repo unchanged when direct access is disabled, the filter level is
    unsupported, or nothing needs unhiding.
    """
    if not repo.filtername:
        return repo
    if not repo.ui.configbool(b'experimental', b'directaccess'):
        return repo
    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    # collect every hash-like symbol appearing in the parseable specs
    hashlikes = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        hashlikes.update(revsetlang.gethashlikesymbols(tree))
    if not hashlikes:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlikes)
    if not hiddenrevs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join(pycompat.bytestr(unfi[r]) for r in hiddenrevs)
        msg = _(
            b"warning: accessing hidden changesets for write "
            b"operation: %s\n"
        )
        repo.ui.warn(msg % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', hiddenrevs)
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    # unfiltered view sees hidden changesets; the filtered changelog ``cl``
    # is used to decide whether a resolved revision is currently hidden
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    # accepting plain revision numbers is gated behind its own config knob
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # NOTE(review): ``n <= tiprev`` admits n == tiprev, which is one
            # past the last valid revision (revs are 0..tiprev-1) — the
            # ``n not in cl`` test below would then add an out-of-range rev;
            # confirm whether ``n < tiprev`` was intended.
            if n <= tiprev:
                if not allowrevnums:
                    # numeric symbol but revnum access disabled: skip entirely
                    continue
                else:
                    if n not in cl:
                        # valid unfiltered rev that is hidden in this view
                        revs.add(n)
                    continue
            # n > tiprev: fall through — the digits may still be a hash prefix
        except ValueError:
            # not an integer at all; treat as a potential hash prefix below
            pass

        try:
            # resolve the symbol as a (possibly abbreviated) hex node id
            # against the unfiltered repo so hidden changesets match too
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # resolvable only when unfiltered, i.e. currently hidden
                revs.add(rev)

    return revs
def bookmarkrevs(repo, mark):
    """Select the revisions reachable by the given bookmark.

    Excludes ancestors of non-bookmarked heads and of other bookmarks, so
    only the history "owned" by *mark* remains.
    """
    expr = (
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))"
    )
    return repo.revs(expr, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now