scmutil: obsrevs is already a frozenset...
av6
r49575:c7e67584 default
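This changeset removes a redundant conversion in scmutil.filteredhash(): obsolete.getrevs() already returns a frozenset, which is hashable as-is, so the cache key no longer needs to re-wrap it (see the hunk below). A minimal sketch of the set/frozenset distinction the removed TODO comment was guarding against; illustrative values only, not part of the changeset:

    # Plain sets are mutable and therefore unhashable; frozensets are hashable
    # and can serve directly as dict/cache keys, like the key tuple below.
    revs = {3, 5, 8}
    frozen = frozenset(revs)
    key = (100, hash(frozen))   # fine: frozensets hash
    # (100, hash(revs))         # would raise TypeError: unhashable type: 'set'
    assert frozenset(frozen) == frozen  # re-wrapping a frozenset was a no-op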
@@ -1,2309 +1,2307 @@
 # scmutil.py - Mercurial core utility functions
 #
 # Copyright Olivia Mackall <olivia@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 import glob
 import os
 import posixpath
 import re
 import subprocess
 import weakref
 
 from .i18n import _
 from .node import (
     bin,
     hex,
     nullrev,
     short,
     wdirrev,
 )
 from .pycompat import getattr
 from .thirdparty import attr
 from . import (
     copies as copiesmod,
     encoding,
     error,
     match as matchmod,
     obsolete,
     obsutil,
     pathutil,
     phases,
     policy,
     pycompat,
     requirements as requirementsmod,
     revsetlang,
     similar,
     smartset,
     url,
     util,
     vfs,
 )
 
 from .utils import (
     hashutil,
     procutil,
     stringutil,
 )
 
 if pycompat.iswindows:
     from . import scmwindows as scmplatform
 else:
     from . import scmposix as scmplatform
 
 parsers = policy.importmod('parsers')
 rustrevlog = policy.importrust('revlog')
 
 termsize = scmplatform.termsize
 
 
 @attr.s(slots=True, repr=False)
 class status(object):
     """Struct with a list of files per status.
 
     The 'deleted', 'unknown' and 'ignored' properties are only
     relevant to the working copy.
     """
 
     modified = attr.ib(default=attr.Factory(list))
     added = attr.ib(default=attr.Factory(list))
     removed = attr.ib(default=attr.Factory(list))
     deleted = attr.ib(default=attr.Factory(list))
     unknown = attr.ib(default=attr.Factory(list))
     ignored = attr.ib(default=attr.Factory(list))
     clean = attr.ib(default=attr.Factory(list))
 
     def __iter__(self):
         yield self.modified
         yield self.added
         yield self.removed
         yield self.deleted
         yield self.unknown
         yield self.ignored
         yield self.clean
 
     def __repr__(self):
         return (
             r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
             r'unknown=%s, ignored=%s, clean=%s>'
         ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
 
 
 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
     # Create a (subpath, ctx) mapping where we prefer subpaths from
     # ctx1. The subpaths from ctx2 are important when the .hgsub file
     # has been modified (in ctx2) but not yet committed (in ctx1).
     subpaths = dict.fromkeys(ctx2.substate, ctx2)
     subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
 
     missing = set()
 
     for subpath in ctx2.substate:
         if subpath not in ctx1.substate:
             del subpaths[subpath]
             missing.add(subpath)
 
     for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
         yield subpath, ctx.sub(subpath)
 
     # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
     # status and diff will have an accurate result when it does
     # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
     # against itself.
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)
 
 
 def nochangesfound(ui, repo, excluded=None):
     """Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
     """
     secretlist = []
     if excluded:
         for n in excluded:
             ctx = repo[n]
             if ctx.phase() >= phases.secret and not ctx.extinct():
                 secretlist.append(n)
 
     if secretlist:
         ui.status(
             _(b"no changes found (ignored %d secret changesets)\n")
             % len(secretlist)
         )
     else:
         ui.status(_(b"no changes found\n"))
 
 
 def callcatch(ui, func):
     """call func() with global exception handling
 
     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
     coarse_exit_code = -1
     detailed_exit_code = -1
     try:
         try:
             return func()
         except:  # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         detailed_exit_code = 20
         if inst.errno == errno.ETIMEDOUT:
             reason = _(b'timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker)
             )
         else:
             reason = _(b'lock held by %r') % inst.locker
         ui.error(
             _(b"abort: %s: %s\n")
             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
         )
         if not inst.locker:
             ui.error(_(b"(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         detailed_exit_code = 20
         ui.error(
             _(b"abort: could not lock %s: %s\n")
             % (
                 inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror),
             )
         )
     except error.RepoError as inst:
         if isinstance(inst, error.RepoLookupError):
             detailed_exit_code = 10
         ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except error.ResponseError as inst:
         ui.error(_(b"abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if msg is None:
             ui.error(b"\n")
         elif not isinstance(msg, bytes):
             ui.error(b" %r\n" % (msg,))
         elif not msg:
             ui.error(_(b" empty string\n"))
         else:
             ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
         ui.error(_(b"abort: file censored %s\n") % inst)
     except error.WdirUnsupported:
         ui.error(_(b"abort: working directory revision cannot be specified\n"))
     except error.Error as inst:
         if inst.detailed_exit_code is not None:
             detailed_exit_code = inst.detailed_exit_code
         if inst.coarse_exit_code is not None:
             coarse_exit_code = inst.coarse_exit_code
         ui.error(inst.format())
     except error.WorkerError as inst:
         # Don't print a message -- the worker already should have
         return inst.status_code
     except ImportError as inst:
         ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in b"mpatch bdiff".split():
             ui.error(_(b"(did you forget to compile extensions?)\n"))
         elif m in b"zlib".split():
             ui.error(_(b"(is your Python install correct?)\n"))
     except util.urlerr.httperror as inst:
         detailed_exit_code = 100
         ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
     except util.urlerr.urlerror as inst:
         detailed_exit_code = 100
         try:  # usually it is in the form (errno, strerror)
             reason = inst.reason.args[1]
         except (AttributeError, IndexError):
             # it might be anything, for example a string
             reason = inst.reason
         if isinstance(reason, pycompat.unicode):
             # SSLError of Python 2.7.9 contains a unicode
             reason = encoding.unitolocal(reason)
         ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
     except (IOError, OSError) as inst:
         if (
             util.safehasattr(inst, b"args")
             and inst.args
             and inst.args[0] == errno.EPIPE
         ):
             pass
         elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
                 ui.error(
                     _(b"abort: %s: '%s'\n")
                     % (
                         encoding.strtolocal(inst.strerror),
                         stringutil.forcebytestr(inst.filename),
                     )
                 )
             else:
                 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:  # suspicious IOError
             raise
     except MemoryError:
         ui.error(_(b"abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case catch this and pass exit code to caller.
         detailed_exit_code = 254
         coarse_exit_code = inst.code
 
     if ui.configbool(b'ui', b'detailed-exit-code'):
         return detailed_exit_code
     else:
         return coarse_exit_code
 
 
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in [b'tip', b'.', b'null']:
         raise error.InputError(_(b"the name '%s' is reserved") % lbl)
     for c in (b':', b'\0', b'\n', b'\r'):
         if c in lbl:
             raise error.InputError(
                 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
             )
     try:
         int(lbl)
         raise error.InputError(_(b"cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
         raise error.InputError(
             _(b"leading or trailing whitespace in name %r") % lbl
         )
 
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if b'\r' in f or b'\n' in f:
         raise error.InputError(
             _(b"'\\n' and '\\r' disallowed in filenames: %r")
             % pycompat.bytestr(f)
         )
 
 
 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
     checkfilename(f)
     abort, warn = checkportabilityalert(ui)
     if abort or warn:
         msg = util.checkwinfilename(f)
         if msg:
             msg = b"%s: %s" % (msg, procutil.shellquote(f))
             if abort:
                 raise error.InputError(msg)
             ui.warn(_(b"warning: %s\n") % msg)
 
 
 def checkportabilityalert(ui):
     """check if the user's config requests nothing, a warning, or abort for
     non-portable filenames"""
     val = ui.config(b'ui', b'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
     abort = pycompat.iswindows or lval == b'abort'
     warn = bval or lval == b'warn'
     if bval is None and not (warn or abort or lval == b'ignore'):
         raise error.ConfigError(
             _(b"ui.portablefilenames value is invalid ('%s')") % val
         )
     return abort, warn
 
 
 class casecollisionauditor(object):
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
         self._abort = abort
         allfiles = b'\0'.join(dirstate)
         self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
         self._dirstate = dirstate
         # The purpose of _newfiles is so that we don't complain about
         # case collisions if someone were to call this object with the
         # same filename twice.
         self._newfiles = set()
 
     def __call__(self, f):
         if f in self._newfiles:
             return
         fl = encoding.lower(f)
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _(b'possible case-folding collision for %s') % f
             if self._abort:
                 raise error.StateError(msg)
             self._ui.warn(_(b"warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
 
 def filteredhash(repo, maxrev, needobsolete=False):
     """build hash of filtered revisions in the current repoview.
 
     Multiple caches perform up-to-date validation by checking that the
     tiprev and tipnode stored in the cache file match the current repository.
     However, this is not sufficient for validating repoviews because the set
     of revisions in the view may change without the repository tiprev and
     tipnode changing.
 
     This function hashes all the revs filtered from the view (and, optionally,
     all obsolete revs) up to maxrev and returns that SHA-1 digest.
     """
     cl = repo.changelog
     if needobsolete:
         obsrevs = obsolete.getrevs(repo, b'obsolete')
         if not cl.filteredrevs and not obsrevs:
             return None
-        # TODO: obsrevs should be a frozenset, but right now obsolete.getrevs()
-        # may return a set, which is not a hashable type.
-        key = (maxrev, hash(cl.filteredrevs), hash(frozenset(obsrevs)))
+        key = (maxrev, hash(cl.filteredrevs), hash(obsrevs))
     else:
         if not cl.filteredrevs:
             return None
         key = maxrev
         obsrevs = frozenset()
 
     result = cl._filteredrevs_hashcache.get(key)
     if not result:
         revs = sorted(r for r in cl.filteredrevs | obsrevs if r <= maxrev)
         if revs:
             s = hashutil.sha1()
             for rev in revs:
                 s.update(b'%d;' % rev)
             result = s.digest()
             cl._filteredrevs_hashcache[key] = result
     return result
 
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     """yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs"""
 
     def errhandler(err):
         if err.filename == path:
             raise err
 
     samestat = getattr(os.path, 'samestat', None)
     if followsym and samestat is not None:
 
         def adddir(dirlst, dirname):
             dirstat = os.stat(dirname)
             match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
             if not match:
                 dirlst.append(dirstat)
             return not match
 
     else:
         followsym = False
 
     if (seen_dirs is None) and followsym:
         seen_dirs = []
         adddir(seen_dirs, path)
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
         if b'.hg' in dirs:
             yield root  # found a repository
             qroot = os.path.join(root, b'.hg', b'patches')
             if os.path.isdir(os.path.join(qroot, b'.hg')):
                 yield qroot  # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
                 dirs.remove(b'.hg')
             else:
                 dirs[:] = []  # don't descend further
         elif followsym:
             newdirs = []
             for d in dirs:
                 fname = os.path.join(root, d)
                 if adddir(seen_dirs, fname):
                     if os.path.islink(fname):
                         for hgname in walkrepos(fname, True, seen_dirs):
                             yield hgname
                     else:
                         newdirs.append(d)
             dirs[:] = newdirs
 
 
 def binnode(ctx):
     """Return binary node id for a given basectx"""
     node = ctx.node()
     if node is None:
         return ctx.repo().nodeconstants.wdirid
     return node
 
 
 def intrev(ctx):
     """Return integer for a given basectx that can be used in comparison or
     arithmetic operation"""
     rev = ctx.rev()
     if rev is None:
         return wdirrev
     return rev
 
 
 def formatchangeid(ctx):
     """Format changectx as '{rev}:{node|formatnode}', which is the default
     template provided by logcmdutil.changesettemplater"""
     repo = ctx.repo()
     return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
 
 
 def formatrevnode(ui, rev, node):
     """Format given revision and node depending on the current verbosity"""
     if ui.debugflag:
         hexfunc = hex
     else:
         hexfunc = short
     return b'%d:%s' % (rev, hexfunc(node))
 
 
 def resolvehexnodeidprefix(repo, prefix):
     if prefix.startswith(b'x'):
         prefix = prefix[1:]
     try:
         # Uses unfiltered repo because it's faster when prefix is ambiguous.
         # This matches the shortesthexnodeidprefix() function below.
         node = repo.unfiltered().changelog._partialmatch(prefix)
     except error.AmbiguousPrefixLookupError:
         revset = repo.ui.config(
             b'experimental', b'revisions.disambiguatewithin'
         )
         if revset:
             # Clear config to avoid infinite recursion
             configoverrides = {
                 (b'experimental', b'revisions.disambiguatewithin'): None
             }
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
                 matches = []
                 for rev in revs:
                     node = repo.changelog.node(rev)
                     if hex(node).startswith(prefix):
                         matches.append(node)
                 if len(matches) == 1:
                     return matches[0]
         raise
     if node is None:
         return
     repo.changelog.rev(node)  # make sure node isn't filtered
     return node
 
 
 def mayberevnum(repo, prefix):
     """Checks if the given prefix may be mistaken for a revision number"""
     try:
         i = int(prefix)
         # if we are a pure int, then starting with zero will not be
         # confused as a rev; or, obviously, if the int is larger
         # than the value of the tip rev. We still need to disambiguate if
         # prefix == '0', since that *is* a valid revnum.
         if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
             return False
         return True
     except ValueError:
         return False
 
 
 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
     """Find the shortest unambiguous prefix that matches hexnode.
 
     If "cache" is not None, it must be a dictionary that can be used for
     caching between calls to this method.
     """
     # _partialmatch() of filtered changelog could take O(len(repo)) time,
     # which would be unacceptably slow. so we look for hash collision in
     # unfiltered space, which means some hashes may be slightly longer.
 
     minlength = max(minlength, 1)
 
     def disambiguate(prefix):
         """Disambiguate against revnums."""
         if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
             if mayberevnum(repo, prefix):
                 return b'x' + prefix
             else:
                 return prefix
 
         hexnode = hex(node)
         for length in range(len(prefix), len(hexnode) + 1):
             prefix = hexnode[:length]
             if not mayberevnum(repo, prefix):
                 return prefix
 
     cl = repo.unfiltered().changelog
     revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
     if revset:
         revs = None
         if cache is not None:
             revs = cache.get(b'disambiguationrevset')
         if revs is None:
             revs = repo.anyrevs([revset], user=True)
             if cache is not None:
                 cache[b'disambiguationrevset'] = revs
         if cl.rev(node) in revs:
             hexnode = hex(node)
             nodetree = None
             if cache is not None:
                 nodetree = cache.get(b'disambiguationnodetree')
             if not nodetree:
                 if util.safehasattr(parsers, 'nodetree'):
                     # The CExt is the only implementation to provide a nodetree
                     # class so far.
                     index = cl.index
                     if util.safehasattr(index, 'get_cindex'):
                         # the rust wrapper needs to give access to its
                         # internal index
                         index = index.get_cindex()
                     nodetree = parsers.nodetree(index, len(revs))
                     for r in revs:
                         nodetree.insert(r)
                     if cache is not None:
                         cache[b'disambiguationnodetree'] = nodetree
             if nodetree is not None:
                 length = max(nodetree.shortest(node), minlength)
                 prefix = hexnode[:length]
                 return disambiguate(prefix)
             for length in range(minlength, len(hexnode) + 1):
                 matches = []
                 prefix = hexnode[:length]
                 for rev in revs:
                     otherhexnode = repo[rev].hex()
                     if prefix == otherhexnode[:length]:
                         matches.append(otherhexnode)
                 if len(matches) == 1:
                     return disambiguate(prefix)
 
     try:
         return disambiguate(cl.shortest(node, minlength))
     except error.LookupError:
         raise error.RepoLookupError()
 
 
 def isrevsymbol(repo, symbol):
     """Checks if a symbol exists in the repo.
 
     See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
     symbol is an ambiguous nodeid prefix.
     """
     try:
         revsymbol(repo, symbol)
         return True
     except error.RepoLookupError:
         return False
 
 
 def revsymbol(repo, symbol):
     """Returns a context given a single revision symbol (as string).
 
     This is similar to revsingle(), but accepts only a single revision symbol,
     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
     not "max(public())".
     """
     if not isinstance(symbol, bytes):
         msg = (
             b"symbol (%s of type %s) was not a string, did you mean "
             b"repo[symbol]?" % (symbol, type(symbol))
         )
         raise error.ProgrammingError(msg)
     try:
         if symbol in (b'.', b'tip', b'null'):
             return repo[symbol]
 
         try:
             r = int(symbol)
             if b'%d' % r != symbol:
                 raise ValueError
             l = len(repo.changelog)
             if r < 0:
                 r += l
             if r < 0 or r >= l and r != wdirrev:
                 raise ValueError
             return repo[r]
         except error.FilteredIndexError:
             raise
         except (ValueError, OverflowError, IndexError):
             pass
 
         if len(symbol) == 2 * repo.nodeconstants.nodelen:
             try:
                 node = bin(symbol)
                 rev = repo.changelog.rev(node)
                 return repo[rev]
             except error.FilteredLookupError:
                 raise
             except (TypeError, LookupError):
                 pass
 
         # look up bookmarks through the name interface
         try:
             node = repo.names.singlenode(repo, symbol)
             rev = repo.changelog.rev(node)
             return repo[rev]
         except KeyError:
             pass
 
         node = resolvehexnodeidprefix(repo, symbol)
         if node is not None:
             rev = repo.changelog.rev(node)
             return repo[rev]
 
         raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
 
     except error.WdirUnsupported:
         return repo[None]
     except (
         error.FilteredIndexError,
         error.FilteredLookupError,
         error.FilteredRepoLookupError,
     ):
         raise _filterederror(repo, symbol)
 
 
 def _filterederror(repo, changeid):
     """build an exception to be raised about a filtered changeid
 
     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith(b'visible'):
 
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)
 
         # If the changeset is obsolete, enrich the message with the reason
         # that made this changeset not visible
         if ctx.obsolete():
             msg = obsutil._getfilteredreason(repo, changeid, ctx)
         else:
             msg = _(b"hidden revision '%s'") % changeid
 
         hint = _(b'use --hidden to access hidden revisions')
 
         return error.FilteredRepoLookupError(msg, hint=hint)
     msg = _(b"filtered revision '%s' (not in '%s' subset)")
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)
 
 
 def revsingle(repo, revspec, default=b'.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]
 
     l = revrange(repo, [revspec], localalias=localalias)
     if not l:
         raise error.InputError(_(b'empty revision set'))
     return repo[l.last()]
 
 
 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
     return tree and tree[0] in (
         b'range',
         b'rangepre',
         b'rangepost',
         b'rangeall',
     )
 
 
 def revpair(repo, revs):
     if not revs:
         return repo[b'.'], repo[None]
 
     l = revrange(repo, revs)
 
     if not l:
         raise error.InputError(_(b'empty revision range'))
 
     first = l.first()
     second = l.last()
 
     if (
         first == second
         and len(revs) >= 2
         and not all(revrange(repo, [r]) for r in revs)
     ):
         raise error.InputError(_(b'empty revision on one side of range'))
 
     # if top-level is range expression, the result must always be a pair
     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
         return repo[first], repo[None]
 
     return repo[first], repo[second]
 
 
 def revrange(repo, specs, localalias=None):
     """Execute 1 to many revsets and return the union.
 
     This is the preferred mechanism for executing revsets using user-specified
     config options, such as revset aliases.
 
     The revsets specified by ``specs`` will be executed via a chained ``OR``
     expression. If ``specs`` is empty, an empty result is returned.
 
     ``specs`` can contain integers, in which case they are assumed to be
     revision numbers.
 
     It is assumed the revsets are already formatted. If you have arguments
     that need to be expanded in the revset, call ``revsetlang.formatspec()``
     and pass the result as an element of ``specs``.
 
     Specifying a single revset is allowed.
 
     Returns a ``smartset.abstractsmartset`` which is a list-like interface over
     integer revisions.
     """
     allspecs = []
     for spec in specs:
         if isinstance(spec, int):
             spec = revsetlang.formatspec(b'%d', spec)
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)
 
 
 def increasingwindows(windowsize=8, sizelimit=512):
     while True:
         yield windowsize
         if windowsize < sizelimit:
             windowsize *= 2
 
 
 def walkchangerevs(repo, revs, makefilematcher, prepare):
     """Iterate over files and the revs in a "windowed" way.
 
     Callers most commonly need to iterate backwards over the history
     in which they are interested. Doing so has awful (quadratic-looking)
     performance, so we use iterators in a "windowed" way.
 
     We walk a window of revisions in the desired order. Within the
     window, we first walk forwards to gather data, then in the desired
     order (usually backwards) to display it.
 
     This function returns an iterator yielding contexts. Before
     yielding each context, the iterator will first call the prepare
     function on each context in the window in forward order."""
 
     if not revs:
         return []
     change = repo.__getitem__
 
     def iterate():
         it = iter(revs)
         stopiteration = False
         for windowsize in increasingwindows():
             nrevs = []
             for i in pycompat.xrange(windowsize):
                 rev = next(it, None)
                 if rev is None:
                     stopiteration = True
                     break
                 nrevs.append(rev)
             for rev in sorted(nrevs):
                 ctx = change(rev)
                 prepare(ctx, makefilematcher(ctx))
             for rev in nrevs:
                 yield change(rev)
 
             if stopiteration:
                 break
 
     return iterate()
 
 
 def meaningfulparents(repo, ctx):
     """Return list of meaningful (or all if debug) parentrevs for rev.
 
     For merges (two non-nullrev revisions) both parents are meaningful.
     Otherwise the first parent revision is considered meaningful if it
     is not the preceding revision.
     """
     parents = ctx.parents()
     if len(parents) > 1:
         return parents
     if repo.ui.debugflag:
         return [parents[0], repo[nullrev]]
     if parents[0].rev() >= intrev(ctx) - 1:
         return []
     return parents
 
 
 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
     """Return a function that produces paths for presenting to the user.
 
     The returned function takes a repo-relative path and produces a path
     that can be presented in the UI.
 
     Depending on the value of ui.relative-paths, either a repo-relative or
     cwd-relative path will be produced.
 
     legacyrelativevalue is the value to use if ui.relative-paths=legacy
 
     If forcerelativevalue is not None, then that value will be used regardless
     of what ui.relative-paths is set to.
     """
     if forcerelativevalue is not None:
         relative = forcerelativevalue
     else:
         config = repo.ui.config(b'ui', b'relative-paths')
         if config == b'legacy':
             relative = legacyrelativevalue
         else:
             relative = stringutil.parsebool(config)
             if relative is None:
                 raise error.ConfigError(
                     _(b"ui.relative-paths is not a boolean ('%s')") % config
                 )
 
     if relative:
         cwd = repo.getcwd()
         if cwd != b'':
             # this branch would work even if cwd == b'' (ie cwd = repo
             # root), but its generality makes the returned function slower
             pathto = repo.pathto
             return lambda f: pathto(f, cwd)
     if repo.ui.configbool(b'ui', b'slash'):
         return lambda f: f
     else:
         return util.localpath
 
 
 def subdiruipathfn(subpath, uipathfn):
     '''Create a new uipathfn that treats the file as relative to subpath.'''
     return lambda f: uipathfn(posixpath.join(subpath, f))
 
 
 def anypats(pats, opts):
     """Checks if any patterns, including --include and --exclude were given.
 
     Some commands (e.g. addremove) use this condition for deciding whether to
     print absolute or relative paths.
     """
     return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
 
 
 def expandpats(pats):
     """Expand bare globs when running on windows.
     On posix we assume it has already been done by sh."""
     if not util.expandglobs:
         return list(pats)
     ret = []
     for kindpat in pats:
         kind, pat = matchmod._patsplit(kindpat, None)
         if kind is None:
             try:
                 globbed = glob.glob(pat)
             except re.error:
                 globbed = [pat]
             if globbed:
                 ret.extend(globbed)
                 continue
         ret.append(kindpat)
     return ret
 
 
 def matchandpats(
     ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
 ):
     """Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
     is provided."""
     if opts is None:
         opts = {}
     if not globbed and default == b'relpath':
         pats = expandpats(pats or [])
 
     uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
 
     def bad(f, msg):
         ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
 
     if badfn is None:
         badfn = bad
 
     m = ctx.match(
         pats,
         opts.get(b'include'),
         opts.get(b'exclude'),
         default,
         listsubrepos=opts.get(b'subrepos'),
         badfn=badfn,
     )
 
     if m.always():
         pats = []
     return m, pats
 
 
947 def match(
945 def match(
948 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
946 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
949 ):
947 ):
950 '''Return a matcher that will warn about bad matches.'''
948 '''Return a matcher that will warn about bad matches.'''
951 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
949 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
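

# A minimal usage sketch (illustrative): filter the files of a changectx
# with a matcher built by match(); `ctx` is assumed to be something like
# repo[b'.'].
def _match_example(ctx):
    m = match(ctx, pats=[b'glob:**.py'])
    return [f for f in ctx if m(f)]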


def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()


def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)


def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]


def getorigvfs(ui, repo):
    """return a vfs suitable for saving 'orig' files

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))


def backuppath(ui, repo, filepath):
    """customize where working copy backup files (.orig files) are created

    Fetch user-defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    """
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
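

# A hypothetical illustration of the two configurations handled above: with
# no [ui] origbackuppath set, the backup lands next to the file; with one
# set, it lands under the configured directory instead.
def _backuppath_example(ui, repo):
    # default: <repo root>/dir/f.txt.orig
    # configured: <origbackuppath>/dir/f.txt
    return backuppath(ui, repo, b'dir/f.txt')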


class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))
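

# A minimal usage sketch (illustrative): wrap a set of revs so it can answer
# membership questions about nodes instead. `node` is assumed to be a known
# binary node id (changelog.rev() would raise LookupError otherwise).
def _containsnode_example(repo, node):
    draftrevs = repo.revs(b'draft()')
    return node in _containsnode(repo, draftrevs)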


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = repo.nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, which might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )
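

# Illustrative shapes of the 'replacements' argument accepted above (node
# values are hypothetical binary ids):
#
#   {(old,): (new,)}    # plain rewrite of one node
#   {(a, b): (c,)}      # fold: a and b replaced by c
#   {(old,): ()}        # prune: old goes away without a successor
#   [old1, old2]        # bare iterable, treated as prunes
def _cleanupnodes_example(repo, old, new):
    # record that `old` was rewritten into `new` by a hypothetical command
    cleanupnodes(repo, {(old,): (new,)}, operation=b'example')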


def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.InputError(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.InputError(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
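

# A minimal usage sketch (illustrative): drive addremove() over the whole
# working copy with a 50% rename-detection threshold; the opts keys mirror
# the command-line flags handled above.
def _addremove_example(repo):
    m = matchall(repo)
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    opts = {b'similarity': b'50', b'dry_run': False}
    return addremove(repo, m, b'', uipathfn, opts=opts)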


def marktouched(repo, files, similarity=0.0):
    """Assert that files have somehow been operated upon. files are relative to
    the repo root."""
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    """Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean."""
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        entry = dirstate.get_entry(abs)
        if (not entry.any_tracked) and audit_path.check(abs):
            unknown.append(abs)
        elif (not entry.removed) and not st:
            deleted.append(abs)
        elif entry.removed and st:
            forgotten.append(abs)
        # for finding renames
        elif entry.removed and not st:
            removed.append(abs)
        elif entry.added:
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    """Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied."""
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        """looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev."""
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn
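

# A minimal usage sketch (illustrative): collect the (dest, source) copy
# pairs recorded for each draft revision.
def _getcopiesfn_example(repo):
    copiesfn = getcopiesfn(repo)
    return {ctx.rev(): copiesfn(ctx) for ctx in repo.set(b'draft()')}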


def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        entry = repo.dirstate.get_entry(dst)
        if (entry.added or not entry.tracked) and not dryrun:
            repo.dirstate.set_tracked(dst)
    else:
        if repo.dirstate.get_entry(origsrc).added and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if not repo.dirstate.get_entry(dst).tracked and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), repo.nullid)
    s = newctx.status(oldctx, match=match)

    for f in s.modified:
        ds.update_file_p1(f, p1_tracked=True)

    for f in s.added:
        ds.update_file_p1(f, p1_tracked=False)

    for f in s.removed:
        ds.update_file_p1(f, p1_tracked=True)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or not ds.get_entry(dst).added:
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()


def filterrequirements(requirements):
    """filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: requirements which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
        wc, store = set(), set()
        for r in requirements:
            if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
                wc.add(r)
            else:
                store.add(r)
        return wc, store
    return requirements, None
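

# A minimal usage sketch (illustrative): split a repository's live
# requirement set; storereq is None when share-safe is not enabled.
def _filterrequirements_example(repo):
    wcreq, storereq = filterrequirements(repo.requirements)
    return wcreq, storereq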


def istreemanifest(repo):
    """returns whether the repository is using treemanifest or not"""
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements


def writereporequirements(repo, requirements=None):
    """writes requirements for the repo

    Requirements are written to .hg/requires and .hg/store/requires based
    on whether share-safe mode is enabled and which requirements are wdir
    requirements and which are store requirements
    """
    if requirements:
        repo.requirements = requirements
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
    elif repo.ui.configbool(b'format', b'usestore'):
        # only remove store requires if we are using store
        repo.svfs.tryunlink(b'requires')


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used, as it has been set in
    the instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict, e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def tracked_paths(self, obj):
        return [self.join(obj, path) for path in self.paths]

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = self.tracked_paths(obj)

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = self.tracked_paths(obj)
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x
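

# A minimal sketch of a concrete subclass (modeled on how repository classes
# use this decorator elsewhere; the names below are illustrative): join()
# resolves the tracked file names against the decorated object's vfs, and
# the owning object is expected to carry a ``_filecache`` dict.
class _examplefilecache(filecache):
    def join(self, obj, fname):
        return obj.vfs.join(fname)


# which a hypothetical consumer would then apply as:
#
#   class thing(object):
#       @_examplefilecache(b'somefile')
#       def cached(self):
#           return expensive_computation()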


def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError, error.InputError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
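

# A hypothetical [extdata] configuration consumed by the function above,
# where the command prints one '<revspec> <value>' record per line:
#
#   [extdata]
#   releases = shell:cat .hg/release-map
#
def _extdatasource_example(repo):
    # assumes the hypothetical 'releases' source configured as sketched
    return extdatasource(repo, b'releases')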


class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
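

# A minimal usage sketch (illustrative): drive the bar over a sequence of
# byte-string items, letting the context manager call complete() on exit.
# `updatebar` is assumed to be whatever callback the ui layer provides,
# taking (topic, pos, item, unit, total).
def _progress_example(ui, updatebar, items):
    with progress(
        ui, updatebar, b'example', unit=b'items', total=len(items)
    ) as prog:
        for item in items:
            prog.increment(item=item)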


def gdinitconfig(ui):
    """helper function to know whether a repo should be created using
    generaldelta"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know whether incoming deltas should be optimised"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')
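
# For illustration (hedged): the configuration consulted by the two helpers
# above lives in an hgrc file, e.g.
#
#     [format]
#     generaldelta = yes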


class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))
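
# A minimal usage sketch (the vfs object and file name are assumptions,
# not part of this class):
#
#     f = simplekeyvaluefile(repo.vfs, b'last-run.txt')
#     f.write({b'user': b'alice', b'count': b'3'}, firstline=b'v1')
#     d = f.read(firstlinenonkeyval=True)
#     # d == {b'__firstline': b'v1', b'user': b'alice', b'count': b'3'}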


_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        if m:
            assert isinstance(m, matchmod.basematcher)
            # The command itself will complain about files that don't exist, so
            # don't duplicate the message.
            return matchmod.badmatch(m, lambda fn, msg: None)
        else:
            return matchall(repo)

    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]

    fileprefetchhooks(repo, revbadmatches)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()
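
# A hedged sketch of how an extension might register a prefetch hook;
# the extension name and hook body are hypothetical:
#
#     def _myprefetch(repo, revmatches):
#         for rev, match in revmatches:
#             pass  # fetch the files selected by 'match' at 'rev'
#
#     fileprefetchhooks.add(b'myext', _myprefetch)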

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
    """register a callback to issue a summary after the transaction is closed

    If as_validator is true, then the callbacks are registered as transaction
    validators instead.
    """

    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        if as_validator:
            otr.addvalidator(newcat, wrapped)
        else:
            otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            if as_validator:
                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                msg = _(b'obsoleted %i changesets\n')
                if as_validator:
                    msg = _(b'obsoleting %i changesets\n')
                repo.ui.status(msg % len(obsoleted))

    if obsolete.isenabled(
        repo, obsolete.createmarkersopt
    ) and repo.ui.configbool(
        b'experimental', b'evolution.report-instabilities'
    ):
        instabilitytypes = [
            (b'orphan', b'orphan'),
            (b'phase-divergent', b'phasedivergent'),
            (b'content-divergent', b'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(
                    set(obsolete.getrevs(repo, revset)) - filtered
                )
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)

        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (
                    newinstabilitycounts[instability]
                    - oldinstabilitycounts[instability]
                )
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally, but the term has not
                # been exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            published = []
            for revs, (old, new) in tr.changes.get(b'phases', []):
                if new != phases.public:
                    continue
                published.extend(rev for rev in revs if rev < origrepolen)
            if not published:
                return
            msg = _(b'%d local changesets published\n')
            if as_validator:
                msg = _(b'%d local changesets will be published\n')
            repo.ui.status(msg % len(published))
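
# A hedged sketch of a typical call site (the transaction object comes
# from repo.transaction(); b'pull' is one of the sources matched above):
#
#     with repo.transaction(b'pull') as tr:
#         registersummarycallback(repo, tr, txnname=b'pull')
#         ...  # apply the incoming changegroup; summaries print on close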


def getinstabilitymessage(delta, instability):
    """function to return the warning message to show about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)


def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)


def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(filtername)
    # possible improvement: we could restrict the check to the affected
    # branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
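
# For context (hedged): this check is typically enabled through the
# experimental single-head configuration, e.g.
#
#     [experimental]
#     single-head-per-branch = yes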


def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink


def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not specs:
        return repo

    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate the branch/tags caches until
    # we can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)
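
# The gate checked above, for illustration (hedged):
#
#     [experimental]
#     directaccess = True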


def _getrevsfromsymbols(repo, symbols):
    """parses the list of symbols and returns the set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark

    If the bookmarked revision isn't a head, an empty set will be returned.
    """
    return repo.revs(format_bookmark_revspec(mark))


def format_bookmark_revspec(mark):
    """Build a revset expression to select revisions reachable by a given
    bookmark"""
    mark = b'literal:' + mark
    return revsetlang.formatspec(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
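
# For illustration (the bookmark name b'feature' is hypothetical), the
# expression built above expands to roughly:
#
#     ancestors(bookmark("literal:feature"))
#       - ancestors(head() and not bookmark("literal:feature"))
#       - ancestors(bookmark() and not bookmark("literal:feature"))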