##// END OF EJS Templates
errors: catch urllib errors specifically instead of using safehasattr()...
Martin von Zweigbergk -
r46442:ae00e170 default
parent child Browse files
Show More
@@ -1,2301 +1,2301 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import os
12 import os
13 import posixpath
13 import posixpath
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28 from .pycompat import getattr
28 from .pycompat import getattr
29 from .thirdparty import attr
29 from .thirdparty import attr
30 from . import (
30 from . import (
31 copies as copiesmod,
31 copies as copiesmod,
32 encoding,
32 encoding,
33 error,
33 error,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 obsutil,
36 obsutil,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 policy,
39 policy,
40 pycompat,
40 pycompat,
41 requirements as requirementsmod,
41 requirements as requirementsmod,
42 revsetlang,
42 revsetlang,
43 similar,
43 similar,
44 smartset,
44 smartset,
45 url,
45 url,
46 util,
46 util,
47 vfs,
47 vfs,
48 )
48 )
49
49
50 from .utils import (
50 from .utils import (
51 hashutil,
51 hashutil,
52 procutil,
52 procutil,
53 stringutil,
53 stringutil,
54 )
54 )
55
55
56 if pycompat.iswindows:
56 if pycompat.iswindows:
57 from . import scmwindows as scmplatform
57 from . import scmwindows as scmplatform
58 else:
58 else:
59 from . import scmposix as scmplatform
59 from . import scmposix as scmplatform
60
60
61 parsers = policy.importmod('parsers')
61 parsers = policy.importmod('parsers')
62 rustrevlog = policy.importrust('revlog')
62 rustrevlog = policy.importrust('revlog')
63
63
64 termsize = scmplatform.termsize
64 termsize = scmplatform.termsize
65
65
66
66
67 @attr.s(slots=True, repr=False)
67 @attr.s(slots=True, repr=False)
68 class status(object):
68 class status(object):
69 '''Struct with a list of files per status.
69 '''Struct with a list of files per status.
70
70
71 The 'deleted', 'unknown' and 'ignored' properties are only
71 The 'deleted', 'unknown' and 'ignored' properties are only
72 relevant to the working copy.
72 relevant to the working copy.
73 '''
73 '''
74
74
75 modified = attr.ib(default=attr.Factory(list))
75 modified = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
77 removed = attr.ib(default=attr.Factory(list))
77 removed = attr.ib(default=attr.Factory(list))
78 deleted = attr.ib(default=attr.Factory(list))
78 deleted = attr.ib(default=attr.Factory(list))
79 unknown = attr.ib(default=attr.Factory(list))
79 unknown = attr.ib(default=attr.Factory(list))
80 ignored = attr.ib(default=attr.Factory(list))
80 ignored = attr.ib(default=attr.Factory(list))
81 clean = attr.ib(default=attr.Factory(list))
81 clean = attr.ib(default=attr.Factory(list))
82
82
83 def __iter__(self):
83 def __iter__(self):
84 yield self.modified
84 yield self.modified
85 yield self.added
85 yield self.added
86 yield self.removed
86 yield self.removed
87 yield self.deleted
87 yield self.deleted
88 yield self.unknown
88 yield self.unknown
89 yield self.ignored
89 yield self.ignored
90 yield self.clean
90 yield self.clean
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return (
93 return (
94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
95 r'unknown=%s, ignored=%s, clean=%s>'
95 r'unknown=%s, ignored=%s, clean=%s>'
96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
97
97
98
98
99 def itersubrepos(ctx1, ctx2):
99 def itersubrepos(ctx1, ctx2):
100 """find subrepos in ctx1 or ctx2"""
100 """find subrepos in ctx1 or ctx2"""
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # has been modified (in ctx2) but not yet committed (in ctx1).
103 # has been modified (in ctx2) but not yet committed (in ctx1).
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106
106
107 missing = set()
107 missing = set()
108
108
109 for subpath in ctx2.substate:
109 for subpath in ctx2.substate:
110 if subpath not in ctx1.substate:
110 if subpath not in ctx1.substate:
111 del subpaths[subpath]
111 del subpaths[subpath]
112 missing.add(subpath)
112 missing.add(subpath)
113
113
114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
115 yield subpath, ctx.sub(subpath)
115 yield subpath, ctx.sub(subpath)
116
116
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # status and diff will have an accurate result when it does
118 # status and diff will have an accurate result when it does
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # against itself.
120 # against itself.
121 for subpath in missing:
121 for subpath in missing:
122 yield subpath, ctx2.nullsub(subpath, ctx1)
122 yield subpath, ctx2.nullsub(subpath, ctx1)
123
123
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 '''
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
132 ctx = repo[n]
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
134 secretlist.append(n)
135
135
136 if secretlist:
136 if secretlist:
137 ui.status(
137 ui.status(
138 _(b"no changes found (ignored %d secret changesets)\n")
138 _(b"no changes found (ignored %d secret changesets)\n")
139 % len(secretlist)
139 % len(secretlist)
140 )
140 )
141 else:
141 else:
142 ui.status(_(b"no changes found\n"))
142 ui.status(_(b"no changes found\n"))
143
143
144
144
145 def callcatch(ui, func):
145 def callcatch(ui, func):
146 """call func() with global exception handling
146 """call func() with global exception handling
147
147
148 return func() if no exception happens. otherwise do some error handling
148 return func() if no exception happens. otherwise do some error handling
149 and return an exit code accordingly. does not handle all exceptions.
149 and return an exit code accordingly. does not handle all exceptions.
150 """
150 """
151 coarse_exit_code = -1
151 coarse_exit_code = -1
152 detailed_exit_code = -1
152 detailed_exit_code = -1
153 try:
153 try:
154 try:
154 try:
155 return func()
155 return func()
156 except: # re-raises
156 except: # re-raises
157 ui.traceback()
157 ui.traceback()
158 raise
158 raise
159 # Global exception handling, alphabetically
159 # Global exception handling, alphabetically
160 # Mercurial-specific first, followed by built-in and library exceptions
160 # Mercurial-specific first, followed by built-in and library exceptions
161 except error.LockHeld as inst:
161 except error.LockHeld as inst:
162 detailed_exit_code = 20
162 detailed_exit_code = 20
163 if inst.errno == errno.ETIMEDOUT:
163 if inst.errno == errno.ETIMEDOUT:
164 reason = _(b'timed out waiting for lock held by %r') % (
164 reason = _(b'timed out waiting for lock held by %r') % (
165 pycompat.bytestr(inst.locker)
165 pycompat.bytestr(inst.locker)
166 )
166 )
167 else:
167 else:
168 reason = _(b'lock held by %r') % inst.locker
168 reason = _(b'lock held by %r') % inst.locker
169 ui.error(
169 ui.error(
170 _(b"abort: %s: %s\n")
170 _(b"abort: %s: %s\n")
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
172 )
172 )
173 if not inst.locker:
173 if not inst.locker:
174 ui.error(_(b"(lock might be very busy)\n"))
174 ui.error(_(b"(lock might be very busy)\n"))
175 except error.LockUnavailable as inst:
175 except error.LockUnavailable as inst:
176 detailed_exit_code = 20
176 detailed_exit_code = 20
177 ui.error(
177 ui.error(
178 _(b"abort: could not lock %s: %s\n")
178 _(b"abort: could not lock %s: %s\n")
179 % (
179 % (
180 inst.desc or stringutil.forcebytestr(inst.filename),
180 inst.desc or stringutil.forcebytestr(inst.filename),
181 encoding.strtolocal(inst.strerror),
181 encoding.strtolocal(inst.strerror),
182 )
182 )
183 )
183 )
184 except error.OutOfBandError as inst:
184 except error.OutOfBandError as inst:
185 if inst.args:
185 if inst.args:
186 msg = _(b"abort: remote error:\n")
186 msg = _(b"abort: remote error:\n")
187 else:
187 else:
188 msg = _(b"abort: remote error\n")
188 msg = _(b"abort: remote error\n")
189 ui.error(msg)
189 ui.error(msg)
190 if inst.args:
190 if inst.args:
191 ui.error(b''.join(inst.args))
191 ui.error(b''.join(inst.args))
192 if inst.hint:
192 if inst.hint:
193 ui.error(b'(%s)\n' % inst.hint)
193 ui.error(b'(%s)\n' % inst.hint)
194 except error.RepoError as inst:
194 except error.RepoError as inst:
195 ui.error(_(b"abort: %s!\n") % inst)
195 ui.error(_(b"abort: %s!\n") % inst)
196 if inst.hint:
196 if inst.hint:
197 ui.error(_(b"(%s)\n") % inst.hint)
197 ui.error(_(b"(%s)\n") % inst.hint)
198 except error.ResponseError as inst:
198 except error.ResponseError as inst:
199 ui.error(_(b"abort: %s") % inst.args[0])
199 ui.error(_(b"abort: %s") % inst.args[0])
200 msg = inst.args[1]
200 msg = inst.args[1]
201 if isinstance(msg, type(u'')):
201 if isinstance(msg, type(u'')):
202 msg = pycompat.sysbytes(msg)
202 msg = pycompat.sysbytes(msg)
203 if not isinstance(msg, bytes):
203 if not isinstance(msg, bytes):
204 ui.error(b" %r\n" % (msg,))
204 ui.error(b" %r\n" % (msg,))
205 elif not msg:
205 elif not msg:
206 ui.error(_(b" empty string\n"))
206 ui.error(_(b" empty string\n"))
207 else:
207 else:
208 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
208 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
209 except error.CensoredNodeError as inst:
209 except error.CensoredNodeError as inst:
210 ui.error(_(b"abort: file censored %s!\n") % inst)
210 ui.error(_(b"abort: file censored %s!\n") % inst)
211 except error.StorageError as inst:
211 except error.StorageError as inst:
212 ui.error(_(b"abort: %s!\n") % inst)
212 ui.error(_(b"abort: %s!\n") % inst)
213 if inst.hint:
213 if inst.hint:
214 ui.error(_(b"(%s)\n") % inst.hint)
214 ui.error(_(b"(%s)\n") % inst.hint)
215 except error.InterventionRequired as inst:
215 except error.InterventionRequired as inst:
216 ui.error(b"%s\n" % inst)
216 ui.error(b"%s\n" % inst)
217 if inst.hint:
217 if inst.hint:
218 ui.error(_(b"(%s)\n") % inst.hint)
218 ui.error(_(b"(%s)\n") % inst.hint)
219 detailed_exit_code = 240
219 detailed_exit_code = 240
220 coarse_exit_code = 1
220 coarse_exit_code = 1
221 except error.WdirUnsupported:
221 except error.WdirUnsupported:
222 ui.error(_(b"abort: working directory revision cannot be specified\n"))
222 ui.error(_(b"abort: working directory revision cannot be specified\n"))
223 except error.Abort as inst:
223 except error.Abort as inst:
224 if isinstance(inst, error.InputError):
224 if isinstance(inst, error.InputError):
225 detailed_exit_code = 10
225 detailed_exit_code = 10
226 ui.error(_(b"abort: %s\n") % inst.message)
226 ui.error(_(b"abort: %s\n") % inst.message)
227 if inst.hint:
227 if inst.hint:
228 ui.error(_(b"(%s)\n") % inst.hint)
228 ui.error(_(b"(%s)\n") % inst.hint)
229 except error.WorkerError as inst:
229 except error.WorkerError as inst:
230 # Don't print a message -- the worker already should have
230 # Don't print a message -- the worker already should have
231 return inst.status_code
231 return inst.status_code
232 except ImportError as inst:
232 except ImportError as inst:
233 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
233 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
234 m = stringutil.forcebytestr(inst).split()[-1]
234 m = stringutil.forcebytestr(inst).split()[-1]
235 if m in b"mpatch bdiff".split():
235 if m in b"mpatch bdiff".split():
236 ui.error(_(b"(did you forget to compile extensions?)\n"))
236 ui.error(_(b"(did you forget to compile extensions?)\n"))
237 elif m in b"zlib".split():
237 elif m in b"zlib".split():
238 ui.error(_(b"(is your Python install correct?)\n"))
238 ui.error(_(b"(is your Python install correct?)\n"))
239 except (IOError, OSError) as inst:
239 except util.urlerr.httperror as inst:
240 if util.safehasattr(inst, b"code"): # HTTPError
241 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
240 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
242 elif util.safehasattr(inst, b"reason"): # URLError or SSLError
241 except util.urlerr.urlerror as inst:
243 try: # usually it is in the form (errno, strerror)
242 try: # usually it is in the form (errno, strerror)
244 reason = inst.reason.args[1]
243 reason = inst.reason.args[1]
245 except (AttributeError, IndexError):
244 except (AttributeError, IndexError):
246 # it might be anything, for example a string
245 # it might be anything, for example a string
247 reason = inst.reason
246 reason = inst.reason
248 if isinstance(reason, pycompat.unicode):
247 if isinstance(reason, pycompat.unicode):
249 # SSLError of Python 2.7.9 contains a unicode
248 # SSLError of Python 2.7.9 contains a unicode
250 reason = encoding.unitolocal(reason)
249 reason = encoding.unitolocal(reason)
251 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
250 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
252 elif (
251 except (IOError, OSError) as inst:
252 if (
253 util.safehasattr(inst, b"args")
253 util.safehasattr(inst, b"args")
254 and inst.args
254 and inst.args
255 and inst.args[0] == errno.EPIPE
255 and inst.args[0] == errno.EPIPE
256 ):
256 ):
257 pass
257 pass
258 elif getattr(inst, "strerror", None): # common IOError or OSError
258 elif getattr(inst, "strerror", None): # common IOError or OSError
259 if getattr(inst, "filename", None) is not None:
259 if getattr(inst, "filename", None) is not None:
260 ui.error(
260 ui.error(
261 _(b"abort: %s: '%s'\n")
261 _(b"abort: %s: '%s'\n")
262 % (
262 % (
263 encoding.strtolocal(inst.strerror),
263 encoding.strtolocal(inst.strerror),
264 stringutil.forcebytestr(inst.filename),
264 stringutil.forcebytestr(inst.filename),
265 )
265 )
266 )
266 )
267 else:
267 else:
268 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
268 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
269 else: # suspicious IOError
269 else: # suspicious IOError
270 raise
270 raise
271 except MemoryError:
271 except MemoryError:
272 ui.error(_(b"abort: out of memory\n"))
272 ui.error(_(b"abort: out of memory\n"))
273 except SystemExit as inst:
273 except SystemExit as inst:
274 # Commands shouldn't sys.exit directly, but give a return code.
274 # Commands shouldn't sys.exit directly, but give a return code.
275 # Just in case catch this and and pass exit code to caller.
275 # Just in case catch this and and pass exit code to caller.
276 detailed_exit_code = 254
276 detailed_exit_code = 254
277 coarse_exit_code = inst.code
277 coarse_exit_code = inst.code
278
278
279 if ui.configbool(b'ui', b'detailed-exit-code'):
279 if ui.configbool(b'ui', b'detailed-exit-code'):
280 return detailed_exit_code
280 return detailed_exit_code
281 else:
281 else:
282 return coarse_exit_code
282 return coarse_exit_code
283
283
284
284
285 def checknewlabel(repo, lbl, kind):
285 def checknewlabel(repo, lbl, kind):
286 # Do not use the "kind" parameter in ui output.
286 # Do not use the "kind" parameter in ui output.
287 # It makes strings difficult to translate.
287 # It makes strings difficult to translate.
288 if lbl in [b'tip', b'.', b'null']:
288 if lbl in [b'tip', b'.', b'null']:
289 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
289 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
290 for c in (b':', b'\0', b'\n', b'\r'):
290 for c in (b':', b'\0', b'\n', b'\r'):
291 if c in lbl:
291 if c in lbl:
292 raise error.Abort(
292 raise error.Abort(
293 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
293 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
294 )
294 )
295 try:
295 try:
296 int(lbl)
296 int(lbl)
297 raise error.Abort(_(b"cannot use an integer as a name"))
297 raise error.Abort(_(b"cannot use an integer as a name"))
298 except ValueError:
298 except ValueError:
299 pass
299 pass
300 if lbl.strip() != lbl:
300 if lbl.strip() != lbl:
301 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
301 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
302
302
303
303
304 def checkfilename(f):
304 def checkfilename(f):
305 '''Check that the filename f is an acceptable filename for a tracked file'''
305 '''Check that the filename f is an acceptable filename for a tracked file'''
306 if b'\r' in f or b'\n' in f:
306 if b'\r' in f or b'\n' in f:
307 raise error.Abort(
307 raise error.Abort(
308 _(b"'\\n' and '\\r' disallowed in filenames: %r")
308 _(b"'\\n' and '\\r' disallowed in filenames: %r")
309 % pycompat.bytestr(f)
309 % pycompat.bytestr(f)
310 )
310 )
311
311
312
312
313 def checkportable(ui, f):
313 def checkportable(ui, f):
314 '''Check if filename f is portable and warn or abort depending on config'''
314 '''Check if filename f is portable and warn or abort depending on config'''
315 checkfilename(f)
315 checkfilename(f)
316 abort, warn = checkportabilityalert(ui)
316 abort, warn = checkportabilityalert(ui)
317 if abort or warn:
317 if abort or warn:
318 msg = util.checkwinfilename(f)
318 msg = util.checkwinfilename(f)
319 if msg:
319 if msg:
320 msg = b"%s: %s" % (msg, procutil.shellquote(f))
320 msg = b"%s: %s" % (msg, procutil.shellquote(f))
321 if abort:
321 if abort:
322 raise error.Abort(msg)
322 raise error.Abort(msg)
323 ui.warn(_(b"warning: %s\n") % msg)
323 ui.warn(_(b"warning: %s\n") % msg)
324
324
325
325
326 def checkportabilityalert(ui):
326 def checkportabilityalert(ui):
327 '''check if the user's config requests nothing, a warning, or abort for
327 '''check if the user's config requests nothing, a warning, or abort for
328 non-portable filenames'''
328 non-portable filenames'''
329 val = ui.config(b'ui', b'portablefilenames')
329 val = ui.config(b'ui', b'portablefilenames')
330 lval = val.lower()
330 lval = val.lower()
331 bval = stringutil.parsebool(val)
331 bval = stringutil.parsebool(val)
332 abort = pycompat.iswindows or lval == b'abort'
332 abort = pycompat.iswindows or lval == b'abort'
333 warn = bval or lval == b'warn'
333 warn = bval or lval == b'warn'
334 if bval is None and not (warn or abort or lval == b'ignore'):
334 if bval is None and not (warn or abort or lval == b'ignore'):
335 raise error.ConfigError(
335 raise error.ConfigError(
336 _(b"ui.portablefilenames value is invalid ('%s')") % val
336 _(b"ui.portablefilenames value is invalid ('%s')") % val
337 )
337 )
338 return abort, warn
338 return abort, warn
339
339
340
340
341 class casecollisionauditor(object):
341 class casecollisionauditor(object):
342 def __init__(self, ui, abort, dirstate):
342 def __init__(self, ui, abort, dirstate):
343 self._ui = ui
343 self._ui = ui
344 self._abort = abort
344 self._abort = abort
345 allfiles = b'\0'.join(dirstate)
345 allfiles = b'\0'.join(dirstate)
346 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
346 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
347 self._dirstate = dirstate
347 self._dirstate = dirstate
348 # The purpose of _newfiles is so that we don't complain about
348 # The purpose of _newfiles is so that we don't complain about
349 # case collisions if someone were to call this object with the
349 # case collisions if someone were to call this object with the
350 # same filename twice.
350 # same filename twice.
351 self._newfiles = set()
351 self._newfiles = set()
352
352
353 def __call__(self, f):
353 def __call__(self, f):
354 if f in self._newfiles:
354 if f in self._newfiles:
355 return
355 return
356 fl = encoding.lower(f)
356 fl = encoding.lower(f)
357 if fl in self._loweredfiles and f not in self._dirstate:
357 if fl in self._loweredfiles and f not in self._dirstate:
358 msg = _(b'possible case-folding collision for %s') % f
358 msg = _(b'possible case-folding collision for %s') % f
359 if self._abort:
359 if self._abort:
360 raise error.Abort(msg)
360 raise error.Abort(msg)
361 self._ui.warn(_(b"warning: %s\n") % msg)
361 self._ui.warn(_(b"warning: %s\n") % msg)
362 self._loweredfiles.add(fl)
362 self._loweredfiles.add(fl)
363 self._newfiles.add(f)
363 self._newfiles.add(f)
364
364
365
365
366 def filteredhash(repo, maxrev):
366 def filteredhash(repo, maxrev):
367 """build hash of filtered revisions in the current repoview.
367 """build hash of filtered revisions in the current repoview.
368
368
369 Multiple caches perform up-to-date validation by checking that the
369 Multiple caches perform up-to-date validation by checking that the
370 tiprev and tipnode stored in the cache file match the current repository.
370 tiprev and tipnode stored in the cache file match the current repository.
371 However, this is not sufficient for validating repoviews because the set
371 However, this is not sufficient for validating repoviews because the set
372 of revisions in the view may change without the repository tiprev and
372 of revisions in the view may change without the repository tiprev and
373 tipnode changing.
373 tipnode changing.
374
374
375 This function hashes all the revs filtered from the view and returns
375 This function hashes all the revs filtered from the view and returns
376 that SHA-1 digest.
376 that SHA-1 digest.
377 """
377 """
378 cl = repo.changelog
378 cl = repo.changelog
379 if not cl.filteredrevs:
379 if not cl.filteredrevs:
380 return None
380 return None
381 key = cl._filteredrevs_hashcache.get(maxrev)
381 key = cl._filteredrevs_hashcache.get(maxrev)
382 if not key:
382 if not key:
383 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
383 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
384 if revs:
384 if revs:
385 s = hashutil.sha1()
385 s = hashutil.sha1()
386 for rev in revs:
386 for rev in revs:
387 s.update(b'%d;' % rev)
387 s.update(b'%d;' % rev)
388 key = s.digest()
388 key = s.digest()
389 cl._filteredrevs_hashcache[maxrev] = key
389 cl._filteredrevs_hashcache[maxrev] = key
390 return key
390 return key
391
391
392
392
393 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
393 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
394 '''yield every hg repository under path, always recursively.
394 '''yield every hg repository under path, always recursively.
395 The recurse flag will only control recursion into repo working dirs'''
395 The recurse flag will only control recursion into repo working dirs'''
396
396
397 def errhandler(err):
397 def errhandler(err):
398 if err.filename == path:
398 if err.filename == path:
399 raise err
399 raise err
400
400
401 samestat = getattr(os.path, 'samestat', None)
401 samestat = getattr(os.path, 'samestat', None)
402 if followsym and samestat is not None:
402 if followsym and samestat is not None:
403
403
404 def adddir(dirlst, dirname):
404 def adddir(dirlst, dirname):
405 dirstat = os.stat(dirname)
405 dirstat = os.stat(dirname)
406 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
406 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
407 if not match:
407 if not match:
408 dirlst.append(dirstat)
408 dirlst.append(dirstat)
409 return not match
409 return not match
410
410
411 else:
411 else:
412 followsym = False
412 followsym = False
413
413
414 if (seen_dirs is None) and followsym:
414 if (seen_dirs is None) and followsym:
415 seen_dirs = []
415 seen_dirs = []
416 adddir(seen_dirs, path)
416 adddir(seen_dirs, path)
417 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
417 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
418 dirs.sort()
418 dirs.sort()
419 if b'.hg' in dirs:
419 if b'.hg' in dirs:
420 yield root # found a repository
420 yield root # found a repository
421 qroot = os.path.join(root, b'.hg', b'patches')
421 qroot = os.path.join(root, b'.hg', b'patches')
422 if os.path.isdir(os.path.join(qroot, b'.hg')):
422 if os.path.isdir(os.path.join(qroot, b'.hg')):
423 yield qroot # we have a patch queue repo here
423 yield qroot # we have a patch queue repo here
424 if recurse:
424 if recurse:
425 # avoid recursing inside the .hg directory
425 # avoid recursing inside the .hg directory
426 dirs.remove(b'.hg')
426 dirs.remove(b'.hg')
427 else:
427 else:
428 dirs[:] = [] # don't descend further
428 dirs[:] = [] # don't descend further
429 elif followsym:
429 elif followsym:
430 newdirs = []
430 newdirs = []
431 for d in dirs:
431 for d in dirs:
432 fname = os.path.join(root, d)
432 fname = os.path.join(root, d)
433 if adddir(seen_dirs, fname):
433 if adddir(seen_dirs, fname):
434 if os.path.islink(fname):
434 if os.path.islink(fname):
435 for hgname in walkrepos(fname, True, seen_dirs):
435 for hgname in walkrepos(fname, True, seen_dirs):
436 yield hgname
436 yield hgname
437 else:
437 else:
438 newdirs.append(d)
438 newdirs.append(d)
439 dirs[:] = newdirs
439 dirs[:] = newdirs
440
440
441
441
442 def binnode(ctx):
442 def binnode(ctx):
443 """Return binary node id for a given basectx"""
443 """Return binary node id for a given basectx"""
444 node = ctx.node()
444 node = ctx.node()
445 if node is None:
445 if node is None:
446 return wdirid
446 return wdirid
447 return node
447 return node
448
448
449
449
450 def intrev(ctx):
450 def intrev(ctx):
451 """Return integer for a given basectx that can be used in comparison or
451 """Return integer for a given basectx that can be used in comparison or
452 arithmetic operation"""
452 arithmetic operation"""
453 rev = ctx.rev()
453 rev = ctx.rev()
454 if rev is None:
454 if rev is None:
455 return wdirrev
455 return wdirrev
456 return rev
456 return rev
457
457
458
458
459 def formatchangeid(ctx):
459 def formatchangeid(ctx):
460 """Format changectx as '{rev}:{node|formatnode}', which is the default
460 """Format changectx as '{rev}:{node|formatnode}', which is the default
461 template provided by logcmdutil.changesettemplater"""
461 template provided by logcmdutil.changesettemplater"""
462 repo = ctx.repo()
462 repo = ctx.repo()
463 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
463 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
464
464
465
465
466 def formatrevnode(ui, rev, node):
466 def formatrevnode(ui, rev, node):
467 """Format given revision and node depending on the current verbosity"""
467 """Format given revision and node depending on the current verbosity"""
468 if ui.debugflag:
468 if ui.debugflag:
469 hexfunc = hex
469 hexfunc = hex
470 else:
470 else:
471 hexfunc = short
471 hexfunc = short
472 return b'%d:%s' % (rev, hexfunc(node))
472 return b'%d:%s' % (rev, hexfunc(node))
473
473
474
474
475 def resolvehexnodeidprefix(repo, prefix):
475 def resolvehexnodeidprefix(repo, prefix):
476 if prefix.startswith(b'x'):
476 if prefix.startswith(b'x'):
477 prefix = prefix[1:]
477 prefix = prefix[1:]
478 try:
478 try:
479 # Uses unfiltered repo because it's faster when prefix is ambiguous/
479 # Uses unfiltered repo because it's faster when prefix is ambiguous/
480 # This matches the shortesthexnodeidprefix() function below.
480 # This matches the shortesthexnodeidprefix() function below.
481 node = repo.unfiltered().changelog._partialmatch(prefix)
481 node = repo.unfiltered().changelog._partialmatch(prefix)
482 except error.AmbiguousPrefixLookupError:
482 except error.AmbiguousPrefixLookupError:
483 revset = repo.ui.config(
483 revset = repo.ui.config(
484 b'experimental', b'revisions.disambiguatewithin'
484 b'experimental', b'revisions.disambiguatewithin'
485 )
485 )
486 if revset:
486 if revset:
487 # Clear config to avoid infinite recursion
487 # Clear config to avoid infinite recursion
488 configoverrides = {
488 configoverrides = {
489 (b'experimental', b'revisions.disambiguatewithin'): None
489 (b'experimental', b'revisions.disambiguatewithin'): None
490 }
490 }
491 with repo.ui.configoverride(configoverrides):
491 with repo.ui.configoverride(configoverrides):
492 revs = repo.anyrevs([revset], user=True)
492 revs = repo.anyrevs([revset], user=True)
493 matches = []
493 matches = []
494 for rev in revs:
494 for rev in revs:
495 node = repo.changelog.node(rev)
495 node = repo.changelog.node(rev)
496 if hex(node).startswith(prefix):
496 if hex(node).startswith(prefix):
497 matches.append(node)
497 matches.append(node)
498 if len(matches) == 1:
498 if len(matches) == 1:
499 return matches[0]
499 return matches[0]
500 raise
500 raise
501 if node is None:
501 if node is None:
502 return
502 return
503 repo.changelog.rev(node) # make sure node isn't filtered
503 repo.changelog.rev(node) # make sure node isn't filtered
504 return node
504 return node
505
505
506
506
507 def mayberevnum(repo, prefix):
507 def mayberevnum(repo, prefix):
508 """Checks if the given prefix may be mistaken for a revision number"""
508 """Checks if the given prefix may be mistaken for a revision number"""
509 try:
509 try:
510 i = int(prefix)
510 i = int(prefix)
511 # if we are a pure int, then starting with zero will not be
511 # if we are a pure int, then starting with zero will not be
512 # confused as a rev; or, obviously, if the int is larger
512 # confused as a rev; or, obviously, if the int is larger
513 # than the value of the tip rev. We still need to disambiguate if
513 # than the value of the tip rev. We still need to disambiguate if
514 # prefix == '0', since that *is* a valid revnum.
514 # prefix == '0', since that *is* a valid revnum.
515 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
515 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
516 return False
516 return False
517 return True
517 return True
518 except ValueError:
518 except ValueError:
519 return False
519 return False
520
520
521
521
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            # prefix the hex string with 'x' instead of lengthening it
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        # otherwise, keep extending the prefix until it can no longer be
        # read as a revnum; the full 40-digit hexnode always exceeds the
        # tip revnum, so this loop is guaranteed to return
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    # collisions are checked in the unfiltered changelog (see note above)
    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            # the node belongs to the configured subset: prefer a prefix
            # that is unambiguous within that (usually much smaller) set
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapped need to give access to its internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # no native nodetree available: fall back to a linear scan of
            # the subset, growing the prefix until it matches only `node`
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
594
594
595
595
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if
    the symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        # the only "does not exist" signal; other errors propagate
        return False
    return True
607
607
608
608
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        # well-known single-revision keywords resolve directly
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        # next, try to read the symbol as a plain (possibly negative)
        # revision number; the round-trip check rejects forms like b'+1'
        try:
            r = int(symbol)
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # a filtered revision must be reported as such, not fall through
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # a full 40-hex-digit string may be a complete nodeid
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try the symbol as an unambiguous hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # symbol named the working directory: return the wdir context
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        # translate all "filtered" failures into one user-facing error
        raise _filterederror(repo, symbol)
674
674
675
675
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith(b'visible'):
        # some other filter (e.g. 'served') hides the revision
        msg = _(b"filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # hidden by the "visible" filter: find out whether the changeset is
    # obsolete so the message can explain *why* it is not visible
    ctx = revsymbol(repo.unfiltered(), changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _(b"hidden revision '%s'") % changeid

    hint = _(b'use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
700
700
701
701
def revsingle(repo, revspec, default=b'.', localalias=None):
    """Resolve a single revset to one context (the last of the result).

    Falls back to `default` when `revspec` is empty (but 0 is a valid spec).
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_(b'empty revision set'))
    return repo[matched.last()]
710
710
711
711
def _pairspec(revspec):
    # True when the top-level revset operator is some form of range,
    # meaning the expression denotes a pair rather than a single revision
    rangeops = (b'range', b'rangepre', b'rangepost', b'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangeops
720
720
721
721
def revpair(repo, revs):
    """Resolve a list of revsets to a (first, second) pair of contexts.

    With no input, returns ('.', working directory).
    """
    if not revs:
        return repo[b'.'], repo[None]

    resolved = revrange(repo, revs)
    if not resolved:
        raise error.Abort(_(b'empty revision range'))

    first = resolved.first()
    second = resolved.last()

    if first == second and len(revs) >= 2:
        # several specs collapsed to one revision: only acceptable if no
        # individual spec was empty
        if not all(revrange(repo, [r]) for r in revs):
            raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
746
746
747
747
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # normalize bare revision numbers into revset syntax before dispatch
    allspecs = [
        revsetlang.formatspec(b'%d', spec) if isinstance(spec, int) else spec
        for spec in specs
    ]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
775
775
776
776
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until `sizelimit` is reached."""
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
782
782
783
783
def walkchangerevs(repo, revs, makefilematcher, prepare):
    '''Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        # consume `revs` in progressively larger windows (see
        # increasingwindows): small requests stay cheap, long walks
        # amortize per-window overhead
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    # source exhausted; finish this window then stop
                    stopiteration = True
                    break
                nrevs.append(rev)
            # first pass: prepare each context in ascending rev order
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            # second pass: yield in the caller-requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
824
824
825
825
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a real merge: both parents are always relevant
        return parents
    if repo.ui.debugflag:
        # debug output always shows both parent slots
        return [parents[0], repo[nullrev]]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # parent immediately precedes this revision: nothing worth showing
        return []
    return parents
841
841
842
842
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    # decide whether to produce cwd-relative paths
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # pathto() would also handle cwd == b'' (the repo root), but
            # its generality makes the returned function slower, so the
            # root case falls through to the repo-relative variants below
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)

    if repo.ui.configbool(b'ui', b'slash'):
        # caller wants forward slashes regardless of platform
        return lambda f: f
    return util.localpath
881
881
882
882
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''

    def prefixed(f):
        return uipathfn(posixpath.join(subpath, f))

    return prefixed
886
886
887
887
def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    if pats:
        return True
    return bool(opts.get(b'include') or opts.get(b'exclude'))
895
895
896
896
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        # posix: the shell expanded globs before we were invoked
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind (e.g. 'glob:', 're:'): pass through
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            # invalid glob syntax: treat it as a literal path
            matches = [pat]
        # keep the original spelling when nothing on disk matched
        expanded.extend(matches if matches else [kindpat])
    return expanded
915
915
916
916
def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        # expand bare globs on platforms where the shell did not
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    if badfn is None:
        # default handler: warn on the repo ui with a user-presentable path
        def badfn(f, msg):
            ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    matcher = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if matcher.always():
        # an everything-matcher means no effective patterns remain
        pats = []
    return matcher, pats
948
948
949
949
def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return matcher
955
955
956
956
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # `repo` is kept for interface symmetry with the other match* helpers;
    # always() needs no repository state
    return matchmod.always()
960
960
961
961
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matching: no pattern compilation, just set membership
    return matchmod.exact(files, badfn=badfn)
965
965
966
966
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # a plain path: just normalize it against the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)

    # a real pattern: it must match exactly one file in the revision
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
980
980
981
981
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    configured = ui.config(b'ui', b'origbackuppath')
    if configured:
        return vfs.vfs(repo.wvfs.join(configured))
    return None
990
990
991
991
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no ui.origbackuppath configured: default to <filepath>.orig
        return repo.wjoin(filepath + b".orig")

    # NOTE(review): the "creating directory" message is emitted here, but the
    # directory itself is only created by the makedirs() call further below
    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

    # Remove any files that conflict with the backup file's path
    for f in reversed(list(pathutil.finddirs(filepath))):
        if origvfs.isfileorlink(f):
            ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
            origvfs.unlink(f)
            # at most one ancestor can be a file; once removed, the rest
            # of the chain cannot conflict
            break

    origvfs.makedirs(origbackupdir)

    # a directory sitting where the backup file should go must be removed
    # (symlinks are left to be overwritten as regular backup targets)
    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
1026
1026
1027
1027
1028 class _containsnode(object):
1028 class _containsnode(object):
1029 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1029 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1030
1030
1031 def __init__(self, repo, revcontainer):
1031 def __init__(self, repo, revcontainer):
1032 self._torev = repo.changelog.rev
1032 self._torev = repo.changelog.rev
1033 self._revcontains = revcontainer.__contains__
1033 self._revcontains = revcontainer.__contains__
1034
1034
1035 def __contains__(self, node):
1035 def __contains__(self, node):
1036 return self._revcontains(self._torev(node))
1036 return self._revcontains(self._torev(node))
1037
1037
1038
1038
def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when we are allowed to fix phases
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # Normalize 'replacements' to a {tuple-of-oldnodes: successors} mapping.
    # NOTE(review): safehasattr is called with a bytes attribute name here;
    # attribute names are native strings on py3 — confirm against util.safehasattr.
    if not util.safehasattr(replacements, b'items'):
        # a bare iterable of nodes means "replaced with nothing"
        replacements = {(n,): () for n in replacements}
    else:
        # upgrade non-tuple "source" keys to 1-tuples for BC
        normalized = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            normalized[key] = value
        replacements = normalized

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process new nodes in topological order so parent phases are known
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assume we do not have "unstable" nodes above the cleaned ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )
1220
1221
1221
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    """Add new files and forget missing ones, recording renames by similarity.

    Returns 1 if anything was rejected or a subrepo reported failure,
    otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    # normalize the user-facing percentage to a 0..1 ratio
    similarity /= 100.0

    ret = 0

    # Recurse into subrepositories first.
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        # only explicitly-listed files report "bad"; all are tracked as rejected
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    # Report what will be added/removed.
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # Rejected explicit files make the whole operation fail.
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1289
1289
1290
1290
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the lambda closes over 'rejected' above; it is only invoked later
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
1324
1324
1325
1325
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        # dirstate states: b'?' unknown, b'r' removed, b'a' added
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1360
1360
1361
1361
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(
        repo, added, removed, similarity
    ):
        verbose = repo.ui.verbose
        if verbose or not matcher.exact(old) or not matcher.exact(new):
            repo.ui.status(
                _(
                    b'recording removal of %s as rename to %s '
                    b'(%d%% similar)\n'
                )
                % (uipathfn(old), uipathfn(new), score * 100)
            )
        renames[new] = old
    return renames
1383
1383
1384
1384
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # take the wlock once for all three dirstate mutations
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)
1394
1394
1395
1395
def getrenamedfn(repo, endrev=None):
    """Return a callable getrenamed(fn, rev) -> copy source or None."""
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            # changeset-centric copy data: consult both parents' copy dicts
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # populate the per-file cache lazily, one filelog scan per file
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1440
1440
1441
1441
def getcopiesfn(repo, endrev=None):
    """Return a callable copiesfn(ctx) -> sorted [(dest, source)] pairs."""
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            p2copies = ctx.p2copies()
            if not p2copies:
                return sorted(ctx.p1copies().items())
            allcopies = ctx.p1copies().copy()
            # There should be no overlap
            allcopies.update(p2copies)
            return sorted(allcopies.items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            rev = ctx.rev()
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn
1466
1466
1467
1467
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == b'a' and origsrc == src:
        # source was only added in the working copy; no copy data to store
        if not ui.quiet:
            ui.warn(
                _(
                    b"%s has not been committed yet, so no copy "
                    b"data will be stored for %s.\n"
                )
                % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
            )
        if repo.dirstate[dst] in b'?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1490
1490
1491
1491
def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), nullid)
    s = newctx.status(oldctx, match=match)

    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()
1537
1537
1538
1538
def filterrequirements(requirements):
    """ filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT not in requirements:
        # without share-safe everything lives in .hg/requires
        return requirements, None
    wc, store = set(), set()
    for req in requirements:
        target = (
            wc
            if req in requirementsmod.WORKING_DIR_REQUIREMENTS
            else store
        )
        target.add(req)
    return wc, store
1556
1556
1557
1557
def istreemanifest(repo):
    """ returns whether the repository is using treemanifest or not """
    # membership test on the repo's requirement set
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1561
1561
1562
1562
def writereporequirements(repo, requirements=None):
    """ writes requirements for the repo to .hg/requires """
    if requirements:
        repo.requirements = requirements
    # split into working-copy vs store requirements (share-safe aware)
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
1572
1572
1573
1573
def writerequires(opener, requirements):
    """Atomically write the sorted requirement names, one per line."""
    with opener(b'requires', b'w', atomictemp=True) as fp:
        fp.write(b''.join(b'%s\n' % r for r in sorted(requirements)))
1578
1578
1579
1579
class filecachesubentry(object):
    """Tracks the stat state of one file backing a ``filecache`` entry."""

    def __init__(self, path, stat):
        # path of the tracked file
        self.path = path
        # latest util.cachestat result, or None when unknown/missing
        self.cachestat = None
        # tri-state: True/False once determined, None while unknown
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        """Re-record the file's stat state, if it is cacheable at all."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Return whether this file's state can be cached."""
        if self._cacheable is not None:
            return self._cacheable
        # unknown so far: optimistically assume it is cacheable
        return True

    def changed(self):
        """Return True when the file changed since the last refresh."""
        # a non-cacheable file must always be treated as changed
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # cacheability may become known only now that the file exists
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # re-check: the stat above may have settled it to False
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        # remember the new state so the next call compares against it
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """stat() ``path``; return None when the file does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1634
1634
1635
1635
class filecacheentry(object):
    """A cache entry backed by one or more files on disk."""

    def __init__(self, paths, stat=True):
        # one sub-entry per tracked file
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        """Refresh the recorded state of every tracked file."""
        for sub in self._entries:
            sub.refresh()
1652
1652
1653
1653
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached.  The decorated function is called and its result stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used directly, as it was also
    set in the instance dictionary — no stat() is performed to check whether
    the file has changed.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately.  Use
    ``__class__.<attr>.set()`` instead of directly setting <attr>.

    Others can muck about with the state of the ``_filecache`` dict. e.g.
    they can populate an entry before the property's getter is called.  In
    this case, entries in ``_filecache`` will be used during property
    operations, if available.  If the underlying file changes, it is up to
    external callers to reflect this by e.g. calling ``delattr(obj, attr)``
    to remove the cached method result as well as possibly calling
    ``del obj._filecache[attr]`` to remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Subclasses must override this to call the appropriate join function
        on 'obj' (an instance of the class whose member function was
        decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # accessed on the class itself: hand back the descriptor
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if not entry:
            paths = [self.join(obj, path) for path in self.paths]
            # stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between our read and the stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)
            obj._filecache[self.name] = entry
        elif entry.changed():
            entry.obj = self.func(obj)

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name in obj._filecache:
            ce = obj._filecache[self.name]
        else:
            # add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x
1738
1738
1739
1739
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for line in src:
            # records are "<revspec>" or "<revspec> <freeform value>"
            if b" " in line:
                key, value = line.strip().split(b" ", 1)
            else:
                key, value = line.strip(), b""

            key = encoding.tolocal(key)
            try:
                data[revsingle(repo, key).rev()] = encoding.tolocal(value)
            except (error.LookupError, error.RepoLookupError):
                # we ignore data for nodes that don't exist locally
                pass
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
1806
1806
1807
1807
class progress(object):
    """Context-manager helper driving a progress bar via a callback.

    ``updatebar`` is called as (topic, pos, item, unit, total) on every
    state change; ``complete()`` signals the end with ``pos=None``.
    """

    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        # also echo every update on the debug channel when configured
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # always clear the bar, even when the body raised
        self.complete()

    def update(self, pos, item=b"", total=None):
        """Move the bar to ``pos``; optionally adjust ``total``."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        """Advance the bar by ``step`` positions."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Mark the topic finished and clear the bar state."""
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # mirror the current bar state on ui.debug
        unit = b' ' + self.unit if self.unit else b''
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1857
1857
1858
1858
def gdinitconfig(ui):
    """Return whether a new repo should be created with general delta."""
    # experimental config: format.generaldelta
    if ui.configbool(b'format', b'generaldelta'):
        return True
    return ui.configbool(b'format', b'usegeneraldelta')
1866
1866
1867
1867
def gddeltaconfig(ui):
    """Return whether incoming deltas should be optimised for general delta."""
    # experimental config: format.generaldelta
    enabled = ui.configbool(b'format', b'generaldelta')
    return enabled
1873
1873
1874
1874
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved key under which read() returns a raw first line
    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a dict mapping key -> value.

        When ``firstlinenonkeyval`` is true, the first line is not parsed
        as a key-value pair but returned whole under the __firstline key.
        """
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_(b"empty simplekeyvalue file"))
            # store the first line without its trailing '\n'
            d[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # 'line.strip()' (rather than plain truthiness) also skips
            # lines that contain only a '\n'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        if self.firstlinekey in updatedict:
            raise error.CorruptedState(
                _(b"%r can't be used as a key") % self.firstlinekey
            )
        d.update(updatedict)
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    b"key name '%s' is reserved" % self.firstlinekey
                )
            if not k[0:1].isalpha():
                raise error.ProgrammingError(
                    b"keys must start with a letter in a key-value file"
                )
            if not k.isalnum():
                raise error.ProgrammingError(
                    b"invalid key name in a simple key-value file"
                )
            if b'\n' in v:
                raise error.ProgrammingError(
                    b"invalid value in a simple key-value file"
                )
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))
1945
1945
1946
1946
1947 _reportobsoletedsource = [
1947 _reportobsoletedsource = [
1948 b'debugobsolete',
1948 b'debugobsolete',
1949 b'pull',
1949 b'pull',
1950 b'push',
1950 b'push',
1951 b'serve',
1951 b'serve',
1952 b'unbundle',
1952 b'unbundle',
1953 ]
1953 ]
1954
1954
1955 _reportnewcssource = [
1955 _reportnewcssource = [
1956 b'pull',
1956 b'pull',
1957 b'unbundle',
1957 b'unbundle',
1958 ]
1958 ]
1959
1959
1960
1960
def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _silencebad(m):
        assert isinstance(m, matchmod.basematcher)
        # The command itself will complain about files that don't exist,
        # so don't duplicate the message here.
        return matchmod.badmatch(m, lambda fn, msg: None)

    revbadmatches = [
        (rev, _silencebad(m) if m else matchall(repo))
        for (rev, m) in revmatches
    ]

    fileprefetchhooks(repo, revbadmatches)
1984
1984
1985
1985
# hook point used by prefetchfiles(); each registered function is called
# with (repo, revs, match)
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1991
1991
1992
1992
1993 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1993 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1994 """register a callback to issue a summary after the transaction is closed
1994 """register a callback to issue a summary after the transaction is closed
1995
1995
1996 If as_validator is true, then the callbacks are registered as transaction
1996 If as_validator is true, then the callbacks are registered as transaction
1997 validators instead
1997 validators instead
1998 """
1998 """
1999
1999
2000 def txmatch(sources):
2000 def txmatch(sources):
2001 return any(txnname.startswith(source) for source in sources)
2001 return any(txnname.startswith(source) for source in sources)
2002
2002
2003 categories = []
2003 categories = []
2004
2004
2005 def reportsummary(func):
2005 def reportsummary(func):
2006 """decorator for report callbacks."""
2006 """decorator for report callbacks."""
2007 # The repoview life cycle is shorter than the one of the actual
2007 # The repoview life cycle is shorter than the one of the actual
2008 # underlying repository. So the filtered object can die before the
2008 # underlying repository. So the filtered object can die before the
2009 # weakref is used leading to troubles. We keep a reference to the
2009 # weakref is used leading to troubles. We keep a reference to the
2010 # unfiltered object and restore the filtering when retrieving the
2010 # unfiltered object and restore the filtering when retrieving the
2011 # repository through the weakref.
2011 # repository through the weakref.
2012 filtername = repo.filtername
2012 filtername = repo.filtername
2013 reporef = weakref.ref(repo.unfiltered())
2013 reporef = weakref.ref(repo.unfiltered())
2014
2014
2015 def wrapped(tr):
2015 def wrapped(tr):
2016 repo = reporef()
2016 repo = reporef()
2017 if filtername:
2017 if filtername:
2018 assert repo is not None # help pytype
2018 assert repo is not None # help pytype
2019 repo = repo.filtered(filtername)
2019 repo = repo.filtered(filtername)
2020 func(repo, tr)
2020 func(repo, tr)
2021
2021
2022 newcat = b'%02i-txnreport' % len(categories)
2022 newcat = b'%02i-txnreport' % len(categories)
2023 if as_validator:
2023 if as_validator:
2024 otr.addvalidator(newcat, wrapped)
2024 otr.addvalidator(newcat, wrapped)
2025 else:
2025 else:
2026 otr.addpostclose(newcat, wrapped)
2026 otr.addpostclose(newcat, wrapped)
2027 categories.append(newcat)
2027 categories.append(newcat)
2028 return wrapped
2028 return wrapped
2029
2029
2030 @reportsummary
2030 @reportsummary
2031 def reportchangegroup(repo, tr):
2031 def reportchangegroup(repo, tr):
2032 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2032 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2033 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2033 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2034 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2034 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2035 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2035 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2036 if cgchangesets or cgrevisions or cgfiles:
2036 if cgchangesets or cgrevisions or cgfiles:
2037 htext = b""
2037 htext = b""
2038 if cgheads:
2038 if cgheads:
2039 htext = _(b" (%+d heads)") % cgheads
2039 htext = _(b" (%+d heads)") % cgheads
2040 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2040 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2041 if as_validator:
2041 if as_validator:
2042 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2042 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2043 assert repo is not None # help pytype
2043 assert repo is not None # help pytype
2044 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2044 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2045
2045
2046 if txmatch(_reportobsoletedsource):
2046 if txmatch(_reportobsoletedsource):
2047
2047
2048 @reportsummary
2048 @reportsummary
2049 def reportobsoleted(repo, tr):
2049 def reportobsoleted(repo, tr):
2050 obsoleted = obsutil.getobsoleted(repo, tr)
2050 obsoleted = obsutil.getobsoleted(repo, tr)
2051 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2051 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2052 if newmarkers:
2052 if newmarkers:
2053 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2053 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2054 if obsoleted:
2054 if obsoleted:
2055 msg = _(b'obsoleted %i changesets\n')
2055 msg = _(b'obsoleted %i changesets\n')
2056 if as_validator:
2056 if as_validator:
2057 msg = _(b'obsoleting %i changesets\n')
2057 msg = _(b'obsoleting %i changesets\n')
2058 repo.ui.status(msg % len(obsoleted))
2058 repo.ui.status(msg % len(obsoleted))
2059
2059
2060 if obsolete.isenabled(
2060 if obsolete.isenabled(
2061 repo, obsolete.createmarkersopt
2061 repo, obsolete.createmarkersopt
2062 ) and repo.ui.configbool(
2062 ) and repo.ui.configbool(
2063 b'experimental', b'evolution.report-instabilities'
2063 b'experimental', b'evolution.report-instabilities'
2064 ):
2064 ):
2065 instabilitytypes = [
2065 instabilitytypes = [
2066 (b'orphan', b'orphan'),
2066 (b'orphan', b'orphan'),
2067 (b'phase-divergent', b'phasedivergent'),
2067 (b'phase-divergent', b'phasedivergent'),
2068 (b'content-divergent', b'contentdivergent'),
2068 (b'content-divergent', b'contentdivergent'),
2069 ]
2069 ]
2070
2070
2071 def getinstabilitycounts(repo):
2071 def getinstabilitycounts(repo):
2072 filtered = repo.changelog.filteredrevs
2072 filtered = repo.changelog.filteredrevs
2073 counts = {}
2073 counts = {}
2074 for instability, revset in instabilitytypes:
2074 for instability, revset in instabilitytypes:
2075 counts[instability] = len(
2075 counts[instability] = len(
2076 set(obsolete.getrevs(repo, revset)) - filtered
2076 set(obsolete.getrevs(repo, revset)) - filtered
2077 )
2077 )
2078 return counts
2078 return counts
2079
2079
2080 oldinstabilitycounts = getinstabilitycounts(repo)
2080 oldinstabilitycounts = getinstabilitycounts(repo)
2081
2081
2082 @reportsummary
2082 @reportsummary
2083 def reportnewinstabilities(repo, tr):
2083 def reportnewinstabilities(repo, tr):
2084 newinstabilitycounts = getinstabilitycounts(repo)
2084 newinstabilitycounts = getinstabilitycounts(repo)
2085 for instability, revset in instabilitytypes:
2085 for instability, revset in instabilitytypes:
2086 delta = (
2086 delta = (
2087 newinstabilitycounts[instability]
2087 newinstabilitycounts[instability]
2088 - oldinstabilitycounts[instability]
2088 - oldinstabilitycounts[instability]
2089 )
2089 )
2090 msg = getinstabilitymessage(delta, instability)
2090 msg = getinstabilitymessage(delta, instability)
2091 if msg:
2091 if msg:
2092 repo.ui.warn(msg)
2092 repo.ui.warn(msg)
2093
2093
2094 if txmatch(_reportnewcssource):
2094 if txmatch(_reportnewcssource):
2095
2095
2096 @reportsummary
2096 @reportsummary
2097 def reportnewcs(repo, tr):
2097 def reportnewcs(repo, tr):
2098 """Report the range of new revisions pulled/unbundled."""
2098 """Report the range of new revisions pulled/unbundled."""
2099 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2099 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2100 unfi = repo.unfiltered()
2100 unfi = repo.unfiltered()
2101 if origrepolen >= len(unfi):
2101 if origrepolen >= len(unfi):
2102 return
2102 return
2103
2103
2104 # Compute the bounds of new visible revisions' range.
2104 # Compute the bounds of new visible revisions' range.
2105 revs = smartset.spanset(repo, start=origrepolen)
2105 revs = smartset.spanset(repo, start=origrepolen)
2106 if revs:
2106 if revs:
2107 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2107 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2108
2108
2109 if minrev == maxrev:
2109 if minrev == maxrev:
2110 revrange = minrev
2110 revrange = minrev
2111 else:
2111 else:
2112 revrange = b'%s:%s' % (minrev, maxrev)
2112 revrange = b'%s:%s' % (minrev, maxrev)
2113 draft = len(repo.revs(b'%ld and draft()', revs))
2113 draft = len(repo.revs(b'%ld and draft()', revs))
2114 secret = len(repo.revs(b'%ld and secret()', revs))
2114 secret = len(repo.revs(b'%ld and secret()', revs))
2115 if not (draft or secret):
2115 if not (draft or secret):
2116 msg = _(b'new changesets %s\n') % revrange
2116 msg = _(b'new changesets %s\n') % revrange
2117 elif draft and secret:
2117 elif draft and secret:
2118 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2118 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2119 msg %= (revrange, draft, secret)
2119 msg %= (revrange, draft, secret)
2120 elif draft:
2120 elif draft:
2121 msg = _(b'new changesets %s (%d drafts)\n')
2121 msg = _(b'new changesets %s (%d drafts)\n')
2122 msg %= (revrange, draft)
2122 msg %= (revrange, draft)
2123 elif secret:
2123 elif secret:
2124 msg = _(b'new changesets %s (%d secrets)\n')
2124 msg = _(b'new changesets %s (%d secrets)\n')
2125 msg %= (revrange, secret)
2125 msg %= (revrange, secret)
2126 else:
2126 else:
2127 errormsg = b'entered unreachable condition'
2127 errormsg = b'entered unreachable condition'
2128 raise error.ProgrammingError(errormsg)
2128 raise error.ProgrammingError(errormsg)
2129 repo.ui.status(msg)
2129 repo.ui.status(msg)
2130
2130
2131 # search new changesets directly pulled as obsolete
2131 # search new changesets directly pulled as obsolete
2132 duplicates = tr.changes.get(b'revduplicates', ())
2132 duplicates = tr.changes.get(b'revduplicates', ())
2133 obsadded = unfi.revs(
2133 obsadded = unfi.revs(
2134 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2134 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2135 )
2135 )
2136 cl = repo.changelog
2136 cl = repo.changelog
2137 extinctadded = [r for r in obsadded if r not in cl]
2137 extinctadded = [r for r in obsadded if r not in cl]
2138 if extinctadded:
2138 if extinctadded:
2139 # They are not just obsolete, but obsolete and invisible
2139 # They are not just obsolete, but obsolete and invisible
2140 # we call them "extinct" internally but the terms have not been
2140 # we call them "extinct" internally but the terms have not been
2141 # exposed to users.
2141 # exposed to users.
2142 msg = b'(%d other changesets obsolete on arrival)\n'
2142 msg = b'(%d other changesets obsolete on arrival)\n'
2143 repo.ui.status(msg % len(extinctadded))
2143 repo.ui.status(msg % len(extinctadded))
2144
2144
2145 @reportsummary
2145 @reportsummary
2146 def reportphasechanges(repo, tr):
2146 def reportphasechanges(repo, tr):
2147 """Report statistics of phase changes for changesets pre-existing
2147 """Report statistics of phase changes for changesets pre-existing
2148 pull/unbundle.
2148 pull/unbundle.
2149 """
2149 """
2150 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2150 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2151 published = []
2151 published = []
2152 for revs, (old, new) in tr.changes.get(b'phases', []):
2152 for revs, (old, new) in tr.changes.get(b'phases', []):
2153 if new != phases.public:
2153 if new != phases.public:
2154 continue
2154 continue
2155 published.extend(rev for rev in revs if rev < origrepolen)
2155 published.extend(rev for rev in revs if rev < origrepolen)
2156 if not published:
2156 if not published:
2157 return
2157 return
2158 msg = _(b'%d local changesets published\n')
2158 msg = _(b'%d local changesets published\n')
2159 if as_validator:
2159 if as_validator:
2160 msg = _(b'%d local changesets will be published\n')
2160 msg = _(b'%d local changesets will be published\n')
2161 repo.ui.status(msg % len(published))
2161 repo.ui.status(msg % len(published))
2162
2162
2163
2163
def getinstabilitymessage(delta, instability):
    """Return a warning message about newly introduced instabilities.

    Kept as a standalone function so that extensions can wrap it to show
    extra information, e.g. how to fix the instabilities.  Returns ``None``
    when ``delta`` is not positive (callers test the result for truth).
    """
    if delta <= 0:
        return None
    return _(b'%i new %s changesets\n') % (delta, instability)
2171
2171
2172
2172
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of *nodes* as short hashes.

    All nodes are listed when there are at most ``maxnumnodes`` of them or
    when the ui is verbose; otherwise the list is truncated and suffixed
    with a count of the remaining nodes.
    """
    shortened = [short(n) for n in nodes]
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(shortened)
    shown = b' '.join(shortened[:maxnumnodes])
    return _(b"%s and %d others") % (shown, len(nodes) - maxnumnodes)
2178
2178
2179
2179
def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """Abort if any named branch of the visible repository has multiple heads.

    ``desc`` describes the running transaction; strip/repair transactions
    are exempt because they legitimately pass through odd states.  With
    ``accountclosed`` set, closed heads are counted too.  Raises
    ``error.Abort`` for the first offending branch found.
    """
    if desc == b'strip' or desc == b'repair':
        # skip the logic during strip
        return
    # possible improvement: we could restrict the check to affected branches
    branches = repo.filtered(b'visible').branchmap()
    for branch in branches:
        heads = branches.branchheads(branch, closed=accountclosed)
        if len(heads) <= 1:
            continue
        msg = _(b'rejecting multiple heads on branch "%s"') % branch
        hint = _(b'%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
2196
2196
2197
2197
def wrapconvertsink(sink):
    """Hook point for extensions to wrap the sink returned by
    convcmd.convertsink() before it is used, whether or not the convert
    extension was formally loaded.

    The default implementation is the identity function.
    """
    return sink
2203
2203
2204
2204
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # Direct access must be enabled, and only the standard filters can be
    # widened.  Keep the short-circuit: the config is not read when the
    # repo is unfiltered.
    if not repo.filtername:
        return repo
    if not repo.ui.configbool(b'experimental', b'directaccess'):
        return repo
    if repo.filtername != b'visible' and repo.filtername != b'visible-hidden':
        return repo

    # Collect every hash-like symbol appearing in the parseable specs.
    hashlike = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError:
            # a bad spec will be reported by scmutil.revrange() later on
            continue
        hashlike.update(revsetlang.gethashlikesymbols(parsed))
    if not hashlike:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlike)
    if not hiddenrevs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[r]) for r in hiddenrevs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # A distinct filter name keeps the branch/tags caches separate until
    # those caches can be disabled when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', hiddenrevs)
2253
2253
2254
2254
def _getrevsfromsymbols(repo, symbols):
    """Return the set of revision numbers, among *symbols*, that denote
    changesets which are hidden in *repo*.

    A symbol may be a decimal revision number (honoured only when the
    ``experimental.directaccess.revnums`` knob is set) or a hexadecimal
    node-id prefix; symbols that resolve to nothing are ignored.
    """
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    revs = set()
    for symbol in symbols:
        # First try to read the symbol as a plain revision number.
        asrev = None
        try:
            asrev = int(symbol)
        except ValueError:
            pass
        if asrev is not None and asrev <= tiprev:
            # Numeric access is gated behind its own config knob; a number
            # within range is consumed here either way and never retried
            # as a hash prefix.
            if allowrevnums and asrev not in cl:
                revs.add(asrev)
            continue

        # Otherwise treat it as a (possibly abbreviated) hex node id.
        try:
            node = resolvehexnodeidprefix(unfi, symbol)
        except (error.LookupError, error.WdirUnsupported):
            continue
        if node is None:
            continue
        rev = unficl.rev(node)
        if rev not in cl:
            revs.add(rev)

    return revs
2288
2288
2289
2289
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # Ancestors of the bookmark, minus anything reachable only through
    # other heads or other bookmarks.  Adjacent bytes literals concatenate
    # into a single revset expression.
    query = (
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))"
    )
    return repo.revs(query, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now