errors: introduce a class for remote errors...
Martin von Zweigbergk
r47738:f9482db1 default
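
This changeset adds a RemoteError base class between Abort and OutOfBandError, so code that handles failures coming from a remote repository can catch one class instead of enumerating subclasses. A minimal sketch of the effect (illustrative only, not part of the commit; the fake_pull() helper is hypothetical):

    from mercurial import error

    def fake_pull():
        # Hypothetical stand-in for an operation that fails the way a
        # remote peer does: OutOfBandError is what gets raised when the
        # server reports an error out of band.
        raise error.OutOfBandError(b'server says no')

    try:
        fake_pull()
    except error.RemoteError as exc:
        # After this change, OutOfBandError derives from RemoteError, so a
        # single handler covers it (and any future remote-error subclass).
        print(exc.format())  # b'abort: remote error:\nserver says no\n'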
diff --git a/mercurial/error.py b/mercurial/error.py
--- a/mercurial/error.py
+++ b/mercurial/error.py
@@ -1,628 +1,632 @@
 # error.py - Mercurial exceptions
 #
 # Copyright 2005-2008 Olivia Mackall <olivia@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 """Mercurial exceptions.

 This allows us to catch exceptions at higher levels without forcing
 imports.
 """

 from __future__ import absolute_import

 import difflib

 # Do not import anything but pycompat here, please
 from . import pycompat

 if pycompat.TYPE_CHECKING:
     from typing import (
         Any,
         AnyStr,
         Iterable,
         List,
         Optional,
         Sequence,
         Union,
     )

 def _tobytes(exc):
     """Byte-stringify exception in the same way as BaseException.__str__()"""
     if not exc.args:
         return b''
     if len(exc.args) == 1:
         return pycompat.bytestr(exc.args[0])
     return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)

 class Hint(object):
     """Mix-in to provide a hint of an error

     This should come first in the inheritance list to consume a hint and
     pass remaining arguments to the exception class.
     """

     def __init__(self, *args, **kw):
         self.hint = kw.pop('hint', None)
         super(Hint, self).__init__(*args, **kw)


 class StorageError(Hint, Exception):
     """Raised when an error occurs in a storage layer.

     Usually subclassed by a storage-specific exception.
     """

     __bytes__ = _tobytes


 class RevlogError(StorageError):
     pass


 class SidedataHashError(RevlogError):
     def __init__(self, key, expected, got):
         self.hint = None
         self.sidedatakey = key
         self.expecteddigest = expected
         self.actualdigest = got


 class FilteredIndexError(IndexError):
     __bytes__ = _tobytes


 class LookupError(RevlogError, KeyError):
     def __init__(self, name, index, message):
         self.name = name
         self.index = index
         # this can't be called 'message' because at least some installs of
         # Python 2.6+ complain about the 'message' property being deprecated
         self.lookupmessage = message
         if isinstance(name, bytes) and len(name) == 20:
             from .node import hex

             name = hex(name)
         # if name is a binary node, it can be None
         RevlogError.__init__(
             self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
         )

     def __bytes__(self):
         return RevlogError.__bytes__(self)

     def __str__(self):
         return RevlogError.__str__(self)


 class AmbiguousPrefixLookupError(LookupError):
     pass


 class FilteredLookupError(LookupError):
     pass


 class ManifestLookupError(LookupError):
     pass


 class CommandError(Exception):
     """Exception raised on errors in parsing the command line."""

     def __init__(self, command, message):
         # type: (bytes, bytes) -> None
         self.command = command
         self.message = message
         super(CommandError, self).__init__()

     __bytes__ = _tobytes


 class UnknownCommand(Exception):
     """Exception raised if command is not in the command table."""

     def __init__(self, command, all_commands=None):
         # type: (bytes, Optional[List[bytes]]) -> None
         self.command = command
         self.all_commands = all_commands
         super(UnknownCommand, self).__init__()

     __bytes__ = _tobytes


 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""

     def __init__(self, prefix, matches):
         # type: (bytes, List[bytes]) -> None
         self.prefix = prefix
         self.matches = matches
         super(AmbiguousCommand, self).__init__()

     __bytes__ = _tobytes


 class WorkerError(Exception):
     """Exception raised when a worker process dies."""

     def __init__(self, status_code):
         # type: (int) -> None
         self.status_code = status_code
         # Pass status code to superclass just so it becomes part of __bytes__
         super(WorkerError, self).__init__(status_code)

     __bytes__ = _tobytes


 class InterventionRequired(Hint, Exception):
     """Exception raised when a command requires human intervention."""

     __bytes__ = _tobytes

 class ConflictResolutionRequired(InterventionRequired):
     """Exception raised when a continuable command requires merge conflict resolution."""

     def __init__(self, opname):
         # type: (bytes) -> None
         from .i18n import _

         self.opname = opname
         InterventionRequired.__init__(
             self,
             _(
                 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
             )
             % opname,
         )


 class Abort(Hint, Exception):
     """Raised if a command needs to print an error and exit."""

     def __init__(self, message, hint=None):
         # type: (bytes, Optional[bytes]) -> None
         self.message = message
         self.hint = hint
         # Pass the message into the Exception constructor to help extensions
         # that look for exc.args[0].
         Exception.__init__(self, message)

     def __bytes__(self):
         return self.message

     if pycompat.ispy3:

         def __str__(self):
             # the output would be unreadable if the message was translated,
             # but do not replace it with encoding.strfromlocal(), which
             # may raise another exception.
             return pycompat.sysstr(self.__bytes__())

     def format(self):
         # type: () -> bytes
         from .i18n import _

         message = _(b"abort: %s\n") % self.message
         if self.hint:
             message += _(b"(%s)\n") % self.hint
         return message


 class InputError(Abort):
     """Indicates that the user made an error in their input.

     Examples: Invalid command, invalid flags, invalid revision.
     """


 class StateError(Abort):
     """Indicates that the operation might work if retried in a different state.

     Examples: Unresolved merge conflicts, unfinished operations.
     """


 class CanceledError(Abort):
     """Indicates that the user canceled the operation.

     Examples: Close commit editor with error status, quit chistedit.
     """


 class SecurityError(Abort):
     """Indicates that some aspect of security failed.

     Examples: Bad server credentials, expired local credentials for network
     filesystem, mismatched GPG signature, DoS protection.
     """


 class HookLoadError(Abort):
     """raised when loading a hook fails, aborting an operation

     Exists to allow more specialized catching."""


 class HookAbort(Abort):
     """raised when a validation hook fails, aborting an operation

     Exists to allow more specialized catching."""


 class ConfigError(Abort):
     """Exception raised when parsing config files"""

     def __init__(self, message, location=None, hint=None):
         # type: (bytes, Optional[bytes], Optional[bytes]) -> None
         super(ConfigError, self).__init__(message, hint=hint)
         self.location = location

     def format(self):
         # type: () -> bytes
         from .i18n import _

         if self.location is not None:
             message = _(b"config error at %s: %s\n") % (
                 pycompat.bytestr(self.location),
                 self.message,
             )
         else:
             message = _(b"config error: %s\n") % self.message
         if self.hint:
             message += _(b"(%s)\n") % self.hint
         return message


 class UpdateAbort(Abort):
     """Raised when an update is aborted for a destination issue"""


 class MergeDestAbort(Abort):
     """Raised when an update is aborted for destination issues"""


 class NoMergeDestAbort(MergeDestAbort):
     """Raised when an update is aborted because there is nothing to merge"""


 class ManyMergeDestAbort(MergeDestAbort):
     """Raised when an update is aborted because destination is ambiguous"""


 class ResponseExpected(Abort):
     """Raised when an EOF is received for a prompt"""

     def __init__(self):
         from .i18n import _

         Abort.__init__(self, _(b'response expected'))

-class OutOfBandError(Abort):
+class RemoteError(Abort):
+    """Exception raised when interacting with a remote repo fails"""
+
+
+class OutOfBandError(RemoteError):
     """Exception raised when a remote repo reports failure"""

     def __init__(self, *messages, **kwargs):
         from .i18n import _

         if messages:
             message = _(b"remote error:\n%s") % b''.join(messages)
             # Abort.format() adds a trailing newline
             message = message.rstrip(b'\n')
         else:
             message = _(b"remote error")
         super(OutOfBandError, self).__init__(message, **kwargs)


 class ParseError(Abort):
     """Raised when parsing config files and {rev,file}sets (msg[, pos])"""

     def __init__(self, message, location=None, hint=None):
         # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
         super(ParseError, self).__init__(message, hint=hint)
         self.location = location

     def format(self):
         # type: () -> bytes
         from .i18n import _

         if self.location is not None:
             message = _(b"hg: parse error at %s: %s\n") % (
                 pycompat.bytestr(self.location),
                 self.message,
             )
         else:
             message = _(b"hg: parse error: %s\n") % self.message
         if self.hint:
             message += _(b"(%s)\n") % self.hint
         return message


 class PatchError(Exception):
     __bytes__ = _tobytes


 def getsimilar(symbols, value):
     # type: (Iterable[bytes], bytes) -> List[bytes]
     sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
     # The cutoff for similarity here is pretty arbitrary. It should
     # probably be investigated and tweaked.
     return [s for s in symbols if sim(s) > 0.6]


 def similarity_hint(similar):
     # type: (List[bytes]) -> Optional[bytes]
     from .i18n import _

     if len(similar) == 1:
         return _(b"did you mean %s?") % similar[0]
     elif similar:
         ss = b", ".join(sorted(similar))
         return _(b"did you mean one of %s?") % ss
     else:
         return None


 class UnknownIdentifier(ParseError):
     """Exception raised when a {rev,file}set references an unknown identifier"""

     def __init__(self, function, symbols):
         # type: (bytes, Iterable[bytes]) -> None
         from .i18n import _

         similar = getsimilar(symbols, function)
         hint = similarity_hint(similar)

         ParseError.__init__(
             self, _(b"unknown identifier: %s") % function, hint=hint
         )


 class RepoError(Hint, Exception):
     __bytes__ = _tobytes


 class RepoLookupError(RepoError):
     pass


 class FilteredRepoLookupError(RepoLookupError):
     pass


 class CapabilityError(RepoError):
     pass


 class RequirementError(RepoError):
     """Exception raised if .hg/requires has an unknown entry."""


 class StdioError(IOError):
     """Raised if I/O to stdout or stderr fails"""

     def __init__(self, err):
         # type: (IOError) -> None
         IOError.__init__(self, err.errno, err.strerror)

     # no __bytes__() because error message is derived from the standard IOError


 class UnsupportedMergeRecords(Abort):
     def __init__(self, recordtypes):
         # type: (Iterable[bytes]) -> None
         from .i18n import _

         self.recordtypes = sorted(recordtypes)
         s = b' '.join(self.recordtypes)
         Abort.__init__(
             self,
             _(b'unsupported merge state records: %s') % s,
             hint=_(
                 b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
                 b'more information'
             ),
         )


 class UnknownVersion(Abort):
     """generic exception for aborting from an encounter with an unknown version"""

     def __init__(self, msg, hint=None, version=None):
         # type: (bytes, Optional[bytes], Optional[bytes]) -> None
         self.version = version
         super(UnknownVersion, self).__init__(msg, hint=hint)


 class LockError(IOError):
     def __init__(self, errno, strerror, filename, desc):
         # TODO: figure out if this should be bytes or str
         # _type: (int, str, str, bytes) -> None
         IOError.__init__(self, errno, strerror, filename)
         self.desc = desc

     # no __bytes__() because error message is derived from the standard IOError


 class LockHeld(LockError):
     def __init__(self, errno, filename, desc, locker):
         LockError.__init__(self, errno, b'Lock held', filename, desc)
         self.locker = locker


 class LockUnavailable(LockError):
     pass


 # LockError is for errors while acquiring the lock -- this is unrelated
 class LockInheritanceContractViolation(RuntimeError):
     __bytes__ = _tobytes


 class ResponseError(Exception):
     """Raised to print an error with part of output and exit."""

     __bytes__ = _tobytes


 # derived from KeyboardInterrupt to simplify some breakout code
 class SignalInterrupt(KeyboardInterrupt):
     """Exception raised on SIGTERM and SIGHUP."""


 class SignatureError(Exception):
     __bytes__ = _tobytes

 class PushRaced(RuntimeError):
     """An exception raised during unbundling that indicates a push race"""

     __bytes__ = _tobytes


 class ProgrammingError(Hint, RuntimeError):
     """Raised if a Mercurial (core or extension) developer made a mistake"""

     def __init__(self, msg, *args, **kwargs):
         # type: (AnyStr, Any, Any) -> None
         # On Python 3, turn the message back into a string since this is
         # an internal-only error that won't be printed except in
         # stack traces.
         msg = pycompat.sysstr(msg)
         super(ProgrammingError, self).__init__(msg, *args, **kwargs)

     __bytes__ = _tobytes


 class WdirUnsupported(Exception):
     """An exception which is raised when 'wdir()' is not supported"""

     __bytes__ = _tobytes


 # bundle2 related errors
 class BundleValueError(ValueError):
     """error raised when bundle2 cannot be processed"""

     __bytes__ = _tobytes


 class BundleUnknownFeatureError(BundleValueError):
     def __init__(self, parttype=None, params=(), values=()):
         self.parttype = parttype
         self.params = params
         self.values = values
         if self.parttype is None:
             msg = b'Stream Parameter'
         else:
             msg = parttype
         entries = self.params
         if self.params and self.values:
             assert len(self.params) == len(self.values)
             entries = []
             for idx, par in enumerate(self.params):
                 val = self.values[idx]
                 if val is None:
                     entries.append(val)
                 else:
                     entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
         if entries:
             msg = b'%s - %s' % (msg, b', '.join(entries))
         ValueError.__init__(self, msg)  # TODO: convert to str?


 class ReadOnlyPartError(RuntimeError):
     """error raised when code tries to alter a part being generated"""

     __bytes__ = _tobytes


 class PushkeyFailed(Abort):
     """error raised when a pushkey part failed to update a value"""

     def __init__(
         self, partid, namespace=None, key=None, new=None, old=None, ret=None
     ):
         self.partid = partid
         self.namespace = namespace
         self.key = key
         self.new = new
         self.old = old
         self.ret = ret
         # no i18n expected to be processed into a better message
         Abort.__init__(
             self, b'failed to update value for "%s/%s"' % (namespace, key)
         )


 class CensoredNodeError(StorageError):
     """error raised when content verification fails on a censored node

     Also contains the tombstone data substituted for the uncensored data.
     """

     def __init__(self, filename, node, tombstone):
         # type: (bytes, bytes, bytes) -> None
         from .node import short

         StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
         self.tombstone = tombstone


 class CensoredBaseError(StorageError):
     """error raised when a delta is rejected because its base is censored

     A delta based on a censored revision must be formed as a single patch
     operation which replaces the entire base with new content. This ensures
     the delta may be applied by clones which have not censored the base.
     """


 class InvalidBundleSpecification(Exception):
     """error raised when a bundle specification is invalid.

     This is used for syntax errors as opposed to support errors.
     """

     __bytes__ = _tobytes


 class UnsupportedBundleSpecification(Exception):
     """error raised when a bundle specification is not supported."""

     __bytes__ = _tobytes


 class CorruptedState(Exception):
     """error raised when a command is not able to read its state from file"""

     __bytes__ = _tobytes


 class PeerTransportError(Abort):
     """Transport-level I/O error when communicating with a peer repo."""


 class InMemoryMergeConflictsError(Exception):
     """Exception raised when merge conflicts arose during an in-memory merge."""

     __bytes__ = _tobytes


 class WireprotoCommandError(Exception):
     """Represents an error during execution of a wire protocol command.

     Should only be thrown by wire protocol version 2 commands.

     The error is a formatter string and an optional iterable of arguments.
     """

     def __init__(self, message, args=None):
         # type: (bytes, Optional[Sequence[bytes]]) -> None
         self.message = message
         self.messageargs = args
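
The scmutil.py hunk below updates the matching elif in callcatch() so that any RemoteError, not just OutOfBandError, maps to detailed exit code 100. A condensed sketch of the resulting Abort dispatch, assuming the ui.detailed-exit-code feature is enabled (the codes are taken from the hunk below; the helper itself is hypothetical):

    from mercurial import error

    # First matching class wins, mirroring the elif chain in callcatch().
    _DETAILED_EXIT_CODES = [
        ((error.InputError, error.ParseError), 10),
        (error.StateError, 20),
        (error.ConfigError, 30),
        (error.HookAbort, 40),
        (error.RemoteError, 100),  # previously checked OutOfBandError only
        (error.SecurityError, 150),
        (error.CanceledError, 250),
    ]

    def detailed_exit_code(inst):
        for klass, code in _DETAILED_EXIT_CODES:
            if isinstance(inst, klass):
                return code
        return -1  # a plain error.Abort gets no detailed code here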

diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -1,2322 +1,2322 @@
 # scmutil.py - Mercurial core utility functions
 #
 # Copyright Olivia Mackall <olivia@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from __future__ import absolute_import

 import errno
 import glob
 import os
 import posixpath
 import re
 import subprocess
 import weakref

 from .i18n import _
 from .node import (
     bin,
     hex,
     nullid,
     nullrev,
     short,
     wdirid,
     wdirrev,
 )
 from .pycompat import getattr
 from .thirdparty import attr
 from . import (
     copies as copiesmod,
     encoding,
     error,
     match as matchmod,
     obsolete,
     obsutil,
     pathutil,
     phases,
     policy,
     pycompat,
     requirements as requirementsmod,
     revsetlang,
     similar,
     smartset,
     url,
     util,
     vfs,
 )

 from .utils import (
     hashutil,
     procutil,
     stringutil,
 )

 if pycompat.iswindows:
     from . import scmwindows as scmplatform
 else:
     from . import scmposix as scmplatform

 parsers = policy.importmod('parsers')
 rustrevlog = policy.importrust('revlog')

 termsize = scmplatform.termsize


 @attr.s(slots=True, repr=False)
 class status(object):
     """Struct with a list of files per status.

     The 'deleted', 'unknown' and 'ignored' properties are only
     relevant to the working copy.
     """

     modified = attr.ib(default=attr.Factory(list))
     added = attr.ib(default=attr.Factory(list))
     removed = attr.ib(default=attr.Factory(list))
     deleted = attr.ib(default=attr.Factory(list))
     unknown = attr.ib(default=attr.Factory(list))
     ignored = attr.ib(default=attr.Factory(list))
     clean = attr.ib(default=attr.Factory(list))

     def __iter__(self):
         yield self.modified
         yield self.added
         yield self.removed
         yield self.deleted
         yield self.unknown
         yield self.ignored
         yield self.clean

     def __repr__(self):
         return (
             r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
             r'unknown=%s, ignored=%s, clean=%s>'
         ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)


 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
     # Create a (subpath, ctx) mapping where we prefer subpaths from
     # ctx1. The subpaths from ctx2 are important when the .hgsub file
     # has been modified (in ctx2) but not yet committed (in ctx1).
     subpaths = dict.fromkeys(ctx2.substate, ctx2)
     subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

     missing = set()

     for subpath in ctx2.substate:
         if subpath not in ctx1.substate:
             del subpaths[subpath]
             missing.add(subpath)

     for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
         yield subpath, ctx.sub(subpath)

     # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
     # status and diff will have an accurate result when it does
     # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
     # against itself.
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)


 def nochangesfound(ui, repo, excluded=None):
     """Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
     """
     secretlist = []
     if excluded:
         for n in excluded:
             ctx = repo[n]
             if ctx.phase() >= phases.secret and not ctx.extinct():
                 secretlist.append(n)

     if secretlist:
         ui.status(
             _(b"no changes found (ignored %d secret changesets)\n")
             % len(secretlist)
         )
     else:
         ui.status(_(b"no changes found\n"))


 def callcatch(ui, func):
     """call func() with global exception handling

     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
     coarse_exit_code = -1
     detailed_exit_code = -1
     try:
         try:
             return func()
         except:  # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         detailed_exit_code = 20
         if inst.errno == errno.ETIMEDOUT:
             reason = _(b'timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker)
             )
         else:
             reason = _(b'lock held by %r') % inst.locker
         ui.error(
             _(b"abort: %s: %s\n")
             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
         )
         if not inst.locker:
             ui.error(_(b"(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         detailed_exit_code = 20
         ui.error(
             _(b"abort: could not lock %s: %s\n")
             % (
                 inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror),
             )
         )
     except error.RepoError as inst:
         ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except error.ResponseError as inst:
         ui.error(_(b"abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if msg is None:
             ui.error(b"\n")
         elif not isinstance(msg, bytes):
             ui.error(b" %r\n" % (msg,))
         elif not msg:
             ui.error(_(b" empty string\n"))
         else:
             ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
         ui.error(_(b"abort: file censored %s\n") % inst)
     except error.StorageError as inst:
         ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
         detailed_exit_code = 50
     except error.InterventionRequired as inst:
         ui.error(b"%s\n" % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
         detailed_exit_code = 240
         coarse_exit_code = 1
     except error.WdirUnsupported:
         ui.error(_(b"abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
         if isinstance(inst, (error.InputError, error.ParseError)):
             detailed_exit_code = 10
         elif isinstance(inst, error.StateError):
             detailed_exit_code = 20
         elif isinstance(inst, error.ConfigError):
             detailed_exit_code = 30
         elif isinstance(inst, error.HookAbort):
             detailed_exit_code = 40
-        elif isinstance(inst, error.OutOfBandError):
+        elif isinstance(inst, error.RemoteError):
             detailed_exit_code = 100
         elif isinstance(inst, error.SecurityError):
             detailed_exit_code = 150
         elif isinstance(inst, error.CanceledError):
             detailed_exit_code = 250
         ui.error(inst.format())
     except error.WorkerError as inst:
         # Don't print a message -- the worker already should have
         return inst.status_code
     except ImportError as inst:
         ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in b"mpatch bdiff".split():
             ui.error(_(b"(did you forget to compile extensions?)\n"))
         elif m in b"zlib".split():
             ui.error(_(b"(is your Python install correct?)\n"))
     except util.urlerr.httperror as inst:
         detailed_exit_code = 100
         ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
     except util.urlerr.urlerror as inst:
         detailed_exit_code = 100
         try:  # usually it is in the form (errno, strerror)
             reason = inst.reason.args[1]
         except (AttributeError, IndexError):
             # it might be anything, for example a string
             reason = inst.reason
         if isinstance(reason, pycompat.unicode):
             # SSLError of Python 2.7.9 contains a unicode
             reason = encoding.unitolocal(reason)
         ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
     except (IOError, OSError) as inst:
         if (
             util.safehasattr(inst, b"args")
             and inst.args
             and inst.args[0] == errno.EPIPE
         ):
             pass
         elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
                 ui.error(
                     _(b"abort: %s: '%s'\n")
                     % (
                         encoding.strtolocal(inst.strerror),
                         stringutil.forcebytestr(inst.filename),
                     )
                 )
             else:
                 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:  # suspicious IOError
             raise
     except MemoryError:
         ui.error(_(b"abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case, catch this and pass the exit code to the caller.
281 detailed_exit_code = 254
282 coarse_exit_code = inst.code
282 coarse_exit_code = inst.code
283
283
284 if ui.configbool(b'ui', b'detailed-exit-code'):
284 if ui.configbool(b'ui', b'detailed-exit-code'):
285 return detailed_exit_code
285 return detailed_exit_code
286 else:
286 else:
287 return coarse_exit_code
287 return coarse_exit_code
288
288
289
289
290 def checknewlabel(repo, lbl, kind):
290 def checknewlabel(repo, lbl, kind):
291 # Do not use the "kind" parameter in ui output.
291 # Do not use the "kind" parameter in ui output.
292 # It makes strings difficult to translate.
292 # It makes strings difficult to translate.
293 if lbl in [b'tip', b'.', b'null']:
293 if lbl in [b'tip', b'.', b'null']:
294 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
294 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
295 for c in (b':', b'\0', b'\n', b'\r'):
295 for c in (b':', b'\0', b'\n', b'\r'):
296 if c in lbl:
296 if c in lbl:
297 raise error.InputError(
297 raise error.InputError(
298 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
298 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
299 )
299 )
300 try:
300 try:
301 int(lbl)
301 int(lbl)
302 raise error.InputError(_(b"cannot use an integer as a name"))
302 raise error.InputError(_(b"cannot use an integer as a name"))
303 except ValueError:
303 except ValueError:
304 pass
304 pass
305 if lbl.strip() != lbl:
305 if lbl.strip() != lbl:
306 raise error.InputError(
306 raise error.InputError(
307 _(b"leading or trailing whitespace in name %r") % lbl
307 _(b"leading or trailing whitespace in name %r") % lbl
308 )
308 )
309
309
310
310
311 def checkfilename(f):
311 def checkfilename(f):
312 '''Check that the filename f is an acceptable filename for a tracked file'''
312 '''Check that the filename f is an acceptable filename for a tracked file'''
313 if b'\r' in f or b'\n' in f:
313 if b'\r' in f or b'\n' in f:
314 raise error.InputError(
314 raise error.InputError(
315 _(b"'\\n' and '\\r' disallowed in filenames: %r")
315 _(b"'\\n' and '\\r' disallowed in filenames: %r")
316 % pycompat.bytestr(f)
316 % pycompat.bytestr(f)
317 )
317 )
318
318
319
319
320 def checkportable(ui, f):
320 def checkportable(ui, f):
321 '''Check if filename f is portable and warn or abort depending on config'''
321 '''Check if filename f is portable and warn or abort depending on config'''
322 checkfilename(f)
322 checkfilename(f)
323 abort, warn = checkportabilityalert(ui)
323 abort, warn = checkportabilityalert(ui)
324 if abort or warn:
324 if abort or warn:
325 msg = util.checkwinfilename(f)
325 msg = util.checkwinfilename(f)
326 if msg:
326 if msg:
327 msg = b"%s: %s" % (msg, procutil.shellquote(f))
327 msg = b"%s: %s" % (msg, procutil.shellquote(f))
328 if abort:
328 if abort:
329 raise error.InputError(msg)
329 raise error.InputError(msg)
330 ui.warn(_(b"warning: %s\n") % msg)
330 ui.warn(_(b"warning: %s\n") % msg)
331
331
332
332
333 def checkportabilityalert(ui):
333 def checkportabilityalert(ui):
334 """check if the user's config requests nothing, a warning, or abort for
334 """check if the user's config requests nothing, a warning, or abort for
335 non-portable filenames"""
335 non-portable filenames"""
336 val = ui.config(b'ui', b'portablefilenames')
336 val = ui.config(b'ui', b'portablefilenames')
337 lval = val.lower()
337 lval = val.lower()
338 bval = stringutil.parsebool(val)
338 bval = stringutil.parsebool(val)
339 abort = pycompat.iswindows or lval == b'abort'
339 abort = pycompat.iswindows or lval == b'abort'
340 warn = bval or lval == b'warn'
340 warn = bval or lval == b'warn'
341 if bval is None and not (warn or abort or lval == b'ignore'):
341 if bval is None and not (warn or abort or lval == b'ignore'):
342 raise error.ConfigError(
342 raise error.ConfigError(
343 _(b"ui.portablefilenames value is invalid ('%s')") % val
343 _(b"ui.portablefilenames value is invalid ('%s')") % val
344 )
344 )
345 return abort, warn
345 return abort, warn
346
346
347
347
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = b'\0'.join(dirstate)
        self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _(b'possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_(b"warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)


def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = cl._filteredrevs_hashcache.get(maxrev)
    if not key:
        revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
        if revs:
            s = hashutil.sha1()
            for rev in revs:
                s.update(b'%d;' % rev)
            key = s.digest()
        cl._filteredrevs_hashcache[maxrev] = key
    return key


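# A minimal standalone sketch of the digest computed above, assuming
# hashutil.sha1 behaves like hashlib.sha1 and the filtered revs are {2, 5, 9}:
#
#   import hashlib
#   s = hashlib.sha1()
#   for rev in sorted({2, 5, 9}):
#       s.update(b'%d;' % rev)
#   key = s.digest()  # 20-byte key used to validate caches for this view

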
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    """yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs"""

    def errhandler(err):
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if b'.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, b'.hg', b'patches')
            if os.path.isdir(os.path.join(qroot, b'.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(b'.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs


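# Usage sketch (the path is hypothetical): list every repository under a
# tree, following symlinks while guarding against revisits via seen_dirs:
#
#   for repopath in walkrepos(b'/srv/hg', followsym=True):
#       print(repopath)

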
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node


def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev


def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))


def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return b'%d:%s' % (rev, hexfunc(node))


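# Example output (rev and node values assumed): formatrevnode() yields
# something like b'4523:9a3b1c2d4e5f' with a normal ui (shortened node),
# and the full 40-hex-digit node when ui.debugflag is set.

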
def resolvehexnodeidprefix(repo, prefix):
    if prefix.startswith(b'x'):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config(
            b'experimental', b'revisions.disambiguatewithin'
        )
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {
                (b'experimental', b'revisions.disambiguatewithin'): None
            }
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node)  # make sure node isn't filtered
    return node


def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False


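# Worked examples, assuming len(repo) == 1500:
#
#   mayberevnum(repo, b'0')    -> True   # '0' is a valid revnum
#   mayberevnum(repo, b'12')   -> True   # could be rev 12
#   mayberevnum(repo, b'042')  -> False  # leading zero is never a revnum
#   mayberevnum(repo, b'9999') -> False  # beyond the tip rev
#   mayberevnum(repo, b'beef') -> False  # not an integer

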
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. So we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapper needs to give access to its
                        # internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()


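# Usage sketch: callers that resolve many nodes can share a cache dict so
# the disambiguation revset and nodetree are computed only once:
#
#   cache = {}
#   for node in nodes:  # 'nodes' assumed to be binary nodeids
#       print(shortesthexnodeidprefix(repo, node, minlength=4, cache=cache))

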
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False


def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        raise _filterederror(repo, symbol)


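# Usage sketch (symbols assumed to exist): each call returns a changectx,
# or raises error.RepoLookupError for unknown symbols:
#
#   revsymbol(repo, b'.')            # working directory parent
#   revsymbol(repo, b'tip')
#   revsymbol(repo, b'1234')         # revision number
#   revsymbol(repo, b'deadbeef')     # nodeid prefix
#   revsymbol(repo, b'my-bookmark')  # via the name interface

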
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith(b'visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _(b"hidden revision '%s'") % changeid

        hint = _(b'use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _(b"filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)


def revsingle(repo, revspec, default=b'.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_(b'empty revision set'))
    return repo[l.last()]


def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in (
        b'range',
        b'rangepre',
        b'rangepost',
        b'rangeall',
    )


def revpair(repo, revs):
    if not revs:
        return repo[b'.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_(b'empty revision range'))

    first = l.first()
    second = l.last()

    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]


def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec(b'%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)


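# Usage sketch (revset strings assumed): the specs are OR-ed together, and
# bare integers are treated as revision numbers:
#
#   revs = revrange(repo, [b'heads(default)', b'.^', 42])
#   for rev in revs:  # a smartset of integer revisions
#       ...

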
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2


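# The generator above yields 8, 16, 32, ..., doubling until it reaches
# sizelimit and then repeating that limit forever. For example:
#
#   from itertools import islice
#   list(islice(increasingwindows(), 8))
#   # -> [8, 16, 32, 64, 128, 256, 512, 512]

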
def walkchangerevs(repo, revs, makefilematcher, prepare):
    """Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order."""

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()


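# Usage sketch (the prepare callback is a placeholder): contexts come out
# in the order of 'revs', while prepare() sees each window in forward order:
#
#   def prepare(ctx, fmatch):
#       pass  # e.g. prefetch data needed to display ctx
#
#   for ctx in walkchangerevs(repo, revs, lambda ctx: matchall(repo), prepare):
#       print(ctx.rev())

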
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents


def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # this branch would work even if cwd == b'' (ie cwd = repo
            # root), but its generality makes the returned function slower
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)
    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    else:
        return util.localpath


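# Behavior sketch (paths assumed): with ui.relative-paths=yes and the cwd in
# subdirectory b'sub' of the repo, the returned function rewrites
# repo-relative paths into cwd-relative ones:
#
#   uipathfn = getuipathfn(repo)
#   uipathfn(b'sub/file.txt')    # -> b'file.txt'
#   uipathfn(b'other/file.txt')  # -> b'../other/file.txt'

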
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))


def anypats(pats, opts):
    """Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    """
    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))


def expandpats(pats):
    """Expand bare globs when running on windows.
    On posix we assume it has already been done by sh."""
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret


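# Worked example (file names assumed): with util.expandglobs set (Windows),
# a bare glob expands while kind-prefixed patterns pass through unchanged:
#
#   expandpats([b'*.py', b're:.*\\.c$'])
#   # -> [b'a.py', b'b.py', b're:.*\\.c$']  (if a.py and b.py exist)

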
def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    """Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided."""
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def bad(f, msg):
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if m.always():
        pats = []
    return m, pats


def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]


def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()


def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)


def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]


def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))


def backuppath(ui, repo, filepath):
    """customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    """
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)


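# Behavior sketch (config values assumed): without ui.origbackuppath the
# backup sits next to the file; with it, backups are redirected:
#
#   backuppath(ui, repo, b'src/main.c')
#   # default                 -> <repo root>/src/main.c.orig
#   # [ui] origbackuppath=.bk -> <repo root>/.bk/src/main.c

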
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )


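# Usage sketch (old1, new1, old2 assumed to be binary nodeids): record that
# old1 was rewritten into new1 and old2 was dropped; cleanupnodes() then
# moves bookmarks and obsoletes or strips the originals per repo config:
#
#   cleanupnodes(repo, {old1: [new1], old2: []}, operation=b'rebase')

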
1229 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1229 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1230 if opts is None:
1230 if opts is None:
1231 opts = {}
1231 opts = {}
1232 m = matcher
1232 m = matcher
1233 dry_run = opts.get(b'dry_run')
1233 dry_run = opts.get(b'dry_run')
1234 try:
1234 try:
1235 similarity = float(opts.get(b'similarity') or 0)
1235 similarity = float(opts.get(b'similarity') or 0)
1236 except ValueError:
1236 except ValueError:
1237 raise error.Abort(_(b'similarity must be a number'))
1237 raise error.Abort(_(b'similarity must be a number'))
1238 if similarity < 0 or similarity > 100:
1238 if similarity < 0 or similarity > 100:
1239 raise error.Abort(_(b'similarity must be between 0 and 100'))
1239 raise error.Abort(_(b'similarity must be between 0 and 100'))
1240 similarity /= 100.0
1240 similarity /= 100.0
1241
1241
1242 ret = 0
1242 ret = 0
1243
1243
1244 wctx = repo[None]
1244 wctx = repo[None]
1245 for subpath in sorted(wctx.substate):
1245 for subpath in sorted(wctx.substate):
1246 submatch = matchmod.subdirmatcher(subpath, m)
1246 submatch = matchmod.subdirmatcher(subpath, m)
1247 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1247 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1248 sub = wctx.sub(subpath)
1248 sub = wctx.sub(subpath)
1249 subprefix = repo.wvfs.reljoin(prefix, subpath)
1249 subprefix = repo.wvfs.reljoin(prefix, subpath)
1250 subuipathfn = subdiruipathfn(subpath, uipathfn)
1250 subuipathfn = subdiruipathfn(subpath, uipathfn)
1251 try:
1251 try:
1252 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1252 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1253 ret = 1
1253 ret = 1
1254 except error.LookupError:
1254 except error.LookupError:
1255 repo.ui.status(
1255 repo.ui.status(
1256 _(b"skipping missing subrepository: %s\n")
1256 _(b"skipping missing subrepository: %s\n")
1257 % uipathfn(subpath)
1257 % uipathfn(subpath)
1258 )
1258 )
1259
1259
1260 rejected = []
1260 rejected = []
1261
1261
1262 def badfn(f, msg):
1262 def badfn(f, msg):
1263 if f in m.files():
1263 if f in m.files():
1264 m.bad(f, msg)
1264 m.bad(f, msg)
1265 rejected.append(f)
1265 rejected.append(f)
1266
1266
1267 badmatch = matchmod.badmatch(m, badfn)
1267 badmatch = matchmod.badmatch(m, badfn)
1268 added, unknown, deleted, removed, forgotten = _interestingfiles(
1268 added, unknown, deleted, removed, forgotten = _interestingfiles(
1269 repo, badmatch
1269 repo, badmatch
1270 )
1270 )
1271
1271
1272 unknownset = set(unknown + forgotten)
1272 unknownset = set(unknown + forgotten)
1273 toprint = unknownset.copy()
1273 toprint = unknownset.copy()
1274 toprint.update(deleted)
1274 toprint.update(deleted)
1275 for abs in sorted(toprint):
1275 for abs in sorted(toprint):
1276 if repo.ui.verbose or not m.exact(abs):
1276 if repo.ui.verbose or not m.exact(abs):
1277 if abs in unknownset:
1277 if abs in unknownset:
1278 status = _(b'adding %s\n') % uipathfn(abs)
1278 status = _(b'adding %s\n') % uipathfn(abs)
1279 label = b'ui.addremove.added'
1279 label = b'ui.addremove.added'
1280 else:
1280 else:
1281 status = _(b'removing %s\n') % uipathfn(abs)
1281 status = _(b'removing %s\n') % uipathfn(abs)
1282 label = b'ui.addremove.removed'
1282 label = b'ui.addremove.removed'
1283 repo.ui.status(status, label=label)
1283 repo.ui.status(status, label=label)
1284
1284
1285 renames = _findrenames(
1285 renames = _findrenames(
1286 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1286 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1287 )
1287 )
1288
1288
1289 if not dry_run:
1289 if not dry_run:
1290 _markchanges(repo, unknown + forgotten, deleted, renames)
1290 _markchanges(repo, unknown + forgotten, deleted, renames)
1291
1291
1292 for f in rejected:
1292 for f in rejected:
1293 if f in m.files():
1293 if f in m.files():
1294 return 1
1294 return 1
1295 return ret
1295 return ret
1296
1296
1297
1297
def marktouched(repo, files, similarity=0.0):
    """Assert that files have somehow been operated upon. Files are relative
    to the repo root."""
    # define the rejected list before the matcher's badfn closes over it
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    """Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean."""
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


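# Illustrative summary (added commentary, not part of the upstream code):
# _interestingfiles buckets each walked file by its dirstate code and by
# whether it still exists on disk (`st` above):
#
#   dirstate    on disk   bucket     meaning
#   b'?'        yes       unknown    candidate for `hg add`
#   not b'r'    no        deleted    candidate for `hg remove`
#   b'r'        yes       forgotten  removed in dirstate, present again
#   b'r'        no        removed    kept only to feed rename detection
#   b'a'        either    added      already scheduled for addition
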
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


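# Illustrative note (hypothetical values): with a similarity threshold of
# 0.9 (e.g. `hg addremove --similarity 90`, scaled down to 0.9 by
# addremove() above), a removed `old.py` and an added `new.py` sharing 92%
# of their content are recorded as a rename, and the user sees:
#
#   recording removal of old.py as rename to new.py (92% similar)
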
def _markchanges(repo, unknown, deleted, renames):
    """Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied."""
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        """looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev."""
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


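# Illustrative sketch (hypothetical file name and revision): the returned
# closure maps a (filename, rev) pair to its copy source, or None.
#
#   getrenamed = getrenamedfn(repo)
#   src = getrenamed(b'docs/readme.txt', 42)  # e.g. b'README', or None
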
def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


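def _example_listcopies(repo):
    """Illustrative sketch (not part of the original module): list the
    copies recorded in each draft changeset via getcopiesfn()."""
    copiesfn = getcopiesfn(repo)
    for rev in repo.revs(b'draft()'):
        for dst, src in copiesfn(repo[rev]):
            # dst was copied or renamed from src in this changeset
            repo.ui.write(b'%d: %s <- %s\n' % (rev, dst, src))
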
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), nullid)
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()


def filterrequirements(requirements):
    """filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
        wc, store = set(), set()
        for r in requirements:
            if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
                wc.add(r)
            else:
                store.add(r)
        return wc, store
    return requirements, None


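# Illustrative sketch (requirement names are version-dependent and shown
# here only as an assumption): with share-safe enabled, working-directory
# requirements stay in .hg/requires and the rest move to the store.
#
#   reqs = {b'share-safe', b'store', b'revlogv1', b'generaldelta'}
#   wcreq, storereq = filterrequirements(reqs)
#   # wcreq    -> {b'share-safe'}
#   # storereq -> {b'store', b'revlogv1', b'generaldelta'}
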
def istreemanifest(repo):
    """returns whether the repository is using treemanifest or not"""
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements


def writereporequirements(repo, requirements=None):
    """writes requirements for the repo

    Requirements are written to .hg/requires and .hg/store/requires based
    on whether share-safe mode is enabled and which requirements are wdir
    requirements and which are store requirements
    """
    if requirements:
        repo.requirements = requirements
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
    elif repo.ui.configbool(b'format', b'usestore'):
        # only remove store requires if we are using store
        repo.svfs.tryunlink(b'requires')


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


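# Illustrative sketch: the resulting `requires` file holds one requirement
# per line, sorted, e.g. (hypothetical contents):
#
#   dotencode
#   fncache
#   generaldelta
#   revlogv1
#   store
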
class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x


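def _example_filecache_subclass():
    """Illustrative sketch (not part of the original module): the typical
    way to make filecache usable is to subclass it and supply join(), in
    the spirit of what localrepo does for files under .hg/. All names here
    are hypothetical."""

    class examplefilecache(filecache):
        def join(self, obj, fname):
            # resolve the tracked file name against the object's vfs
            return obj.vfs.join(fname)

    # A consumer would then decorate a method on an object that carries a
    # `_filecache` dict and a `vfs`, e.g.:
    #
    #   @examplefilecache(b'bookmarks')
    #   def bookmarks(self):
    #       return parsebookmarks(self.vfs.tryread(b'bookmarks'))
    return examplefilecache
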
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError, error.InputError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data


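# Illustrative sketch (hypothetical source name and command): an extdata
# source is configured in hgrc and then looked up here by name.
#
#   [extdata]
#   bugrefs = shell:cat .hg/bugrefs.txt
#
#   data = extdatasource(repo, b'bugrefs')
#   # -> {rev: b'value', ...} for every "REVSPEC [VALUE]" line the command
#   #    printed, silently skipping revisions unknown to the local repo
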
class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))


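def _example_progressbar(ui, files):
    """Illustrative sketch (not part of the original module): callers
    normally obtain an instance of this class through ui.makeprogress(),
    which wires in the ui's update callback."""
    with ui.makeprogress(
        b'examining', unit=b'files', total=len(files)
    ) as progress_bar:
        for f in files:
            progress_bar.increment(item=f)
    # exiting the context manager calls complete(), clearing the bar
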
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))


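# Illustrative sketch (hypothetical file name and keys):
#
#   skv = simplekeyvaluefile(repo.vfs, b'examplestate')
#   skv.write({b'state': b'done', b'version': b'1'}, firstline=b'v1')
#   # .hg/examplestate now contains:
#   #   v1
#   #   state=done
#   #   version=1
#   skv.read(firstlinenonkeyval=True)
#   # -> {b'__firstline': b'v1', b'state': b'done', b'version': b'1'}
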
_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        if m:
            assert isinstance(m, matchmod.basematcher)
            # The command itself will complain about files that don't exist, so
            # don't duplicate the message.
            return matchmod.badmatch(m, lambda fn, msg: None)
        else:
            return matchall(repo)

    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]

    fileprefetchhooks(repo, revbadmatches)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

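def _example_register_prefetch_hook():
    """Illustrative sketch (not part of the original module): how an
    extension would plug into prefetchfiles(). The source name
    b'exampleext' is hypothetical."""

    def prefetch(repo, revmatches):
        for rev, match in revmatches:
            # fetch the files selected by `match` at `rev` from a remote
            # store here; a no-op in this sketch
            pass

    fileprefetchhooks.add(b'exampleext', prefetch)
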
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
    """register a callback to issue a summary after the transaction is closed

    If as_validator is true, then the callbacks are registered as transaction
    validators instead
    """

    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        if as_validator:
            otr.addvalidator(newcat, wrapped)
        else:
            otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            if as_validator:
                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                msg = _(b'obsoleted %i changesets\n')
                if as_validator:
                    msg = _(b'obsoleting %i changesets\n')
                repo.ui.status(msg % len(obsoleted))

    if obsolete.isenabled(
        repo, obsolete.createmarkersopt
    ) and repo.ui.configbool(
        b'experimental', b'evolution.report-instabilities'
    ):
        instabilitytypes = [
            (b'orphan', b'orphan'),
            (b'phase-divergent', b'phasedivergent'),
            (b'content-divergent', b'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(
                    set(obsolete.getrevs(repo, revset)) - filtered
                )
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)

        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (
                    newinstabilitycounts[instability]
                    - oldinstabilitycounts[instability]
                )
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            published = []
            for revs, (old, new) in tr.changes.get(b'phases', []):
                if new != phases.public:
                    continue
                published.extend(rev for rev in revs if rev < origrepolen)
            if not published:
                return
            msg = _(b'%d local changesets published\n')
            if as_validator:
                msg = _(b'%d local changesets will be published\n')
            repo.ui.status(msg % len(published))


def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)


def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)


def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(filtername)
    # possible improvement: we could restrict the check to affected branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)


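# Illustrative sketch (editor's addition): the head check reduced to a
# plain mapping from branch name to head list. The dict and
# `check_single_heads` are hypothetical stand-ins for branchmap().
def check_single_heads(branch_heads):
    for name, heads in branch_heads.items():
        if len(heads) > 1:
            raise ValueError(
                b'rejecting multiple heads on branch "%s"' % name
            )


check_single_heads({b'default': [b'deadbeef']})  # ok: one head
try:
    check_single_heads({b'stable': [b'aaa', b'bbb']})
except ValueError:
    pass  # two heads on "stable" are rejected, as above

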
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink


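# Illustrative sketch (editor's addition): how an extension might hook
# wrapconvertsink() via Mercurial's usual extensions.wrapfunction()
# mechanism. `LoggingSink` is a hypothetical decorator class; shown as
# comments since this sketch cannot be imported here:
#
#     from mercurial import extensions, scmutil
#
#     def _wrapsink(orig, sink):
#         # decorate whatever sink the original hook returns
#         return LoggingSink(orig(sink))
#
#     def uisetup(ui):
#         extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)

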
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)


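# Illustrative usage (editor's addition): a write command resolving
# user-supplied revisions might call, roughly,
#
#     repo = unhidehashlikerevs(repo, [b'1f0dee6', b'42'], b'warn')
#
# after which the named hidden changesets (hypothetical hash prefix and
# revision number shown) are visible on the returned repo, provided
# experimental.directaccess is enabled (and
# experimental.directaccess.revnums for bare revision numbers).

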
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                        continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


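# Illustrative sketch (editor's addition): classifying the hash-like
# symbols the loop above handles. `classify_symbol` and its return
# labels are hypothetical; real resolution goes through the changelog.
def classify_symbol(s, tiprev):
    try:
        n = int(s)
        if n <= tiprev:
            return 'revnum'  # candidate revision number, e.g. "42"
    except ValueError:
        pass
    return 'hash-prefix'  # e.g. "1f0dee6", tried against node ids


assert classify_symbol(b'42', tiprev=100) == 'revnum'
assert classify_symbol(b'1f0dee6', tiprev=100) == 'hash-prefix'

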
def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark

    If the bookmarked revision isn't a head, an empty set will be returned.
    """
    return repo.revs(format_bookmark_revspec(mark))


def format_bookmark_revspec(mark):
    """Build a revset expression to select revisions reachable by a given
    bookmark"""
    mark = b'literal:' + mark
    return revsetlang.formatspec(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
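

# Illustrative expansion (editor's addition): for a bookmark named
# b"feature", the revspec built above is, roughly,
#
#     ancestors(bookmark("literal:feature"))
#       - ancestors(head() and not bookmark("literal:feature"))
#       - ancestors(bookmark() and not bookmark("literal:feature"))
#
# i.e. ancestors of the bookmarked head, minus history reachable from
# other heads or from other bookmarks. The exact quoting is whatever
# revsetlang.formatspec() produces; the b'literal:' prefix forces an
# exact name match so a bookmark whose name looks like a pattern (e.g.
# starting with "re:") is not treated as one.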