errors: make StorageError subclass Error, attaching an exit code to it...
Martin von Zweigbergk
r48075:dd339191 default
@@ -1,685 +1,688 @@
1 # error.py - Mercurial exceptions
1 # error.py - Mercurial exceptions
2 #
2 #
3 # Copyright 2005-2008 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2008 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Mercurial exceptions.
8 """Mercurial exceptions.
9
9
10 This allows us to catch exceptions at higher levels without forcing
10 This allows us to catch exceptions at higher levels without forcing
11 imports.
11 imports.
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import difflib
16 import difflib
17
17
18 # Do not import anything but pycompat here, please
18 # Do not import anything but pycompat here, please
19 from . import pycompat
19 from . import pycompat
20
20
21 if pycompat.TYPE_CHECKING:
21 if pycompat.TYPE_CHECKING:
22 from typing import (
22 from typing import (
23 Any,
23 Any,
24 AnyStr,
24 AnyStr,
25 Iterable,
25 Iterable,
26 List,
26 List,
27 Optional,
27 Optional,
28 Sequence,
28 Sequence,
29 Union,
29 Union,
30 )
30 )
31
31
32
32
33 def _tobytes(exc):
33 def _tobytes(exc):
34 """Byte-stringify exception in the same way as BaseException_str()"""
34 """Byte-stringify exception in the same way as BaseException_str()"""
35 if not exc.args:
35 if not exc.args:
36 return b''
36 return b''
37 if len(exc.args) == 1:
37 if len(exc.args) == 1:
38 return pycompat.bytestr(exc.args[0])
38 return pycompat.bytestr(exc.args[0])
39 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
39 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
40
40
41
41
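
As an illustration of what _tobytes() produces, here is a reduced, runnable sketch; bytestr() below is an invented stand-in for pycompat.bytestr, not the real helper:

def bytestr(v):
    # stand-in for pycompat.bytestr: pass bytes through, encode str() of the rest
    if isinstance(v, bytes):
        return v
    return str(v).encode('ascii')

def tobytes_demo(args):
    # mirrors the three cases of _tobytes() above
    if not args:
        return b''
    if len(args) == 1:
        return bytestr(args[0])
    return b'(%s)' % b', '.join(b"'%s'" % bytestr(a) for a in args)

assert tobytes_demo(()) == b''
assert tobytes_demo((b'boom',)) == b'boom'
assert tobytes_demo((b'a', 2)) == b"('a', '2')"
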
42 class Hint(object):
42 class Hint(object):
43 """Mix-in to provide a hint of an error
43 """Mix-in to provide a hint of an error
44
44
45 This should come first in the inheritance list to consume a hint and
45 This should come first in the inheritance list to consume a hint and
46 pass remaining arguments to the exception class.
46 pass remaining arguments to the exception class.
47 """
47 """
48
48
49 def __init__(self, *args, **kw):
49 def __init__(self, *args, **kw):
50 self.hint = kw.pop('hint', None)
50 self.hint = kw.pop('hint', None)
51 super(Hint, self).__init__(*args, **kw)
51 super(Hint, self).__init__(*args, **kw)
52
52
53
53
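
Why the mix-in must come first in the inheritance list: its __init__ runs before Exception's and pops the 'hint' keyword so the base class never sees it. A minimal runnable sketch with invented names:

class HintDemo(object):
    def __init__(self, *args, **kw):
        self.hint = kw.pop('hint', None)             # consume the hint...
        super(HintDemo, self).__init__(*args, **kw)  # ...pass the rest along

class DemoError(HintDemo, Exception):
    pass

e = DemoError('something failed', hint='try --force')
assert e.hint == 'try --force'
assert e.args == ('something failed',)   # Exception never saw 'hint'
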
54 class Error(Hint, Exception):
54 class Error(Hint, Exception):
55 """Base class for Mercurial errors."""
55 """Base class for Mercurial errors."""
56
56
57 def __init__(
57 def __init__(
58 self, message, hint=None, coarse_exit_code=None, detailed_exit_code=None
58 self, message, hint=None, coarse_exit_code=None, detailed_exit_code=None
59 ):
59 ):
60 # type: (bytes, Optional[bytes], Optional[int], Optional[int]) -> None
60 # type: (bytes, Optional[bytes], Optional[int], Optional[int]) -> None
61 self.message = message
61 self.message = message
62 self.hint = hint
62 self.hint = hint
63 self.coarse_exit_code = coarse_exit_code
63 self.coarse_exit_code = coarse_exit_code
64 self.detailed_exit_code = detailed_exit_code
64 self.detailed_exit_code = detailed_exit_code
65 # Pass the message into the Exception constructor to help extensions
65 # Pass the message into the Exception constructor to help extensions
66 # that look for exc.args[0].
66 # that look for exc.args[0].
67 Exception.__init__(self, message)
67 Exception.__init__(self, message)
68
68
69 def __bytes__(self):
69 def __bytes__(self):
70 return self.message
70 return self.message
71
71
72 if pycompat.ispy3:
72 if pycompat.ispy3:
73
73
74 def __str__(self):
74 def __str__(self):
75 # the output would be unreadable if the message was translated,
75 # the output would be unreadable if the message was translated,
76 # but do not replace it with encoding.strfromlocal(), which
76 # but do not replace it with encoding.strfromlocal(), which
77 # may raise another exception.
77 # may raise another exception.
78 return pycompat.sysstr(self.__bytes__())
78 return pycompat.sysstr(self.__bytes__())
79
79
80 def format(self):
80 def format(self):
81 # type: () -> bytes
81 # type: () -> bytes
82 from .i18n import _
82 from .i18n import _
83
83
84 message = _(b"abort: %s\n") % self.message
84 message = _(b"abort: %s\n") % self.message
85 if self.hint:
85 if self.hint:
86 message += _(b"(%s)\n") % self.hint
86 message += _(b"(%s)\n") % self.hint
87 return message
87 return message
88
88
89
89
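
Error carries both exit-code flavors plus the hint, and format() is the text the top-level handler prints. A reduced runnable model of that contract (MiniError and run are invented names):

class MiniError(Exception):
    def __init__(self, message, hint=None,
                 coarse_exit_code=None, detailed_exit_code=None):
        self.message = message
        self.hint = hint
        self.coarse_exit_code = coarse_exit_code
        self.detailed_exit_code = detailed_exit_code
        Exception.__init__(self, message)

def run(func, use_detailed=True):
    # tiny dispatcher: print the formatted error, return the attached code
    try:
        func()
        return 0
    except MiniError as e:
        print('abort: %s' % e.message)
        if e.hint:
            print('(%s)' % e.hint)
        if use_detailed and e.detailed_exit_code is not None:
            return e.detailed_exit_code
        return e.coarse_exit_code if e.coarse_exit_code is not None else 255

def fail():
    raise MiniError('no space left', detailed_exit_code=50)

assert run(fail) == 50
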
90 class Abort(Error):
90 class Abort(Error):
91 """Raised if a command needs to print an error and exit."""
91 """Raised if a command needs to print an error and exit."""
92
92
93
93
94 class StorageError(Hint, Exception):
94 class StorageError(Error):
95 """Raised when an error occurs in a storage layer.
95 """Raised when an error occurs in a storage layer.
96
96
97 Usually subclassed by a storage-specific exception.
97 Usually subclassed by a storage-specific exception.
98 """
98 """
99
99
100 __bytes__ = _tobytes
100 def __init__(self, message, hint=None):
101 super(StorageError, self).__init__(
102 message, hint=hint, detailed_exit_code=50
103 )
101
104
102
105
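
This hunk is the point of the change: StorageError moves off (Hint, Exception) onto the Error base class and pins detailed exit code 50, which every subclass below (RevlogError, LookupError, CensoredNodeError, ...) now inherits. A reduced, Mercurial-free model of the resulting behavior (all *Demo names are invented):

class ErrorDemo(Exception):
    def __init__(self, message, hint=None, detailed_exit_code=None):
        self.message = message
        self.hint = hint
        self.detailed_exit_code = detailed_exit_code
        Exception.__init__(self, message)

class StorageErrorDemo(ErrorDemo):
    def __init__(self, message, hint=None):
        super(StorageErrorDemo, self).__init__(
            message, hint=hint, detailed_exit_code=50)

class RevlogErrorDemo(StorageErrorDemo):
    pass

try:
    raise RevlogErrorDemo('index data corrupt')
except ErrorDemo as e:   # a generic handler now catches storage errors too
    assert e.detailed_exit_code == 50
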
103 class RevlogError(StorageError):
106 class RevlogError(StorageError):
104 pass
107 pass
105
108
106
109
107 class SidedataHashError(RevlogError):
110 class SidedataHashError(RevlogError):
108 def __init__(self, key, expected, got):
111 def __init__(self, key, expected, got):
109 self.hint = None
112 self.hint = None
110 self.sidedatakey = key
113 self.sidedatakey = key
111 self.expecteddigest = expected
114 self.expecteddigest = expected
112 self.actualdigest = got
115 self.actualdigest = got
113
116
114
117
115 class FilteredIndexError(IndexError):
118 class FilteredIndexError(IndexError):
116 __bytes__ = _tobytes
119 __bytes__ = _tobytes
117
120
118
121
119 class LookupError(RevlogError, KeyError):
122 class LookupError(RevlogError, KeyError):
120 def __init__(self, name, index, message):
123 def __init__(self, name, index, message):
121 self.name = name
124 self.name = name
122 self.index = index
125 self.index = index
123 # this can't be called 'message' because at least some installs of
126 # this can't be called 'message' because at least some installs of
124 # Python 2.6+ complain about the 'message' property being deprecated
127 # Python 2.6+ complain about the 'message' property being deprecated
125 self.lookupmessage = message
128 self.lookupmessage = message
126 if isinstance(name, bytes) and len(name) == 20:
129 if isinstance(name, bytes) and len(name) == 20:
127 from .node import hex
130 from .node import hex
128
131
129 name = hex(name)
132 name = hex(name)
130 # if name is a binary node, it can be None
133 # if name is a binary node, it can be None
131 RevlogError.__init__(
134 RevlogError.__init__(
132 self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
135 self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
133 )
136 )
134
137
135 def __bytes__(self):
138 def __bytes__(self):
136 return RevlogError.__bytes__(self)
139 return RevlogError.__bytes__(self)
137
140
138 def __str__(self):
141 def __str__(self):
139 return RevlogError.__str__(self)
142 return RevlogError.__str__(self)
140
143
141
144
142 class AmbiguousPrefixLookupError(LookupError):
145 class AmbiguousPrefixLookupError(LookupError):
143 pass
146 pass
144
147
145
148
146 class FilteredLookupError(LookupError):
149 class FilteredLookupError(LookupError):
147 pass
150 pass
148
151
149
152
150 class ManifestLookupError(LookupError):
153 class ManifestLookupError(LookupError):
151 pass
154 pass
152
155
153
156
154 class CommandError(Exception):
157 class CommandError(Exception):
155 """Exception raised on errors in parsing the command line."""
158 """Exception raised on errors in parsing the command line."""
156
159
157 def __init__(self, command, message):
160 def __init__(self, command, message):
158 # type: (bytes, bytes) -> None
161 # type: (bytes, bytes) -> None
159 self.command = command
162 self.command = command
160 self.message = message
163 self.message = message
161 super(CommandError, self).__init__()
164 super(CommandError, self).__init__()
162
165
163 __bytes__ = _tobytes
166 __bytes__ = _tobytes
164
167
165
168
166 class UnknownCommand(Exception):
169 class UnknownCommand(Exception):
167 """Exception raised if command is not in the command table."""
170 """Exception raised if command is not in the command table."""
168
171
169 def __init__(self, command, all_commands=None):
172 def __init__(self, command, all_commands=None):
170 # type: (bytes, Optional[List[bytes]]) -> None
173 # type: (bytes, Optional[List[bytes]]) -> None
171 self.command = command
174 self.command = command
172 self.all_commands = all_commands
175 self.all_commands = all_commands
173 super(UnknownCommand, self).__init__()
176 super(UnknownCommand, self).__init__()
174
177
175 __bytes__ = _tobytes
178 __bytes__ = _tobytes
176
179
177
180
178 class AmbiguousCommand(Exception):
181 class AmbiguousCommand(Exception):
179 """Exception raised if command shortcut matches more than one command."""
182 """Exception raised if command shortcut matches more than one command."""
180
183
181 def __init__(self, prefix, matches):
184 def __init__(self, prefix, matches):
182 # type: (bytes, List[bytes]) -> None
185 # type: (bytes, List[bytes]) -> None
183 self.prefix = prefix
186 self.prefix = prefix
184 self.matches = matches
187 self.matches = matches
185 super(AmbiguousCommand, self).__init__()
188 super(AmbiguousCommand, self).__init__()
186
189
187 __bytes__ = _tobytes
190 __bytes__ = _tobytes
188
191
189
192
190 class WorkerError(Exception):
193 class WorkerError(Exception):
191 """Exception raised when a worker process dies."""
194 """Exception raised when a worker process dies."""
192
195
193 def __init__(self, status_code):
196 def __init__(self, status_code):
194 # type: (int) -> None
197 # type: (int) -> None
195 self.status_code = status_code
198 self.status_code = status_code
196 # Pass status code to superclass just so it becomes part of __bytes__
199 # Pass status code to superclass just so it becomes part of __bytes__
197 super(WorkerError, self).__init__(status_code)
200 super(WorkerError, self).__init__(status_code)
198
201
199 __bytes__ = _tobytes
202 __bytes__ = _tobytes
200
203
201
204
202 class InterventionRequired(Abort):
205 class InterventionRequired(Abort):
203 """Exception raised when a command requires human intervention."""
206 """Exception raised when a command requires human intervention."""
204
207
205 def __init__(self, message, hint=None):
208 def __init__(self, message, hint=None):
206 super(InterventionRequired, self).__init__(
209 super(InterventionRequired, self).__init__(
207 message, hint=hint, coarse_exit_code=1, detailed_exit_code=240
210 message, hint=hint, coarse_exit_code=1, detailed_exit_code=240
208 )
211 )
209
212
210 def format(self):
213 def format(self):
211 # type: () -> bytes
214 # type: () -> bytes
212 from .i18n import _
215 from .i18n import _
213
216
214 message = _(b"%s\n") % self.message
217 message = _(b"%s\n") % self.message
215 if self.hint:
218 if self.hint:
216 message += _(b"(%s)\n") % self.hint
219 message += _(b"(%s)\n") % self.hint
217 return message
220 return message
218
221
219
222
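
Note the asymmetry with plain Abort: InterventionRequired reports coarse code 1 and detailed code 240, and its format() drops the "abort:" prefix because stopping for the user is expected rather than a failure. A reduced sketch of the two output shapes:

def format_abort(message, hint=None):
    # shape of Error.format() above
    out = 'abort: %s\n' % message
    if hint:
        out += '(%s)\n' % hint
    return out

def format_intervention(message, hint=None):
    # shape of InterventionRequired.format(): no "abort:" prefix
    out = '%s\n' % message
    if hint:
        out += '(%s)\n' % hint
    return out

assert format_abort('merge failed') == 'abort: merge failed\n'
assert format_intervention('unresolved conflicts') == 'unresolved conflicts\n'
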
220 class ConflictResolutionRequired(InterventionRequired):
223 class ConflictResolutionRequired(InterventionRequired):
221 """Exception raised when a continuable command required merge conflict resolution."""
224 """Exception raised when a continuable command required merge conflict resolution."""
222
225
223 def __init__(self, opname):
226 def __init__(self, opname):
224 # type: (bytes) -> None
227 # type: (bytes) -> None
225 from .i18n import _
228 from .i18n import _
226
229
227 self.opname = opname
230 self.opname = opname
228 InterventionRequired.__init__(
231 InterventionRequired.__init__(
229 self,
232 self,
230 _(
233 _(
231 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
234 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
232 )
235 )
233 % opname,
236 % opname,
234 )
237 )
235
238
236
239
237 class InputError(Abort):
240 class InputError(Abort):
238 """Indicates that the user made an error in their input.
241 """Indicates that the user made an error in their input.
239
242
240 Examples: Invalid command, invalid flags, invalid revision.
243 Examples: Invalid command, invalid flags, invalid revision.
241 """
244 """
242
245
243 def __init__(self, message, hint=None):
246 def __init__(self, message, hint=None):
244 super(InputError, self).__init__(
247 super(InputError, self).__init__(
245 message, hint=hint, detailed_exit_code=10
248 message, hint=hint, detailed_exit_code=10
246 )
249 )
247
250
248
251
249 class StateError(Abort):
252 class StateError(Abort):
250 """Indicates that the operation might work if retried in a different state.
253 """Indicates that the operation might work if retried in a different state.
251
254
252 Examples: Unresolved merge conflicts, unfinished operations.
255 Examples: Unresolved merge conflicts, unfinished operations.
253 """
256 """
254
257
255 def __init__(self, message, hint=None):
258 def __init__(self, message, hint=None):
256 super(StateError, self).__init__(
259 super(StateError, self).__init__(
257 message, hint=hint, detailed_exit_code=20
260 message, hint=hint, detailed_exit_code=20
258 )
261 )
259
262
260
263
261 class CanceledError(Abort):
264 class CanceledError(Abort):
262 """Indicates that the user canceled the operation.
265 """Indicates that the user canceled the operation.
263
266
264 Examples: Close commit editor with error status, quit chistedit.
267 Examples: Close commit editor with error status, quit chistedit.
265 """
268 """
266
269
267 def __init__(self, message, hint=None):
270 def __init__(self, message, hint=None):
268 super(CanceledError, self).__init__(
271 super(CanceledError, self).__init__(
269 message, hint=hint, detailed_exit_code=250
272 message, hint=hint, detailed_exit_code=250
270 )
273 )
271
274
272
275
273 class SecurityError(Abort):
276 class SecurityError(Abort):
274 """Indicates that some aspect of security failed.
277 """Indicates that some aspect of security failed.
275
278
276 Examples: Bad server credentials, expired local credentials for network
279 Examples: Bad server credentials, expired local credentials for network
277 filesystem, mismatched GPG signature, DoS protection.
280 filesystem, mismatched GPG signature, DoS protection.
278 """
281 """
279
282
280 def __init__(self, message, hint=None):
283 def __init__(self, message, hint=None):
281 super(SecurityError, self).__init__(
284 super(SecurityError, self).__init__(
282 message, hint=hint, detailed_exit_code=150
285 message, hint=hint, detailed_exit_code=150
283 )
286 )
284
287
285
288
286 class HookLoadError(Abort):
289 class HookLoadError(Abort):
287 """raised when loading a hook fails, aborting an operation
290 """raised when loading a hook fails, aborting an operation
288
291
289 Exists to allow more specialized catching."""
292 Exists to allow more specialized catching."""
290
293
291
294
292 class HookAbort(Abort):
295 class HookAbort(Abort):
293 """raised when a validation hook fails, aborting an operation
296 """raised when a validation hook fails, aborting an operation
294
297
295 Exists to allow more specialized catching."""
298 Exists to allow more specialized catching."""
296
299
297 def __init__(self, message, hint=None):
300 def __init__(self, message, hint=None):
298 super(HookAbort, self).__init__(
301 super(HookAbort, self).__init__(
299 message, hint=hint, detailed_exit_code=40
302 message, hint=hint, detailed_exit_code=40
300 )
303 )
301
304
302
305
303 class ConfigError(Abort):
306 class ConfigError(Abort):
304 """Exception raised when parsing config files"""
307 """Exception raised when parsing config files"""
305
308
306 def __init__(self, message, location=None, hint=None):
309 def __init__(self, message, location=None, hint=None):
307 # type: (bytes, Optional[bytes], Optional[bytes]) -> None
310 # type: (bytes, Optional[bytes], Optional[bytes]) -> None
308 super(ConfigError, self).__init__(
311 super(ConfigError, self).__init__(
309 message, hint=hint, detailed_exit_code=30
312 message, hint=hint, detailed_exit_code=30
310 )
313 )
311 self.location = location
314 self.location = location
312
315
313 def format(self):
316 def format(self):
314 # type: () -> bytes
317 # type: () -> bytes
315 from .i18n import _
318 from .i18n import _
316
319
317 if self.location is not None:
320 if self.location is not None:
318 message = _(b"config error at %s: %s\n") % (
321 message = _(b"config error at %s: %s\n") % (
319 pycompat.bytestr(self.location),
322 pycompat.bytestr(self.location),
320 self.message,
323 self.message,
321 )
324 )
322 else:
325 else:
323 message = _(b"config error: %s\n") % self.message
326 message = _(b"config error: %s\n") % self.message
324 if self.hint:
327 if self.hint:
325 message += _(b"(%s)\n") % self.hint
328 message += _(b"(%s)\n") % self.hint
326 return message
329 return message
327
330
328
331
329 class UpdateAbort(Abort):
332 class UpdateAbort(Abort):
330 """Raised when an update is aborted for destination issue"""
333 """Raised when an update is aborted for destination issue"""
331
334
332
335
333 class MergeDestAbort(Abort):
336 class MergeDestAbort(Abort):
334 """Raised when an update is aborted for destination issues"""
337 """Raised when an update is aborted for destination issues"""
335
338
336
339
337 class NoMergeDestAbort(MergeDestAbort):
340 class NoMergeDestAbort(MergeDestAbort):
338 """Raised when an update is aborted because there is nothing to merge"""
341 """Raised when an update is aborted because there is nothing to merge"""
339
342
340
343
341 class ManyMergeDestAbort(MergeDestAbort):
344 class ManyMergeDestAbort(MergeDestAbort):
342 """Raised when an update is aborted because destination is ambiguous"""
345 """Raised when an update is aborted because destination is ambiguous"""
343
346
344
347
345 class ResponseExpected(Abort):
348 class ResponseExpected(Abort):
346 """Raised when an EOF is received for a prompt"""
349 """Raised when an EOF is received for a prompt"""
347
350
348 def __init__(self):
351 def __init__(self):
349 from .i18n import _
352 from .i18n import _
350
353
351 Abort.__init__(self, _(b'response expected'))
354 Abort.__init__(self, _(b'response expected'))
352
355
353
356
354 class RemoteError(Abort):
357 class RemoteError(Abort):
355 """Exception raised when interacting with a remote repo fails"""
358 """Exception raised when interacting with a remote repo fails"""
356
359
357 def __init__(self, message, hint=None):
360 def __init__(self, message, hint=None):
358 super(RemoteError, self).__init__(
361 super(RemoteError, self).__init__(
359 message, hint=hint, detailed_exit_code=100
362 message, hint=hint, detailed_exit_code=100
360 )
363 )
361
364
362
365
363 class OutOfBandError(RemoteError):
366 class OutOfBandError(RemoteError):
364 """Exception raised when a remote repo reports failure"""
367 """Exception raised when a remote repo reports failure"""
365
368
366 def __init__(self, message=None, hint=None):
369 def __init__(self, message=None, hint=None):
367 from .i18n import _
370 from .i18n import _
368
371
369 if message:
372 if message:
370 # Abort.format() adds a trailing newline
373 # Abort.format() adds a trailing newline
371 message = _(b"remote error:\n%s") % message.rstrip(b'\n')
374 message = _(b"remote error:\n%s") % message.rstrip(b'\n')
372 else:
375 else:
373 message = _(b"remote error")
376 message = _(b"remote error")
374 super(OutOfBandError, self).__init__(message, hint=hint)
377 super(OutOfBandError, self).__init__(message, hint=hint)
375
378
376
379
377 class ParseError(Abort):
380 class ParseError(Abort):
378 """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
381 """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
379
382
380 def __init__(self, message, location=None, hint=None):
383 def __init__(self, message, location=None, hint=None):
381 # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
384 # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
382 super(ParseError, self).__init__(
385 super(ParseError, self).__init__(
383 message, hint=hint, detailed_exit_code=10
386 message, hint=hint, detailed_exit_code=10
384 )
387 )
385 self.location = location
388 self.location = location
386
389
387 def format(self):
390 def format(self):
388 # type: () -> bytes
391 # type: () -> bytes
389 from .i18n import _
392 from .i18n import _
390
393
391 if self.location is not None:
394 if self.location is not None:
392 message = _(b"hg: parse error at %s: %s\n") % (
395 message = _(b"hg: parse error at %s: %s\n") % (
393 pycompat.bytestr(self.location),
396 pycompat.bytestr(self.location),
394 self.message,
397 self.message,
395 )
398 )
396 else:
399 else:
397 message = _(b"hg: parse error: %s\n") % self.message
400 message = _(b"hg: parse error: %s\n") % self.message
398 if self.hint:
401 if self.hint:
399 message += _(b"(%s)\n") % self.hint
402 message += _(b"(%s)\n") % self.hint
400 return message
403 return message
401
404
402
405
403 class PatchError(Exception):
406 class PatchError(Exception):
404 __bytes__ = _tobytes
407 __bytes__ = _tobytes
405
408
406
409
407 def getsimilar(symbols, value):
410 def getsimilar(symbols, value):
408 # type: (Iterable[bytes], bytes) -> List[bytes]
411 # type: (Iterable[bytes], bytes) -> List[bytes]
409 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
412 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
410 # The cutoff for similarity here is pretty arbitrary. It should
413 # The cutoff for similarity here is pretty arbitrary. It should
411 # probably be investigated and tweaked.
414 # probably be investigated and tweaked.
412 return [s for s in symbols if sim(s) > 0.6]
415 return [s for s in symbols if sim(s) > 0.6]
413
416
414
417
415 def similarity_hint(similar):
418 def similarity_hint(similar):
416 # type: (List[bytes]) -> Optional[bytes]
419 # type: (List[bytes]) -> Optional[bytes]
417 from .i18n import _
420 from .i18n import _
418
421
419 if len(similar) == 1:
422 if len(similar) == 1:
420 return _(b"did you mean %s?") % similar[0]
423 return _(b"did you mean %s?") % similar[0]
421 elif similar:
424 elif similar:
422 ss = b", ".join(sorted(similar))
425 ss = b", ".join(sorted(similar))
423 return _(b"did you mean one of %s?") % ss
426 return _(b"did you mean one of %s?") % ss
424 else:
427 else:
425 return None
428 return None
426
429
427
430
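
The 0.6 cutoff in action, with an invented symbol list and plain str in place of bytes:

import difflib

def getsimilar_demo(symbols, value, cutoff=0.6):
    # same shape as getsimilar() above
    sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
    return [s for s in symbols if sim(s) > cutoff]

symbols = ['ancestors', 'descendants', 'branch', 'heads']
assert getsimilar_demo(symbols, 'ancestor') == ['ancestors']  # ratio ~0.94
assert getsimilar_demo(symbols, 'branhc') == ['branch']       # ratio ~0.83
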
428 class UnknownIdentifier(ParseError):
431 class UnknownIdentifier(ParseError):
429 """Exception raised when a {rev,file}set references an unknown identifier"""
432 """Exception raised when a {rev,file}set references an unknown identifier"""
430
433
431 def __init__(self, function, symbols):
434 def __init__(self, function, symbols):
432 # type: (bytes, Iterable[bytes]) -> None
435 # type: (bytes, Iterable[bytes]) -> None
433 from .i18n import _
436 from .i18n import _
434
437
435 similar = getsimilar(symbols, function)
438 similar = getsimilar(symbols, function)
436 hint = similarity_hint(similar)
439 hint = similarity_hint(similar)
437
440
438 ParseError.__init__(
441 ParseError.__init__(
439 self, _(b"unknown identifier: %s") % function, hint=hint
442 self, _(b"unknown identifier: %s") % function, hint=hint
440 )
443 )
441
444
442
445
443 class RepoError(Hint, Exception):
446 class RepoError(Hint, Exception):
444 __bytes__ = _tobytes
447 __bytes__ = _tobytes
445
448
446
449
447 class RepoLookupError(RepoError):
450 class RepoLookupError(RepoError):
448 pass
451 pass
449
452
450
453
451 class FilteredRepoLookupError(RepoLookupError):
454 class FilteredRepoLookupError(RepoLookupError):
452 pass
455 pass
453
456
454
457
455 class CapabilityError(RepoError):
458 class CapabilityError(RepoError):
456 pass
459 pass
457
460
458
461
459 class RequirementError(RepoError):
462 class RequirementError(RepoError):
460 """Exception raised if .hg/requires has an unknown entry."""
463 """Exception raised if .hg/requires has an unknown entry."""
461
464
462
465
463 class StdioError(IOError):
466 class StdioError(IOError):
464 """Raised if I/O to stdout or stderr fails"""
467 """Raised if I/O to stdout or stderr fails"""
465
468
466 def __init__(self, err):
469 def __init__(self, err):
467 # type: (IOError) -> None
470 # type: (IOError) -> None
468 IOError.__init__(self, err.errno, err.strerror)
471 IOError.__init__(self, err.errno, err.strerror)
469
472
470 # no __bytes__() because error message is derived from the standard IOError
473 # no __bytes__() because error message is derived from the standard IOError
471
474
472
475
473 class UnsupportedMergeRecords(Abort):
476 class UnsupportedMergeRecords(Abort):
474 def __init__(self, recordtypes):
477 def __init__(self, recordtypes):
475 # type: (Iterable[bytes]) -> None
478 # type: (Iterable[bytes]) -> None
476 from .i18n import _
479 from .i18n import _
477
480
478 self.recordtypes = sorted(recordtypes)
481 self.recordtypes = sorted(recordtypes)
479 s = b' '.join(self.recordtypes)
482 s = b' '.join(self.recordtypes)
480 Abort.__init__(
483 Abort.__init__(
481 self,
484 self,
482 _(b'unsupported merge state records: %s') % s,
485 _(b'unsupported merge state records: %s') % s,
483 hint=_(
486 hint=_(
484 b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
487 b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
485 b'more information'
488 b'more information'
486 ),
489 ),
487 )
490 )
488
491
489
492
490 class UnknownVersion(Abort):
493 class UnknownVersion(Abort):
491 """generic exception for aborting from an encounter with an unknown version"""
494 """generic exception for aborting from an encounter with an unknown version"""
492
495
493 def __init__(self, msg, hint=None, version=None):
496 def __init__(self, msg, hint=None, version=None):
494 # type: (bytes, Optional[bytes], Optional[bytes]) -> None
497 # type: (bytes, Optional[bytes], Optional[bytes]) -> None
495 self.version = version
498 self.version = version
496 super(UnknownVersion, self).__init__(msg, hint=hint)
499 super(UnknownVersion, self).__init__(msg, hint=hint)
497
500
498
501
499 class LockError(IOError):
502 class LockError(IOError):
500 def __init__(self, errno, strerror, filename, desc):
503 def __init__(self, errno, strerror, filename, desc):
501 # TODO: figure out if this should be bytes or str
504 # TODO: figure out if this should be bytes or str
502 # _type: (int, str, str, bytes) -> None
505 # _type: (int, str, str, bytes) -> None
503 IOError.__init__(self, errno, strerror, filename)
506 IOError.__init__(self, errno, strerror, filename)
504 self.desc = desc
507 self.desc = desc
505
508
506 # no __bytes__() because error message is derived from the standard IOError
509 # no __bytes__() because error message is derived from the standard IOError
507
510
508
511
509 class LockHeld(LockError):
512 class LockHeld(LockError):
510 def __init__(self, errno, filename, desc, locker):
513 def __init__(self, errno, filename, desc, locker):
511 LockError.__init__(self, errno, b'Lock held', filename, desc)
514 LockError.__init__(self, errno, b'Lock held', filename, desc)
512 self.locker = locker
515 self.locker = locker
513
516
514
517
515 class LockUnavailable(LockError):
518 class LockUnavailable(LockError):
516 pass
519 pass
517
520
518
521
519 # LockError is for errors while acquiring the lock -- this is unrelated
522 # LockError is for errors while acquiring the lock -- this is unrelated
520 class LockInheritanceContractViolation(RuntimeError):
523 class LockInheritanceContractViolation(RuntimeError):
521 __bytes__ = _tobytes
524 __bytes__ = _tobytes
522
525
523
526
524 class ResponseError(Exception):
527 class ResponseError(Exception):
525 """Raised to print an error with part of output and exit."""
528 """Raised to print an error with part of output and exit."""
526
529
527 __bytes__ = _tobytes
530 __bytes__ = _tobytes
528
531
529
532
530 # derived from KeyboardInterrupt to simplify some breakout code
533 # derived from KeyboardInterrupt to simplify some breakout code
531 class SignalInterrupt(KeyboardInterrupt):
534 class SignalInterrupt(KeyboardInterrupt):
532 """Exception raised on SIGTERM and SIGHUP."""
535 """Exception raised on SIGTERM and SIGHUP."""
533
536
534
537
535 class SignatureError(Exception):
538 class SignatureError(Exception):
536 __bytes__ = _tobytes
539 __bytes__ = _tobytes
537
540
538
541
539 class PushRaced(RuntimeError):
542 class PushRaced(RuntimeError):
540 """An exception raised during unbundling that indicate a push race"""
543 """An exception raised during unbundling that indicate a push race"""
541
544
542 __bytes__ = _tobytes
545 __bytes__ = _tobytes
543
546
544
547
545 class ProgrammingError(Hint, RuntimeError):
548 class ProgrammingError(Hint, RuntimeError):
546 """Raised if a mercurial (core or extension) developer made a mistake"""
549 """Raised if a mercurial (core or extension) developer made a mistake"""
547
550
548 def __init__(self, msg, *args, **kwargs):
551 def __init__(self, msg, *args, **kwargs):
549 # type: (AnyStr, Any, Any) -> None
552 # type: (AnyStr, Any, Any) -> None
550 # On Python 3, turn the message back into a string since this is
553 # On Python 3, turn the message back into a string since this is
551 # an internal-only error that won't be printed except in a
554 # an internal-only error that won't be printed except in a
552 # stack trace.
555 # stack trace.
553 msg = pycompat.sysstr(msg)
556 msg = pycompat.sysstr(msg)
554 super(ProgrammingError, self).__init__(msg, *args, **kwargs)
557 super(ProgrammingError, self).__init__(msg, *args, **kwargs)
555
558
556 __bytes__ = _tobytes
559 __bytes__ = _tobytes
557
560
558
561
559 class WdirUnsupported(Exception):
562 class WdirUnsupported(Exception):
560 """An exception which is raised when 'wdir()' is not supported"""
563 """An exception which is raised when 'wdir()' is not supported"""
561
564
562 __bytes__ = _tobytes
565 __bytes__ = _tobytes
563
566
564
567
565 # bundle2 related errors
568 # bundle2 related errors
566 class BundleValueError(ValueError):
569 class BundleValueError(ValueError):
567 """error raised when bundle2 cannot be processed"""
570 """error raised when bundle2 cannot be processed"""
568
571
569 __bytes__ = _tobytes
572 __bytes__ = _tobytes
570
573
571
574
572 class BundleUnknownFeatureError(BundleValueError):
575 class BundleUnknownFeatureError(BundleValueError):
573 def __init__(self, parttype=None, params=(), values=()):
576 def __init__(self, parttype=None, params=(), values=()):
574 self.parttype = parttype
577 self.parttype = parttype
575 self.params = params
578 self.params = params
576 self.values = values
579 self.values = values
577 if self.parttype is None:
580 if self.parttype is None:
578 msg = b'Stream Parameter'
581 msg = b'Stream Parameter'
579 else:
582 else:
580 msg = parttype
583 msg = parttype
581 entries = self.params
584 entries = self.params
582 if self.params and self.values:
585 if self.params and self.values:
583 assert len(self.params) == len(self.values)
586 assert len(self.params) == len(self.values)
584 entries = []
587 entries = []
585 for idx, par in enumerate(self.params):
588 for idx, par in enumerate(self.params):
586 val = self.values[idx]
589 val = self.values[idx]
587 if val is None:
590 if val is None:
588 entries.append(val)
591 entries.append(val)
589 else:
592 else:
590 entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
593 entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
591 if entries:
594 if entries:
592 msg = b'%s - %s' % (msg, b', '.join(entries))
595 msg = b'%s - %s' % (msg, b', '.join(entries))
593 ValueError.__init__(self, msg) # TODO: convert to str?
596 ValueError.__init__(self, msg) # TODO: convert to str?
594
597
595
598
596 class ReadOnlyPartError(RuntimeError):
599 class ReadOnlyPartError(RuntimeError):
597 """error raised when code tries to alter a part being generated"""
600 """error raised when code tries to alter a part being generated"""
598
601
599 __bytes__ = _tobytes
602 __bytes__ = _tobytes
600
603
601
604
602 class PushkeyFailed(Abort):
605 class PushkeyFailed(Abort):
603 """error raised when a pushkey part failed to update a value"""
606 """error raised when a pushkey part failed to update a value"""
604
607
605 def __init__(
608 def __init__(
606 self, partid, namespace=None, key=None, new=None, old=None, ret=None
609 self, partid, namespace=None, key=None, new=None, old=None, ret=None
607 ):
610 ):
608 self.partid = partid
611 self.partid = partid
609 self.namespace = namespace
612 self.namespace = namespace
610 self.key = key
613 self.key = key
611 self.new = new
614 self.new = new
612 self.old = old
615 self.old = old
613 self.ret = ret
616 self.ret = ret
614 # no i18n expected to be processed into a better message
617 # no i18n expected to be processed into a better message
615 Abort.__init__(
618 Abort.__init__(
616 self, b'failed to update value for "%s/%s"' % (namespace, key)
619 self, b'failed to update value for "%s/%s"' % (namespace, key)
617 )
620 )
618
621
619
622
620 class CensoredNodeError(StorageError):
623 class CensoredNodeError(StorageError):
621 """error raised when content verification fails on a censored node
624 """error raised when content verification fails on a censored node
622
625
623 Also contains the tombstone data substituted for the uncensored data.
626 Also contains the tombstone data substituted for the uncensored data.
624 """
627 """
625
628
626 def __init__(self, filename, node, tombstone):
629 def __init__(self, filename, node, tombstone):
627 # type: (bytes, bytes, bytes) -> None
630 # type: (bytes, bytes, bytes) -> None
628 from .node import short
631 from .node import short
629
632
630 StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
633 StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
631 self.tombstone = tombstone
634 self.tombstone = tombstone
632
635
633
636
634 class CensoredBaseError(StorageError):
637 class CensoredBaseError(StorageError):
635 """error raised when a delta is rejected because its base is censored
638 """error raised when a delta is rejected because its base is censored
636
639
637 A delta based on a censored revision must be formed as a single patch
640 A delta based on a censored revision must be formed as a single patch
638 operation which replaces the entire base with new content. This ensures
641 operation which replaces the entire base with new content. This ensures
639 the delta may be applied by clones which have not censored the base.
642 the delta may be applied by clones which have not censored the base.
640 """
643 """
641
644
642
645
643 class InvalidBundleSpecification(Exception):
646 class InvalidBundleSpecification(Exception):
644 """error raised when a bundle specification is invalid.
647 """error raised when a bundle specification is invalid.
645
648
646 This is used for syntax errors as opposed to support errors.
649 This is used for syntax errors as opposed to support errors.
647 """
650 """
648
651
649 __bytes__ = _tobytes
652 __bytes__ = _tobytes
650
653
651
654
652 class UnsupportedBundleSpecification(Exception):
655 class UnsupportedBundleSpecification(Exception):
653 """error raised when a bundle specification is not supported."""
656 """error raised when a bundle specification is not supported."""
654
657
655 __bytes__ = _tobytes
658 __bytes__ = _tobytes
656
659
657
660
658 class CorruptedState(Exception):
661 class CorruptedState(Exception):
659 """error raised when a command is not able to read its state from file"""
662 """error raised when a command is not able to read its state from file"""
660
663
661 __bytes__ = _tobytes
664 __bytes__ = _tobytes
662
665
663
666
664 class PeerTransportError(Abort):
667 class PeerTransportError(Abort):
665 """Transport-level I/O error when communicating with a peer repo."""
668 """Transport-level I/O error when communicating with a peer repo."""
666
669
667
670
668 class InMemoryMergeConflictsError(Exception):
671 class InMemoryMergeConflictsError(Exception):
669 """Exception raised when merge conflicts arose during an in-memory merge."""
672 """Exception raised when merge conflicts arose during an in-memory merge."""
670
673
671 __bytes__ = _tobytes
674 __bytes__ = _tobytes
672
675
673
676
674 class WireprotoCommandError(Exception):
677 class WireprotoCommandError(Exception):
675 """Represents an error during execution of a wire protocol command.
678 """Represents an error during execution of a wire protocol command.
676
679
677 Should only be thrown by wire protocol version 2 commands.
680 Should only be thrown by wire protocol version 2 commands.
678
681
679 The error is a formatter string and an optional iterable of arguments.
682 The error is a formatter string and an optional iterable of arguments.
680 """
683 """
681
684
682 def __init__(self, message, args=None):
685 def __init__(self, message, args=None):
683 # type: (bytes, Optional[Sequence[bytes]]) -> None
686 # type: (bytes, Optional[Sequence[bytes]]) -> None
684 self.message = message
687 self.message = message
685 self.messageargs = args
688 self.messageargs = args
@@ -1,2304 +1,2299 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Olivia Mackall <olivia@selenic.com>
3 # Copyright Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import os
12 import os
13 import posixpath
13 import posixpath
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirrev,
24 wdirrev,
25 )
25 )
26 from .pycompat import getattr
26 from .pycompat import getattr
27 from .thirdparty import attr
27 from .thirdparty import attr
28 from . import (
28 from . import (
29 copies as copiesmod,
29 copies as copiesmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 policy,
37 policy,
38 pycompat,
38 pycompat,
39 requirements as requirementsmod,
39 requirements as requirementsmod,
40 revsetlang,
40 revsetlang,
41 similar,
41 similar,
42 smartset,
42 smartset,
43 url,
43 url,
44 util,
44 util,
45 vfs,
45 vfs,
46 )
46 )
47
47
48 from .utils import (
48 from .utils import (
49 hashutil,
49 hashutil,
50 procutil,
50 procutil,
51 stringutil,
51 stringutil,
52 )
52 )
53
53
54 if pycompat.iswindows:
54 if pycompat.iswindows:
55 from . import scmwindows as scmplatform
55 from . import scmwindows as scmplatform
56 else:
56 else:
57 from . import scmposix as scmplatform
57 from . import scmposix as scmplatform
58
58
59 parsers = policy.importmod('parsers')
59 parsers = policy.importmod('parsers')
60 rustrevlog = policy.importrust('revlog')
60 rustrevlog = policy.importrust('revlog')
61
61
62 termsize = scmplatform.termsize
62 termsize = scmplatform.termsize
63
63
64
64
65 @attr.s(slots=True, repr=False)
65 @attr.s(slots=True, repr=False)
66 class status(object):
66 class status(object):
67 """Struct with a list of files per status.
67 """Struct with a list of files per status.
68
68
69 The 'deleted', 'unknown' and 'ignored' properties are only
69 The 'deleted', 'unknown' and 'ignored' properties are only
70 relevant to the working copy.
70 relevant to the working copy.
71 """
71 """
72
72
73 modified = attr.ib(default=attr.Factory(list))
73 modified = attr.ib(default=attr.Factory(list))
74 added = attr.ib(default=attr.Factory(list))
74 added = attr.ib(default=attr.Factory(list))
75 removed = attr.ib(default=attr.Factory(list))
75 removed = attr.ib(default=attr.Factory(list))
76 deleted = attr.ib(default=attr.Factory(list))
76 deleted = attr.ib(default=attr.Factory(list))
77 unknown = attr.ib(default=attr.Factory(list))
77 unknown = attr.ib(default=attr.Factory(list))
78 ignored = attr.ib(default=attr.Factory(list))
78 ignored = attr.ib(default=attr.Factory(list))
79 clean = attr.ib(default=attr.Factory(list))
79 clean = attr.ib(default=attr.Factory(list))
80
80
81 def __iter__(self):
81 def __iter__(self):
82 yield self.modified
82 yield self.modified
83 yield self.added
83 yield self.added
84 yield self.removed
84 yield self.removed
85 yield self.deleted
85 yield self.deleted
86 yield self.unknown
86 yield self.unknown
87 yield self.ignored
87 yield self.ignored
88 yield self.clean
88 yield self.clean
89
89
90 def __repr__(self):
90 def __repr__(self):
91 return (
91 return (
92 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
92 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
93 r'unknown=%s, ignored=%s, clean=%s>'
93 r'unknown=%s, ignored=%s, clean=%s>'
94 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
94 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
95
95
96
96
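
The __iter__ above exists so a status object still unpacks like the 7-tuple older callers expect; the field order is the contract. A sketch with invented names:

class StatusDemo(object):
    def __init__(self):
        self.modified, self.added, self.removed = [], [], []
        self.deleted, self.unknown, self.ignored, self.clean = [], [], [], []

    def __iter__(self):
        # must match the attribute declaration order
        for field in (self.modified, self.added, self.removed, self.deleted,
                      self.unknown, self.ignored, self.clean):
            yield field

st = StatusDemo()
st.modified.append('a.txt')
modified, added, removed, deleted, unknown, ignored, clean = st
assert modified == ['a.txt'] and clean == []
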
97 def itersubrepos(ctx1, ctx2):
97 def itersubrepos(ctx1, ctx2):
98 """find subrepos in ctx1 or ctx2"""
98 """find subrepos in ctx1 or ctx2"""
99 # Create a (subpath, ctx) mapping where we prefer subpaths from
99 # Create a (subpath, ctx) mapping where we prefer subpaths from
100 # ctx1. The subpaths from ctx2 are important when the .hgsub file
100 # ctx1. The subpaths from ctx2 are important when the .hgsub file
101 # has been modified (in ctx2) but not yet committed (in ctx1).
101 # has been modified (in ctx2) but not yet committed (in ctx1).
102 subpaths = dict.fromkeys(ctx2.substate, ctx2)
102 subpaths = dict.fromkeys(ctx2.substate, ctx2)
103 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
103 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
104
104
105 missing = set()
105 missing = set()
106
106
107 for subpath in ctx2.substate:
107 for subpath in ctx2.substate:
108 if subpath not in ctx1.substate:
108 if subpath not in ctx1.substate:
109 del subpaths[subpath]
109 del subpaths[subpath]
110 missing.add(subpath)
110 missing.add(subpath)
111
111
112 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
112 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
113 yield subpath, ctx.sub(subpath)
113 yield subpath, ctx.sub(subpath)
114
114
115 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
115 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
116 # status and diff will have an accurate result when it does
116 # status and diff will have an accurate result when it does
117 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
117 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
118 # against itself.
118 # against itself.
119 for subpath in missing:
119 for subpath in missing:
120 yield subpath, ctx2.nullsub(subpath, ctx1)
120 yield subpath, ctx2.nullsub(subpath, ctx1)
121
121
122
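
The subpath bookkeeping above in miniature, with plain dicts standing in for the two contexts: ctx2's paths are seeded first and then overwritten by ctx1's, so ctx1 wins wherever both have the subrepo, while paths only in ctx2 are set aside as 'missing' for the null-subrepo pass:

ctx1_subs = {'libs/a': 1, 'libs/b': 1}   # invented example data
ctx2_subs = {'libs/b': 2, 'libs/c': 2}

subpaths = dict.fromkeys(ctx2_subs, 'ctx2')
subpaths.update(dict.fromkeys(ctx1_subs, 'ctx1'))  # ctx1 takes precedence

missing = set(ctx2_subs) - set(ctx1_subs)
for p in missing:
    del subpaths[p]

assert subpaths == {'libs/a': 'ctx1', 'libs/b': 'ctx1'}
assert missing == {'libs/c'}
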
122
123 def nochangesfound(ui, repo, excluded=None):
123 def nochangesfound(ui, repo, excluded=None):
124 """Report no changes for push/pull, excluded is None or a list of
124 """Report no changes for push/pull, excluded is None or a list of
125 nodes excluded from the push/pull.
125 nodes excluded from the push/pull.
126 """
126 """
127 secretlist = []
127 secretlist = []
128 if excluded:
128 if excluded:
129 for n in excluded:
129 for n in excluded:
130 ctx = repo[n]
130 ctx = repo[n]
131 if ctx.phase() >= phases.secret and not ctx.extinct():
131 if ctx.phase() >= phases.secret and not ctx.extinct():
132 secretlist.append(n)
132 secretlist.append(n)
133
133
134 if secretlist:
134 if secretlist:
135 ui.status(
135 ui.status(
136 _(b"no changes found (ignored %d secret changesets)\n")
136 _(b"no changes found (ignored %d secret changesets)\n")
137 % len(secretlist)
137 % len(secretlist)
138 )
138 )
139 else:
139 else:
140 ui.status(_(b"no changes found\n"))
140 ui.status(_(b"no changes found\n"))
141
141
142
142
143 def callcatch(ui, func):
143 def callcatch(ui, func):
144 """call func() with global exception handling
144 """call func() with global exception handling
145
145
146 return func() if no exception happens. otherwise do some error handling
146 return func() if no exception happens. otherwise do some error handling
147 and return an exit code accordingly. does not handle all exceptions.
147 and return an exit code accordingly. does not handle all exceptions.
148 """
148 """
149 coarse_exit_code = -1
149 coarse_exit_code = -1
150 detailed_exit_code = -1
150 detailed_exit_code = -1
151 try:
151 try:
152 try:
152 try:
153 return func()
153 return func()
154 except: # re-raises
154 except: # re-raises
155 ui.traceback()
155 ui.traceback()
156 raise
156 raise
157 # Global exception handling, alphabetically
157 # Global exception handling, alphabetically
158 # Mercurial-specific first, followed by built-in and library exceptions
158 # Mercurial-specific first, followed by built-in and library exceptions
159 except error.LockHeld as inst:
159 except error.LockHeld as inst:
160 detailed_exit_code = 20
160 detailed_exit_code = 20
161 if inst.errno == errno.ETIMEDOUT:
161 if inst.errno == errno.ETIMEDOUT:
162 reason = _(b'timed out waiting for lock held by %r') % (
162 reason = _(b'timed out waiting for lock held by %r') % (
163 pycompat.bytestr(inst.locker)
163 pycompat.bytestr(inst.locker)
164 )
164 )
165 else:
165 else:
166 reason = _(b'lock held by %r') % inst.locker
166 reason = _(b'lock held by %r') % inst.locker
167 ui.error(
167 ui.error(
168 _(b"abort: %s: %s\n")
168 _(b"abort: %s: %s\n")
169 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
169 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
170 )
170 )
171 if not inst.locker:
171 if not inst.locker:
172 ui.error(_(b"(lock might be very busy)\n"))
172 ui.error(_(b"(lock might be very busy)\n"))
173 except error.LockUnavailable as inst:
173 except error.LockUnavailable as inst:
174 detailed_exit_code = 20
174 detailed_exit_code = 20
175 ui.error(
175 ui.error(
176 _(b"abort: could not lock %s: %s\n")
176 _(b"abort: could not lock %s: %s\n")
177 % (
177 % (
178 inst.desc or stringutil.forcebytestr(inst.filename),
178 inst.desc or stringutil.forcebytestr(inst.filename),
179 encoding.strtolocal(inst.strerror),
179 encoding.strtolocal(inst.strerror),
180 )
180 )
181 )
181 )
182 except error.RepoError as inst:
182 except error.RepoError as inst:
183 ui.error(_(b"abort: %s\n") % inst)
183 ui.error(_(b"abort: %s\n") % inst)
184 if inst.hint:
184 if inst.hint:
185 ui.error(_(b"(%s)\n") % inst.hint)
185 ui.error(_(b"(%s)\n") % inst.hint)
186 except error.ResponseError as inst:
186 except error.ResponseError as inst:
187 ui.error(_(b"abort: %s") % inst.args[0])
187 ui.error(_(b"abort: %s") % inst.args[0])
188 msg = inst.args[1]
188 msg = inst.args[1]
189 if isinstance(msg, type(u'')):
189 if isinstance(msg, type(u'')):
190 msg = pycompat.sysbytes(msg)
190 msg = pycompat.sysbytes(msg)
191 if msg is None:
191 if msg is None:
192 ui.error(b"\n")
192 ui.error(b"\n")
193 elif not isinstance(msg, bytes):
193 elif not isinstance(msg, bytes):
194 ui.error(b" %r\n" % (msg,))
194 ui.error(b" %r\n" % (msg,))
195 elif not msg:
195 elif not msg:
196 ui.error(_(b" empty string\n"))
196 ui.error(_(b" empty string\n"))
197 else:
197 else:
198 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
198 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
199 except error.CensoredNodeError as inst:
199 except error.CensoredNodeError as inst:
200 ui.error(_(b"abort: file censored %s\n") % inst)
200 ui.error(_(b"abort: file censored %s\n") % inst)
201 except error.StorageError as inst:
202 ui.error(_(b"abort: %s\n") % inst)
203 if inst.hint:
204 ui.error(_(b"(%s)\n") % inst.hint)
205 detailed_exit_code = 50
206 except error.WdirUnsupported:
201 except error.WdirUnsupported:
207 ui.error(_(b"abort: working directory revision cannot be specified\n"))
202 ui.error(_(b"abort: working directory revision cannot be specified\n"))
208 except error.Error as inst:
203 except error.Error as inst:
209 if inst.detailed_exit_code is not None:
204 if inst.detailed_exit_code is not None:
210 detailed_exit_code = inst.detailed_exit_code
205 detailed_exit_code = inst.detailed_exit_code
211 if inst.coarse_exit_code is not None:
206 if inst.coarse_exit_code is not None:
212 coarse_exit_code = inst.coarse_exit_code
207 coarse_exit_code = inst.coarse_exit_code
213 ui.error(inst.format())
208 ui.error(inst.format())
214 except error.WorkerError as inst:
209 except error.WorkerError as inst:
215 # Don't print a message -- the worker already should have
210 # Don't print a message -- the worker already should have
216 return inst.status_code
211 return inst.status_code
217 except ImportError as inst:
212 except ImportError as inst:
218 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
213 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
219 m = stringutil.forcebytestr(inst).split()[-1]
214 m = stringutil.forcebytestr(inst).split()[-1]
220 if m in b"mpatch bdiff".split():
215 if m in b"mpatch bdiff".split():
221 ui.error(_(b"(did you forget to compile extensions?)\n"))
216 ui.error(_(b"(did you forget to compile extensions?)\n"))
222 elif m in b"zlib".split():
217 elif m in b"zlib".split():
223 ui.error(_(b"(is your Python install correct?)\n"))
218 ui.error(_(b"(is your Python install correct?)\n"))
224 except util.urlerr.httperror as inst:
219 except util.urlerr.httperror as inst:
225 detailed_exit_code = 100
220 detailed_exit_code = 100
226 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
221 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
227 except util.urlerr.urlerror as inst:
222 except util.urlerr.urlerror as inst:
228 detailed_exit_code = 100
223 detailed_exit_code = 100
229 try: # usually it is in the form (errno, strerror)
224 try: # usually it is in the form (errno, strerror)
230 reason = inst.reason.args[1]
225 reason = inst.reason.args[1]
231 except (AttributeError, IndexError):
226 except (AttributeError, IndexError):
232 # it might be anything, for example a string
227 # it might be anything, for example a string
233 reason = inst.reason
228 reason = inst.reason
234 if isinstance(reason, pycompat.unicode):
229 if isinstance(reason, pycompat.unicode):
235 # SSLError of Python 2.7.9 contains a unicode
230 # SSLError of Python 2.7.9 contains a unicode
236 reason = encoding.unitolocal(reason)
231 reason = encoding.unitolocal(reason)
237 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
232 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
238 except (IOError, OSError) as inst:
233 except (IOError, OSError) as inst:
239 if (
234 if (
240 util.safehasattr(inst, b"args")
235 util.safehasattr(inst, b"args")
241 and inst.args
236 and inst.args
242 and inst.args[0] == errno.EPIPE
237 and inst.args[0] == errno.EPIPE
243 ):
238 ):
244 pass
239 pass
245 elif getattr(inst, "strerror", None): # common IOError or OSError
240 elif getattr(inst, "strerror", None): # common IOError or OSError
246 if getattr(inst, "filename", None) is not None:
241 if getattr(inst, "filename", None) is not None:
247 ui.error(
242 ui.error(
248 _(b"abort: %s: '%s'\n")
243 _(b"abort: %s: '%s'\n")
249 % (
244 % (
250 encoding.strtolocal(inst.strerror),
245 encoding.strtolocal(inst.strerror),
251 stringutil.forcebytestr(inst.filename),
246 stringutil.forcebytestr(inst.filename),
252 )
247 )
253 )
248 )
254 else:
249 else:
255 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
250 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
256 else: # suspicious IOError
251 else: # suspicious IOError
257 raise
252 raise
258 except MemoryError:
253 except MemoryError:
259 ui.error(_(b"abort: out of memory\n"))
254 ui.error(_(b"abort: out of memory\n"))
260 except SystemExit as inst:
255 except SystemExit as inst:
261 # Commands shouldn't sys.exit directly, but give a return code.
256 # Commands shouldn't sys.exit directly, but give a return code.
262 # Just in case catch this and pass exit code to caller.
257 # Just in case catch this and pass exit code to caller.
263 detailed_exit_code = 254
258 detailed_exit_code = 254
264 coarse_exit_code = inst.code
259 coarse_exit_code = inst.code
265
260
266 if ui.configbool(b'ui', b'detailed-exit-code'):
261 if ui.configbool(b'ui', b'detailed-exit-code'):
267 return detailed_exit_code
262 return detailed_exit_code
268 else:
263 else:
269 return coarse_exit_code
264 return coarse_exit_code
270
265
271
266
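
The hunk above removes the dedicated StorageError branch: since StorageError is now an error.Error, the generic error.Error branch already reads detailed_exit_code (50) off the exception itself. A reduced model of that control flow (ErrorDemo and callcatch_demo are invented stand-ins):

class ErrorDemo(Exception):
    def __init__(self, message, coarse_exit_code=None, detailed_exit_code=None):
        self.message = message
        self.coarse_exit_code = coarse_exit_code
        self.detailed_exit_code = detailed_exit_code
        Exception.__init__(self, message)

def callcatch_demo(func, detailed_enabled=True):
    coarse_exit_code = -1
    detailed_exit_code = -1
    try:
        return func()
    except ErrorDemo as inst:
        # the generic branch: trust the codes the exception carries
        if inst.detailed_exit_code is not None:
            detailed_exit_code = inst.detailed_exit_code
        if inst.coarse_exit_code is not None:
            coarse_exit_code = inst.coarse_exit_code
        print('abort: %s' % inst.message)
    return detailed_exit_code if detailed_enabled else coarse_exit_code

def corrupt():
    raise ErrorDemo('integrity check failed', detailed_exit_code=50)

assert callcatch_demo(corrupt) == 50
assert callcatch_demo(corrupt, detailed_enabled=False) == -1
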
272 def checknewlabel(repo, lbl, kind):
267 def checknewlabel(repo, lbl, kind):
273 # Do not use the "kind" parameter in ui output.
268 # Do not use the "kind" parameter in ui output.
274 # It makes strings difficult to translate.
269 # It makes strings difficult to translate.
275 if lbl in [b'tip', b'.', b'null']:
270 if lbl in [b'tip', b'.', b'null']:
276 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
271 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
277 for c in (b':', b'\0', b'\n', b'\r'):
272 for c in (b':', b'\0', b'\n', b'\r'):
278 if c in lbl:
273 if c in lbl:
279 raise error.InputError(
274 raise error.InputError(
280 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
275 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
281 )
276 )
282 try:
277 try:
283 int(lbl)
278 int(lbl)
284 raise error.InputError(_(b"cannot use an integer as a name"))
279 raise error.InputError(_(b"cannot use an integer as a name"))
285 except ValueError:
280 except ValueError:
286 pass
281 pass
287 if lbl.strip() != lbl:
282 if lbl.strip() != lbl:
288 raise error.InputError(
283 raise error.InputError(
289 _(b"leading or trailing whitespace in name %r") % lbl
284 _(b"leading or trailing whitespace in name %r") % lbl
290 )
285 )
291
286
292
287
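
The four label rules above, restated as a runnable sketch (plain str, and a returned reason instead of raising error.InputError):

def checklabel_demo(lbl):
    if lbl in ('tip', '.', 'null'):
        return 'reserved name'
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            return 'forbidden character'
    try:
        int(lbl)
        return 'integer name'   # would shadow revision numbers
    except ValueError:
        pass
    if lbl.strip() != lbl:
        return 'leading or trailing whitespace'
    return None                 # acceptable

assert checklabel_demo('tip') == 'reserved name'
assert checklabel_demo('a:b') == 'forbidden character'
assert checklabel_demo('123') == 'integer name'
assert checklabel_demo(' x') == 'leading or trailing whitespace'
assert checklabel_demo('feature-1') is None
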
293 def checkfilename(f):
288 def checkfilename(f):
294 '''Check that the filename f is an acceptable filename for a tracked file'''
289 '''Check that the filename f is an acceptable filename for a tracked file'''
295 if b'\r' in f or b'\n' in f:
290 if b'\r' in f or b'\n' in f:
296 raise error.InputError(
291 raise error.InputError(
297 _(b"'\\n' and '\\r' disallowed in filenames: %r")
292 _(b"'\\n' and '\\r' disallowed in filenames: %r")
298 % pycompat.bytestr(f)
293 % pycompat.bytestr(f)
299 )
294 )
300
295
301
296
302 def checkportable(ui, f):
297 def checkportable(ui, f):
303 '''Check if filename f is portable and warn or abort depending on config'''
298 '''Check if filename f is portable and warn or abort depending on config'''
304 checkfilename(f)
299 checkfilename(f)
305 abort, warn = checkportabilityalert(ui)
300 abort, warn = checkportabilityalert(ui)
306 if abort or warn:
301 if abort or warn:
307 msg = util.checkwinfilename(f)
302 msg = util.checkwinfilename(f)
308 if msg:
303 if msg:
309 msg = b"%s: %s" % (msg, procutil.shellquote(f))
304 msg = b"%s: %s" % (msg, procutil.shellquote(f))
310 if abort:
305 if abort:
311 raise error.InputError(msg)
306 raise error.InputError(msg)
312 ui.warn(_(b"warning: %s\n") % msg)
307 ui.warn(_(b"warning: %s\n") % msg)
313
308
314
309
def checkportabilityalert(ui):
    """check if the user's config requests nothing, a warning, or abort for
    non-portable filenames"""
    val = ui.config(b'ui', b'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == b'abort'
    warn = bval or lval == b'warn'
    if bval is None and not (warn or abort or lval == b'ignore'):
        raise error.ConfigError(
            _(b"ui.portablefilenames value is invalid ('%s')") % val
        )
    return abort, warn


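# Example (illustrative sketch, not part of the original changeset): the
# (abort, warn) pair returned for a few ui.portablefilenames values on a
# POSIX platform; on Windows, abort is always True:
#
#     # 'abort'  -> (True, False)
#     # 'warn'   -> (False, True)
#     # 'true'   -> (False, True)   (any true boolean enables the warning)
#     # 'ignore' -> (False, False)
#     abort, warn = checkportabilityalert(ui)

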
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = b'\0'.join(dirstate)
        self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _(b'possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_(b"warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)


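# Example (illustrative sketch, not part of the original changeset): auditing
# a batch of to-be-added files for case-folding collisions; the file names
# are hypothetical.
#
#     audit = casecollisionauditor(repo.ui, False, repo.dirstate)
#     audit(b'README')
#     audit(b'readme')  # warns: possible case-folding collision for readme

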
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = cl._filteredrevs_hashcache.get(maxrev)
    if not key:
        revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
        if revs:
            s = hashutil.sha1()
            for rev in revs:
                s.update(b'%d;' % rev)
            key = s.digest()
        cl._filteredrevs_hashcache[maxrev] = key
    return key


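# Example (illustrative sketch, not part of the original changeset): the cache
# key computed above is simply a SHA-1 over the sorted filtered revisions,
# serialized as b'<rev>;' chunks. A standalone equivalent:
#
#     import hashlib
#
#     def filteredrevs_key(filteredrevs, maxrev):
#         revs = sorted(r for r in filteredrevs if r <= maxrev)
#         if not revs:
#             return None
#         s = hashlib.sha1()
#         for rev in revs:
#             s.update(b'%d;' % rev)
#         return s.digest()
#
#     filteredrevs_key({2, 5, 9}, maxrev=7)  # hashes b'2;' then b'5;'

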
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    """yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs"""

    def errhandler(err):
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if b'.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, b'.hg', b'patches')
            if os.path.isdir(os.path.join(qroot, b'.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(b'.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs


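# Example (illustrative sketch, not part of the original changeset): printing
# every repository found under a hypothetical directory, following symlinks.
#
#     for repopath in walkrepos(b'/srv/hg', followsym=True):
#         print(repopath)

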
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return ctx.repo().nodeconstants.wdirid
    return node


def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev


def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))


def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return b'%d:%s' % (rev, hexfunc(node))


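# Example (illustrative sketch, not part of the original changeset): at normal
# verbosity formatrevnode() renders the short 12-digit hash, and with --debug
# the full hash; the rev and node values here are hypothetical.
#
#     formatrevnode(repo.ui, 42, node)  # -> b'42:1f0dee641bb7'

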
def resolvehexnodeidprefix(repo, prefix):
    if prefix.startswith(b'x'):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config(
            b'experimental', b'revisions.disambiguatewithin'
        )
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {
                (b'experimental', b'revisions.disambiguatewithin'): None
            }
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node)  # make sure node isn't filtered
    return node


def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # if we are a pure int, then starting with zero will not be
        # confused as a rev; or, obviously, if the int is larger
        # than the value of the tip rev. We still need to disambiguate if
        # prefix == '0', since that *is* a valid revnum.
        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
            return False
        return True
    except ValueError:
        return False


def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow, so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapper needs to give access to its
                        # internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()


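# Example (illustrative sketch, not part of the original changeset): asking
# for at least four hex digits; the result may be longer if four digits would
# be ambiguous, and may gain an 'x' prefix when revisions.prefixhexnode is
# enabled and the prefix could be mistaken for a revision number.
#
#     prefix = shortesthexnodeidprefix(repo, ctx.node(), minlength=4)

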
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False


def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 2 * repo.nodeconstants.nodelen:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        raise _filterederror(repo, symbol)


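# Example (illustrative sketch, not part of the original changeset): resolving
# a few symbol forms to changectx objects; the bookmark name is hypothetical.
#
#     ctx = revsymbol(repo, b'.')            # working directory parent
#     ctx = revsymbol(repo, b'1234')         # revision number
#     ctx = revsymbol(repo, b'my-bookmark')  # via the name interface
#     isrevsymbol(repo, b'no-such-rev')      # -> False instead of raising

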
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith(b'visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _(b"hidden revision '%s'") % changeid

        hint = _(b'use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _(b"filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)


def revsingle(repo, revspec, default=b'.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_(b'empty revision set'))
    return repo[l.last()]


def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in (
        b'range',
        b'rangepre',
        b'rangepost',
        b'rangeall',
    )


def revpair(repo, revs):
    if not revs:
        return repo[b'.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_(b'empty revision range'))

    first = l.first()
    second = l.last()

    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]


def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec(b'%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)


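# Example (illustrative sketch, not part of the original changeset): building
# the union of two user-level revsets. Arguments that need escaping go through
# revsetlang.formatspec() first, as the docstring above recommends.
#
#     spec = revsetlang.formatspec(b'branch(%s)', b'default')
#     for rev in revrange(repo, [spec, b'tip']):
#         print(rev)

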
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2


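# Example (illustrative sketch, not part of the original changeset): the
# window sizes double from 8 up to the 512 cap, then stay there:
#
#     from itertools import islice
#     list(islice(increasingwindows(), 8))
#     # -> [8, 16, 32, 64, 128, 256, 512, 512]

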
def walkchangerevs(repo, revs, makefilematcher, prepare):
    """Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order."""

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()


def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents


def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # this branch would work even if cwd == b'' (ie cwd = repo
            # root), but its generality makes the returned function slower
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)
    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    else:
        return util.localpath


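# Example (illustrative sketch, not part of the original changeset): printing
# a status-like line using whatever path style the user configured; the file
# name is hypothetical.
#
#     uipathfn = getuipathfn(repo, legacyrelativevalue=True)
#     repo.ui.write(b"M %s\n" % uipathfn(b'src/module/file.py'))

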
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))


def anypats(pats, opts):
    """Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    """
    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))


def expandpats(pats):
    """Expand bare globs when running on windows.
    On posix we assume it has already been done by sh."""
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret


def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    """Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided."""
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def bad(f, msg):
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if m.always():
        pats = []
    return m, pats


def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]


def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()


def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)


def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]


def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))


def backuppath(ui, repo, filepath):
    """customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    """
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)


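# Example (illustrative sketch, not part of the original changeset): with no
# ui.origbackuppath configured the backup lands next to the file, otherwise
# under the configured directory; paths are hypothetical.
#
#     backuppath(ui, repo, b'src/app.py')
#     # default:                  <repo>/src/app.py.orig
#     # origbackuppath=.hg/origs: <repo>/.hg/origs/src/app.py

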
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))


def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = repo.nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )


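# Example (illustrative sketch, not part of the original changeset): how a
# history-rewriting command might record that one node was rewritten into
# another; cleanupnodes() then moves bookmarks and, depending on the repo
# configuration, writes obsmarkers or strips. ``old`` and ``new`` stand for
# hypothetical changectx objects.
#
#     replacements = {(old.node(),): [new.node()]}
#     cleanupnodes(repo, replacements, b'amend')

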
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret


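# Illustrative sketch of a caller (the matcher and similarity value are
# assumptions for the example):
#
#   m = matchmod.always()
#   uipathfn = getuipathfn(repo, legacyrelativevalue=True)
#   ret = addremove(repo, m, b'', uipathfn, opts={b'similarity': 90})
#
# A return of 1 signals that an explicitly requested file was rejected,
# mirroring command exit-code conventions.

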
def marktouched(repo, files, similarity=0.0):
    """Assert that files have somehow been operated upon. files are relative to
    the repo root."""
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0


def _interestingfiles(repo, matcher):
    """Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean."""
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten


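# The five buckets combine dirstate status with on-disk presence.  A minimal
# sketch of how addremove() above consumes them (comments are interpretive):
#
#   added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
#   # unknown   -> untracked and present on disk: candidates for adding
#   # deleted   -> tracked but missing on disk: candidates for removal
#   # forgotten -> marked removed yet present on disk again
#   # added + unknown vs. removed + deleted feed _findrenames() below

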
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames


def _markchanges(repo, unknown, deleted, renames):
    """Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied."""
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in pycompat.iteritems(renames):
            wctx.copy(old, new)


def getrenamedfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        """looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev."""
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed


def getcopiesfn(repo, endrev=None):
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn


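# Illustrative sketch: template keywords resolve copies per changeset through
# the returned closure (`ctx` is assumed to be a changectx):
#
#   copiesfn = getcopiesfn(repo)
#   for dst, src in copiesfn(ctx):
#       ui.write(b'%s <- %s\n' % (dst, src))
#
# The changeset-centric branch reads copy metadata from the commit itself;
# otherwise the filelog-based getrenamedfn() cache above is consulted.

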
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)


def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), repo.nullid)
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()


def filterrequirements(requirements):
    """filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
        wc, store = set(), set()
        for r in requirements:
            if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
                wc.add(r)
            else:
                store.add(r)
        return wc, store
    return requirements, None


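# A minimal sketch of the split, assuming b'share-safe' sits in
# requirementsmod.WORKING_DIR_REQUIREMENTS as it does upstream:
#
#   wc, store = filterrequirements({b'share-safe', b'store', b'revlogv1'})
#   # wc    -> {b'share-safe'}            written to .hg/requires
#   # store -> {b'store', b'revlogv1'}    written to .hg/store/requires
#
# Without SHARESAFE_REQUIREMENT in the input the function returns
# (requirements, None) and everything stays in .hg/requires.

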
def istreemanifest(repo):
    """returns whether the repository is using treemanifest or not"""
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements


def writereporequirements(repo, requirements=None):
    """writes requirements for the repo

    Requirements are written to .hg/requires and .hg/store/requires based
    on whether share-safe mode is enabled and which requirements are wdir
    requirements and which are store requirements
    """
    if requirements:
        repo.requirements = requirements
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
    elif repo.ui.configbool(b'format', b'usestore'):
        # only remove store requires if we are using store
        repo.svfs.tryunlink(b'requires')


def writerequires(opener, requirements):
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)


class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x


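# Illustrative sketch of the usual subclassing pattern (localrepo defines a
# similar `repofilecache`; the parser below is hypothetical):
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class repoobject(object):
#       def __init__(self, vfs):
#           self.vfs = vfs
#           self._filecache = {}
#
#       @repofilecache(b'bookmarks')
#       def bookmarks(self):
#           return parsebookmarks(self.vfs)  # hypothetical parser
#
# The first access stats .hg/bookmarks before computing the value; later
# accesses are plain __dict__ lookups until the attribute is invalidated with
# delattr(), which re-arms the descriptor and its stat check.

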
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError, error.InputError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data


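# Illustrative sketch of a matching configuration (paths and data are made up
# for the example):
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugdata.txt
#
# where each output line looks like b'<revspec> <value>'.  Then
#
#   data = extdatasource(repo, b'bugzilla')   # -> {rev: b'<value>', ...}
#
# skips records naming revisions unknown locally, and a failing shell command
# raises Abort with its exit status.

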
class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))


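# Illustrative sketch of typical use; callers normally obtain an instance via
# ui.makeprogress(), which supplies the ui's update callback:
#
#   with ui.makeprogress(b'scanning', unit=b'files', total=len(files)) as p:
#       for i, f in enumerate(files):
#           p.update(i, item=f)
#
# Exiting the with-block calls complete(), which signals the bar to clear by
# reporting pos=None.

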
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised"""
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))


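# Illustrative sketch (the vfs and file name are assumptions for the example):
#
#   f = simplekeyvaluefile(vfs, b'examplestate')
#   f.write({b'tool': b'internal:merge'}, firstline=b'1')
#   f.read(firstlinenonkeyval=True)
#   # -> {b'__firstline': b'1', b'tool': b'internal:merge'}
#
# State files such as shelve's use the raw first line as a format version.

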
_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        if m:
            assert isinstance(m, matchmod.basematcher)
            # The command itself will complain about files that don't exist, so
            # don't duplicate the message.
            return matchmod.badmatch(m, lambda fn, msg: None)
        else:
            return matchall(repo)

    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]

    fileprefetchhooks(repo, revbadmatches)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

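# Illustrative sketch: an extension registers a prefetch hook here and
# commands call prefetchfiles() before reading file contents (the hook body
# and fetch_from_server are assumptions for the example):
#
#   def _prefetch(repo, revbadmatches):
#       for rev, match in revbadmatches:
#           fetch_from_server(repo, rev, match)  # hypothetical transport
#
#   fileprefetchhooks.add('myext', _prefetch)
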
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


1988 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1983 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1989 """register a callback to issue a summary after the transaction is closed
1984 """register a callback to issue a summary after the transaction is closed
1990
1985
1991 If as_validator is true, then the callbacks are registered as transaction
1986 If as_validator is true, then the callbacks are registered as transaction
1992 validators instead
1987 validators instead
1993 """
1988 """
1994
1989
1995 def txmatch(sources):
1990 def txmatch(sources):
1996 return any(txnname.startswith(source) for source in sources)
1991 return any(txnname.startswith(source) for source in sources)
1997
1992
1998 categories = []
1993 categories = []
1999
1994
2000 def reportsummary(func):
1995 def reportsummary(func):
2001 """decorator for report callbacks."""
1996 """decorator for report callbacks."""
2002 # The repoview life cycle is shorter than the one of the actual
1997 # The repoview life cycle is shorter than the one of the actual
2003 # underlying repository. So the filtered object can die before the
1998 # underlying repository. So the filtered object can die before the
2004 # weakref is used leading to troubles. We keep a reference to the
1999 # weakref is used leading to troubles. We keep a reference to the
2005 # unfiltered object and restore the filtering when retrieving the
2000 # unfiltered object and restore the filtering when retrieving the
2006 # repository through the weakref.
2001 # repository through the weakref.
2007 filtername = repo.filtername
2002 filtername = repo.filtername
2008 reporef = weakref.ref(repo.unfiltered())
2003 reporef = weakref.ref(repo.unfiltered())
2009
2004
2010 def wrapped(tr):
2005 def wrapped(tr):
2011 repo = reporef()
2006 repo = reporef()
2012 if filtername:
2007 if filtername:
2013 assert repo is not None # help pytype
2008 assert repo is not None # help pytype
2014 repo = repo.filtered(filtername)
2009 repo = repo.filtered(filtername)
2015 func(repo, tr)
2010 func(repo, tr)
2016
2011
2017 newcat = b'%02i-txnreport' % len(categories)
2012 newcat = b'%02i-txnreport' % len(categories)
2018 if as_validator:
2013 if as_validator:
2019 otr.addvalidator(newcat, wrapped)
2014 otr.addvalidator(newcat, wrapped)
2020 else:
2015 else:
2021 otr.addpostclose(newcat, wrapped)
2016 otr.addpostclose(newcat, wrapped)
2022 categories.append(newcat)
2017 categories.append(newcat)
2023 return wrapped
2018 return wrapped
2024
2019
2025 @reportsummary
2020 @reportsummary
2026 def reportchangegroup(repo, tr):
2021 def reportchangegroup(repo, tr):
2027 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2022 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2028 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2023 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2029 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2024 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2030 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2025 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2031 if cgchangesets or cgrevisions or cgfiles:
2026 if cgchangesets or cgrevisions or cgfiles:
2032 htext = b""
2027 htext = b""
2033 if cgheads:
2028 if cgheads:
2034 htext = _(b" (%+d heads)") % cgheads
2029 htext = _(b" (%+d heads)") % cgheads
2035 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2030 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2036 if as_validator:
2031 if as_validator:
2037 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2032 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2038 assert repo is not None # help pytype
2033 assert repo is not None # help pytype
2039 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2034 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2040
2035
2041 if txmatch(_reportobsoletedsource):
2036 if txmatch(_reportobsoletedsource):
2042
2037
2043 @reportsummary
2038 @reportsummary
2044 def reportobsoleted(repo, tr):
2039 def reportobsoleted(repo, tr):
2045 obsoleted = obsutil.getobsoleted(repo, tr)
2040 obsoleted = obsutil.getobsoleted(repo, tr)
2046 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2041 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2047 if newmarkers:
2042 if newmarkers:
2048 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2043 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2049 if obsoleted:
2044 if obsoleted:
2050 msg = _(b'obsoleted %i changesets\n')
2045 msg = _(b'obsoleted %i changesets\n')
2051 if as_validator:
2046 if as_validator:
2052 msg = _(b'obsoleting %i changesets\n')
2047 msg = _(b'obsoleting %i changesets\n')
2053 repo.ui.status(msg % len(obsoleted))
2048 repo.ui.status(msg % len(obsoleted))
2054
2049
2055 if obsolete.isenabled(
2050 if obsolete.isenabled(
2056 repo, obsolete.createmarkersopt
2051 repo, obsolete.createmarkersopt
2057 ) and repo.ui.configbool(
2052 ) and repo.ui.configbool(
2058 b'experimental', b'evolution.report-instabilities'
2053 b'experimental', b'evolution.report-instabilities'
2059 ):
2054 ):
2060 instabilitytypes = [
2055 instabilitytypes = [
2061 (b'orphan', b'orphan'),
2056 (b'orphan', b'orphan'),
2062 (b'phase-divergent', b'phasedivergent'),
2057 (b'phase-divergent', b'phasedivergent'),
2063 (b'content-divergent', b'contentdivergent'),
2058 (b'content-divergent', b'contentdivergent'),
2064 ]
2059 ]
2065
2060
2066 def getinstabilitycounts(repo):
2061 def getinstabilitycounts(repo):
2067 filtered = repo.changelog.filteredrevs
2062 filtered = repo.changelog.filteredrevs
2068 counts = {}
2063 counts = {}
2069 for instability, revset in instabilitytypes:
2064 for instability, revset in instabilitytypes:
2070 counts[instability] = len(
2065 counts[instability] = len(
2071 set(obsolete.getrevs(repo, revset)) - filtered
2066 set(obsolete.getrevs(repo, revset)) - filtered
2072 )
2067 )
2073 return counts
2068 return counts
2074
2069
2075 oldinstabilitycounts = getinstabilitycounts(repo)
2070 oldinstabilitycounts = getinstabilitycounts(repo)
2076
2071
2077 @reportsummary
2072 @reportsummary
2078 def reportnewinstabilities(repo, tr):
2073 def reportnewinstabilities(repo, tr):
2079 newinstabilitycounts = getinstabilitycounts(repo)
2074 newinstabilitycounts = getinstabilitycounts(repo)
2080 for instability, revset in instabilitytypes:
2075 for instability, revset in instabilitytypes:
2081 delta = (
2076 delta = (
2082 newinstabilitycounts[instability]
2077 newinstabilitycounts[instability]
2083 - oldinstabilitycounts[instability]
2078 - oldinstabilitycounts[instability]
2084 )
2079 )
2085 msg = getinstabilitymessage(delta, instability)
2080 msg = getinstabilitymessage(delta, instability)
2086 if msg:
2081 if msg:
2087 repo.ui.warn(msg)
2082 repo.ui.warn(msg)
2088
2083
2089 if txmatch(_reportnewcssource):
2084 if txmatch(_reportnewcssource):
2090
2085
2091 @reportsummary
2086 @reportsummary
2092 def reportnewcs(repo, tr):
2087 def reportnewcs(repo, tr):
2093 """Report the range of new revisions pulled/unbundled."""
2088 """Report the range of new revisions pulled/unbundled."""
2094 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2089 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2095 unfi = repo.unfiltered()
2090 unfi = repo.unfiltered()
2096 if origrepolen >= len(unfi):
2091 if origrepolen >= len(unfi):
2097 return
2092 return
2098
2093
2099 # Compute the bounds of new visible revisions' range.
2094 # Compute the bounds of new visible revisions' range.
2100 revs = smartset.spanset(repo, start=origrepolen)
2095 revs = smartset.spanset(repo, start=origrepolen)
2101 if revs:
2096 if revs:
2102 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2097 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2103
2098
2104 if minrev == maxrev:
2099 if minrev == maxrev:
2105 revrange = minrev
2100 revrange = minrev
2106 else:
2101 else:
2107 revrange = b'%s:%s' % (minrev, maxrev)
2102 revrange = b'%s:%s' % (minrev, maxrev)
2108 draft = len(repo.revs(b'%ld and draft()', revs))
2103 draft = len(repo.revs(b'%ld and draft()', revs))
2109 secret = len(repo.revs(b'%ld and secret()', revs))
2104 secret = len(repo.revs(b'%ld and secret()', revs))
2110 if not (draft or secret):
2105 if not (draft or secret):
2111 msg = _(b'new changesets %s\n') % revrange
2106 msg = _(b'new changesets %s\n') % revrange
2112 elif draft and secret:
2107 elif draft and secret:
2113 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2108 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2114 msg %= (revrange, draft, secret)
2109 msg %= (revrange, draft, secret)
2115 elif draft:
2110 elif draft:
2116 msg = _(b'new changesets %s (%d drafts)\n')
2111 msg = _(b'new changesets %s (%d drafts)\n')
2117 msg %= (revrange, draft)
2112 msg %= (revrange, draft)
2118 elif secret:
2113 elif secret:
2119 msg = _(b'new changesets %s (%d secrets)\n')
2114 msg = _(b'new changesets %s (%d secrets)\n')
2120 msg %= (revrange, secret)
2115 msg %= (revrange, secret)
2121 else:
2116 else:
2122 errormsg = b'entered unreachable condition'
2117 errormsg = b'entered unreachable condition'
2123 raise error.ProgrammingError(errormsg)
2118 raise error.ProgrammingError(errormsg)
2124 repo.ui.status(msg)
2119 repo.ui.status(msg)
2125
2120
2126 # search new changesets directly pulled as obsolete
2121 # search new changesets directly pulled as obsolete
2127 duplicates = tr.changes.get(b'revduplicates', ())
2122 duplicates = tr.changes.get(b'revduplicates', ())
2128 obsadded = unfi.revs(
2123 obsadded = unfi.revs(
2129 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2124 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2130 )
2125 )
2131 cl = repo.changelog
2126 cl = repo.changelog
2132 extinctadded = [r for r in obsadded if r not in cl]
2127 extinctadded = [r for r in obsadded if r not in cl]
2133 if extinctadded:
2128 if extinctadded:
2134 # They are not just obsolete, but obsolete and invisible
2129 # They are not just obsolete, but obsolete and invisible
2135 # we call them "extinct" internally but the terms have not been
2130 # we call them "extinct" internally but the terms have not been
2136 # exposed to users.
2131 # exposed to users.
2137 msg = b'(%d other changesets obsolete on arrival)\n'
2132 msg = b'(%d other changesets obsolete on arrival)\n'
2138 repo.ui.status(msg % len(extinctadded))
2133 repo.ui.status(msg % len(extinctadded))
2139
2134
    @reportsummary
    def reportphasechanges(repo, tr):
        """Report statistics of phase changes for changesets pre-existing
        pull/unbundle.
        """
        origrepolen = tr.changes.get(b'origrepolen', len(repo))
        published = []
        for revs, (old, new) in tr.changes.get(b'phases', []):
            if new != phases.public:
                continue
            published.extend(rev for rev in revs if rev < origrepolen)
        if not published:
            return
        msg = _(b'%d local changesets published\n')
        if as_validator:
            msg = _(b'%d local changesets will be published\n')
        repo.ui.status(msg % len(published))


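# Editor's sketch (assumes transaction entries shaped as the loop above
# expects, i.e. (revs, (oldphase, newphase)) pairs):
#
#   tr.changes[b'phases'] = [([2, 3], (phases.draft, phases.public))]
#   # with origrepolen == 3, only rev 2 pre-existed the transaction,
#   # so the report is: "1 local changesets published"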
def getinstabilitymessage(delta, instability):
    """return the message used to warn about new instabilities

    exists as a separate function so that extensions can wrap it to show
    more information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)


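# Editor's example (hypothetical values): for delta == 3 and
# instability == b'orphan', this returns b'3 new orphan changesets\n';
# for delta <= 0 it implicitly returns None and callers print nothing.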
def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)


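# Editor's example (hypothetical short hashes): with six nodes and the default
# maxnumnodes=4, a non-verbose ui yields something like
#
#   b'1ab2c3d4e5f6 2bc3d4e5f6a1 3cd4e5f6a1b2 4de5f6a1b2c3 and 2 others'
#
# while a verbose ui lists all six short hashes.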
def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(filtername)
    # possible improvement: we could restrict the check to the affected branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)


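# Editor's note (configuration sketch): upstream wires this check up as a
# transaction validator when the experimental single-head option is enabled,
# e.g. in hgrc:
#
#   [experimental]
#   single-head-per-branch = yes
#
# A transaction that would leave a second head on a named branch is then
# aborted with 'rejecting multiple heads on branch "..."'.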
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink


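# Editor's sketch (MyAuditingSink and _wrapsink are hypothetical names): an
# extension can intercept every converted sink via extensions.wrapfunction:
#
#   from mercurial import extensions, scmutil
#
#   def _wrapsink(orig, sink):
#       return MyAuditingSink(orig(sink))  # hypothetical wrapper class
#
#   def extsetup(ui):
#       extensions.wrapfunction(scmutil, 'wrapconvertsink', _wrapsink)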
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)


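# Editor's example (hypothetical hash prefix): with the experimental
# direct-access knob enabled,
#
#   [experimental]
#   directaccess = yes
#
# a write command can pin a hidden changeset:
#
#   repo = unhidehashlikerevs(repo, [b'ab12cd34ef56'], b'warn')
#   # prints: warning: accessing hidden changesets for write operation: ...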
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of the
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


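# Editor's walkthrough (hypothetical repo of 10 revisions, two of them
# hidden): a symbol such as b'7' is only treated as a revision number when
# experimental.directaccess.revnums is enabled, and it is collected only if
# rev 7 is absent from the filtered changelog (i.e. hidden). Anything that is
# not a plain integer falls through to hash-prefix resolution against the
# unfiltered repository.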
def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark

    If the bookmarked revision isn't a head, an empty set will be returned.
    """
    return repo.revs(format_bookmark_revspec(mark))


def format_bookmark_revspec(mark):
    """Build a revset expression to select revisions reachable by a given
    bookmark"""
    mark = b'literal:' + mark
    return revsetlang.formatspec(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
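# Editor's example (hypothetical bookmark b'feature'): the call above expands,
# via revsetlang.formatspec, to roughly
#
#   ancestors(bookmark('literal:feature'))
#       - ancestors(head() and not bookmark('literal:feature'))
#       - ancestors(bookmark() and not bookmark('literal:feature'))
#
# i.e. everything reachable from the bookmark that is not also reachable from
# some other head or from some other bookmark.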