##// END OF EJS Templates
errors: create "similarity hint" for UnknownIdentifier eagerly in constructor...
Martin von Zweigbergk -
r46495:1817b668 default
parent child Browse files
Show More
@@ -1,540 +1,543 b''
1 # error.py - Mercurial exceptions
1 # error.py - Mercurial exceptions
2 #
2 #
3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Mercurial exceptions.
8 """Mercurial exceptions.
9
9
10 This allows us to catch exceptions at higher levels without forcing
10 This allows us to catch exceptions at higher levels without forcing
11 imports.
11 imports.
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import difflib
16 import difflib
17
17
18 # Do not import anything but pycompat here, please
18 # Do not import anything but pycompat here, please
19 from . import pycompat
19 from . import pycompat
20
20
21
21
22 def _tobytes(exc):
22 def _tobytes(exc):
23 """Byte-stringify exception in the same way as BaseException_str()"""
23 """Byte-stringify exception in the same way as BaseException_str()"""
24 if not exc.args:
24 if not exc.args:
25 return b''
25 return b''
26 if len(exc.args) == 1:
26 if len(exc.args) == 1:
27 return pycompat.bytestr(exc.args[0])
27 return pycompat.bytestr(exc.args[0])
28 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
28 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
29
29
30
30
31 class Hint(object):
31 class Hint(object):
32 """Mix-in to provide a hint of an error
32 """Mix-in to provide a hint of an error
33
33
34 This should come first in the inheritance list to consume a hint and
34 This should come first in the inheritance list to consume a hint and
35 pass remaining arguments to the exception class.
35 pass remaining arguments to the exception class.
36 """
36 """
37
37
38 def __init__(self, *args, **kw):
38 def __init__(self, *args, **kw):
39 self.hint = kw.pop('hint', None)
39 self.hint = kw.pop('hint', None)
40 super(Hint, self).__init__(*args, **kw)
40 super(Hint, self).__init__(*args, **kw)
41
41
42
42
43 class StorageError(Hint, Exception):
43 class StorageError(Hint, Exception):
44 """Raised when an error occurs in a storage layer.
44 """Raised when an error occurs in a storage layer.
45
45
46 Usually subclassed by a storage-specific exception.
46 Usually subclassed by a storage-specific exception.
47 """
47 """
48
48
49 __bytes__ = _tobytes
49 __bytes__ = _tobytes
50
50
51
51
52 class RevlogError(StorageError):
52 class RevlogError(StorageError):
53 pass
53 pass
54
54
55
55
56 class SidedataHashError(RevlogError):
56 class SidedataHashError(RevlogError):
57 def __init__(self, key, expected, got):
57 def __init__(self, key, expected, got):
58 self.sidedatakey = key
58 self.sidedatakey = key
59 self.expecteddigest = expected
59 self.expecteddigest = expected
60 self.actualdigest = got
60 self.actualdigest = got
61
61
62
62
63 class FilteredIndexError(IndexError):
63 class FilteredIndexError(IndexError):
64 __bytes__ = _tobytes
64 __bytes__ = _tobytes
65
65
66
66
67 class LookupError(RevlogError, KeyError):
67 class LookupError(RevlogError, KeyError):
68 def __init__(self, name, index, message):
68 def __init__(self, name, index, message):
69 self.name = name
69 self.name = name
70 self.index = index
70 self.index = index
71 # this can't be called 'message' because at least some installs of
71 # this can't be called 'message' because at least some installs of
72 # Python 2.6+ complain about the 'message' property being deprecated
72 # Python 2.6+ complain about the 'message' property being deprecated
73 self.lookupmessage = message
73 self.lookupmessage = message
74 if isinstance(name, bytes) and len(name) == 20:
74 if isinstance(name, bytes) and len(name) == 20:
75 from .node import short
75 from .node import short
76
76
77 name = short(name)
77 name = short(name)
78 # if name is a binary node, it can be None
78 # if name is a binary node, it can be None
79 RevlogError.__init__(
79 RevlogError.__init__(
80 self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
80 self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
81 )
81 )
82
82
83 def __bytes__(self):
83 def __bytes__(self):
84 return RevlogError.__bytes__(self)
84 return RevlogError.__bytes__(self)
85
85
86 def __str__(self):
86 def __str__(self):
87 return RevlogError.__str__(self)
87 return RevlogError.__str__(self)
88
88
89
89
90 class AmbiguousPrefixLookupError(LookupError):
90 class AmbiguousPrefixLookupError(LookupError):
91 pass
91 pass
92
92
93
93
94 class FilteredLookupError(LookupError):
94 class FilteredLookupError(LookupError):
95 pass
95 pass
96
96
97
97
98 class ManifestLookupError(LookupError):
98 class ManifestLookupError(LookupError):
99 pass
99 pass
100
100
101
101
102 class CommandError(Exception):
102 class CommandError(Exception):
103 """Exception raised on errors in parsing the command line."""
103 """Exception raised on errors in parsing the command line."""
104
104
105 def __init__(self, command, message):
105 def __init__(self, command, message):
106 self.command = command
106 self.command = command
107 self.message = message
107 self.message = message
108 super(CommandError, self).__init__()
108 super(CommandError, self).__init__()
109
109
110 __bytes__ = _tobytes
110 __bytes__ = _tobytes
111
111
112
112
113 class UnknownCommand(Exception):
113 class UnknownCommand(Exception):
114 """Exception raised if command is not in the command table."""
114 """Exception raised if command is not in the command table."""
115
115
116 def __init__(self, command, all_commands=None):
116 def __init__(self, command, all_commands=None):
117 self.command = command
117 self.command = command
118 self.all_commands = all_commands
118 self.all_commands = all_commands
119 super(UnknownCommand, self).__init__()
119 super(UnknownCommand, self).__init__()
120
120
121 __bytes__ = _tobytes
121 __bytes__ = _tobytes
122
122
123
123
124 class AmbiguousCommand(Exception):
124 class AmbiguousCommand(Exception):
125 """Exception raised if command shortcut matches more than one command."""
125 """Exception raised if command shortcut matches more than one command."""
126
126
127 def __init__(self, prefix, matches):
127 def __init__(self, prefix, matches):
128 self.prefix = prefix
128 self.prefix = prefix
129 self.matches = matches
129 self.matches = matches
130 super(AmbiguousCommand, self).__init__()
130 super(AmbiguousCommand, self).__init__()
131
131
132 __bytes__ = _tobytes
132 __bytes__ = _tobytes
133
133
134
134
135 class WorkerError(Exception):
135 class WorkerError(Exception):
136 """Exception raised when a worker process dies."""
136 """Exception raised when a worker process dies."""
137
137
138 def __init__(self, status_code):
138 def __init__(self, status_code):
139 self.status_code = status_code
139 self.status_code = status_code
140
140
141
141
142 class InterventionRequired(Hint, Exception):
142 class InterventionRequired(Hint, Exception):
143 """Exception raised when a command requires human intervention."""
143 """Exception raised when a command requires human intervention."""
144
144
145 __bytes__ = _tobytes
145 __bytes__ = _tobytes
146
146
147
147
148 class ConflictResolutionRequired(InterventionRequired):
148 class ConflictResolutionRequired(InterventionRequired):
149 """Exception raised when a continuable command required merge conflict resolution."""
149 """Exception raised when a continuable command required merge conflict resolution."""
150
150
151 def __init__(self, opname):
151 def __init__(self, opname):
152 from .i18n import _
152 from .i18n import _
153
153
154 self.opname = opname
154 self.opname = opname
155 InterventionRequired.__init__(
155 InterventionRequired.__init__(
156 self,
156 self,
157 _(
157 _(
158 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
158 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
159 )
159 )
160 % opname,
160 % opname,
161 )
161 )
162
162
163
163
164 class Abort(Hint, Exception):
164 class Abort(Hint, Exception):
165 """Raised if a command needs to print an error and exit."""
165 """Raised if a command needs to print an error and exit."""
166
166
167 def __init__(self, message, hint=None):
167 def __init__(self, message, hint=None):
168 self.message = message
168 self.message = message
169 self.hint = hint
169 self.hint = hint
170 # Pass the message into the Exception constructor to help extensions
170 # Pass the message into the Exception constructor to help extensions
171 # that look for exc.args[0].
171 # that look for exc.args[0].
172 Exception.__init__(self, message)
172 Exception.__init__(self, message)
173
173
174 def __bytes__(self):
174 def __bytes__(self):
175 return self.message
175 return self.message
176
176
177 if pycompat.ispy3:
177 if pycompat.ispy3:
178
178
179 def __str__(self):
179 def __str__(self):
180 # the output would be unreadable if the message was translated,
180 # the output would be unreadable if the message was translated,
181 # but do not replace it with encoding.strfromlocal(), which
181 # but do not replace it with encoding.strfromlocal(), which
182 # may raise another exception.
182 # may raise another exception.
183 return pycompat.sysstr(self.__bytes__())
183 return pycompat.sysstr(self.__bytes__())
184
184
185
185
186 class InputError(Abort):
186 class InputError(Abort):
187 """Indicates that the user made an error in their input.
187 """Indicates that the user made an error in their input.
188
188
189 Examples: Invalid command, invalid flags, invalid revision.
189 Examples: Invalid command, invalid flags, invalid revision.
190 """
190 """
191
191
192
192
193 class StateError(Abort):
193 class StateError(Abort):
194 """Indicates that the operation might work if retried in a different state.
194 """Indicates that the operation might work if retried in a different state.
195
195
196 Examples: Unresolved merge conflicts, unfinished operations.
196 Examples: Unresolved merge conflicts, unfinished operations.
197 """
197 """
198
198
199
199
200 class CanceledError(Abort):
200 class CanceledError(Abort):
201 """Indicates that the user canceled the operation.
201 """Indicates that the user canceled the operation.
202
202
203 Examples: Close commit editor with error status, quit chistedit.
203 Examples: Close commit editor with error status, quit chistedit.
204 """
204 """
205
205
206
206
207 class HookLoadError(Abort):
207 class HookLoadError(Abort):
208 """raised when loading a hook fails, aborting an operation
208 """raised when loading a hook fails, aborting an operation
209
209
210 Exists to allow more specialized catching."""
210 Exists to allow more specialized catching."""
211
211
212
212
213 class HookAbort(Abort):
213 class HookAbort(Abort):
214 """raised when a validation hook fails, aborting an operation
214 """raised when a validation hook fails, aborting an operation
215
215
216 Exists to allow more specialized catching."""
216 Exists to allow more specialized catching."""
217
217
218
218
219 class ConfigError(Abort):
219 class ConfigError(Abort):
220 """Exception raised when parsing config files"""
220 """Exception raised when parsing config files"""
221
221
222
222
223 class UpdateAbort(Abort):
223 class UpdateAbort(Abort):
224 """Raised when an update is aborted for destination issue"""
224 """Raised when an update is aborted for destination issue"""
225
225
226
226
227 class MergeDestAbort(Abort):
227 class MergeDestAbort(Abort):
228 """Raised when an update is aborted for destination issues"""
228 """Raised when an update is aborted for destination issues"""
229
229
230
230
231 class NoMergeDestAbort(MergeDestAbort):
231 class NoMergeDestAbort(MergeDestAbort):
232 """Raised when an update is aborted because there is nothing to merge"""
232 """Raised when an update is aborted because there is nothing to merge"""
233
233
234
234
235 class ManyMergeDestAbort(MergeDestAbort):
235 class ManyMergeDestAbort(MergeDestAbort):
236 """Raised when an update is aborted because destination is ambiguous"""
236 """Raised when an update is aborted because destination is ambiguous"""
237
237
238
238
239 class ResponseExpected(Abort):
239 class ResponseExpected(Abort):
240 """Raised when an EOF is received for a prompt"""
240 """Raised when an EOF is received for a prompt"""
241
241
242 def __init__(self):
242 def __init__(self):
243 from .i18n import _
243 from .i18n import _
244
244
245 Abort.__init__(self, _(b'response expected'))
245 Abort.__init__(self, _(b'response expected'))
246
246
247
247
248 class OutOfBandError(Hint, Exception):
248 class OutOfBandError(Hint, Exception):
249 """Exception raised when a remote repo reports failure"""
249 """Exception raised when a remote repo reports failure"""
250
250
251 __bytes__ = _tobytes
251 __bytes__ = _tobytes
252
252
253
253
254 class ParseError(Hint, Exception):
254 class ParseError(Hint, Exception):
255 """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
255 """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
256
256
257 def __init__(self, message, location=None, hint=None):
257 def __init__(self, message, location=None, hint=None):
258 self.message = message
258 self.message = message
259 self.location = location
259 self.location = location
260 self.hint = hint
260 self.hint = hint
261 # Pass the message and possibly location into the Exception constructor
261 # Pass the message and possibly location into the Exception constructor
262 # to help code that looks for exc.args.
262 # to help code that looks for exc.args.
263 if location is not None:
263 if location is not None:
264 Exception.__init__(self, message, location)
264 Exception.__init__(self, message, location)
265 else:
265 else:
266 Exception.__init__(self, message)
266 Exception.__init__(self, message)
267
267
268 __bytes__ = _tobytes
268 __bytes__ = _tobytes
269
269
270
270
271 class PatchError(Exception):
271 class PatchError(Exception):
272 __bytes__ = _tobytes
272 __bytes__ = _tobytes
273
273
274
274
275 def getsimilar(symbols, value):
275 def getsimilar(symbols, value):
276 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
276 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
277 # The cutoff for similarity here is pretty arbitrary. It should
277 # The cutoff for similarity here is pretty arbitrary. It should
278 # probably be investigated and tweaked.
278 # probably be investigated and tweaked.
279 return [s for s in symbols if sim(s) > 0.6]
279 return [s for s in symbols if sim(s) > 0.6]
280
280
281
281
282 def similarity_hint(similar):
282 def similarity_hint(similar):
283 from .i18n import _
283 from .i18n import _
284
284
285 if len(similar) == 1:
285 if len(similar) == 1:
286 return _(b"did you mean %s?") % similar[0]
286 return _(b"did you mean %s?") % similar[0]
287 elif similar:
287 elif similar:
288 ss = b", ".join(sorted(similar))
288 ss = b", ".join(sorted(similar))
289 return _(b"did you mean one of %s?") % ss
289 return _(b"did you mean one of %s?") % ss
290 else:
290 else:
291 return None
291 return None
292
292
293
293
294 class UnknownIdentifier(ParseError):
294 class UnknownIdentifier(ParseError):
295 """Exception raised when a {rev,file}set references an unknown identifier"""
295 """Exception raised when a {rev,file}set references an unknown identifier"""
296
296
297 def __init__(self, function, symbols):
297 def __init__(self, function, symbols):
298 from .i18n import _
298 from .i18n import _
299
299
300 ParseError.__init__(self, _(b"unknown identifier: %s") % function)
300 similar = getsimilar(symbols, function)
301 self.function = function
301 hint = similarity_hint(similar)
302 self.symbols = symbols
302
303 ParseError.__init__(
304 self, _(b"unknown identifier: %s") % function, hint=hint
305 )
303
306
304
307
305 class RepoError(Hint, Exception):
308 class RepoError(Hint, Exception):
306 __bytes__ = _tobytes
309 __bytes__ = _tobytes
307
310
308
311
309 class RepoLookupError(RepoError):
312 class RepoLookupError(RepoError):
310 pass
313 pass
311
314
312
315
313 class FilteredRepoLookupError(RepoLookupError):
316 class FilteredRepoLookupError(RepoLookupError):
314 pass
317 pass
315
318
316
319
317 class CapabilityError(RepoError):
320 class CapabilityError(RepoError):
318 pass
321 pass
319
322
320
323
321 class RequirementError(RepoError):
324 class RequirementError(RepoError):
322 """Exception raised if .hg/requires has an unknown entry."""
325 """Exception raised if .hg/requires has an unknown entry."""
323
326
324
327
325 class StdioError(IOError):
328 class StdioError(IOError):
326 """Raised if I/O to stdout or stderr fails"""
329 """Raised if I/O to stdout or stderr fails"""
327
330
328 def __init__(self, err):
331 def __init__(self, err):
329 IOError.__init__(self, err.errno, err.strerror)
332 IOError.__init__(self, err.errno, err.strerror)
330
333
331 # no __bytes__() because error message is derived from the standard IOError
334 # no __bytes__() because error message is derived from the standard IOError
332
335
333
336
334 class UnsupportedMergeRecords(Abort):
337 class UnsupportedMergeRecords(Abort):
335 def __init__(self, recordtypes):
338 def __init__(self, recordtypes):
336 from .i18n import _
339 from .i18n import _
337
340
338 self.recordtypes = sorted(recordtypes)
341 self.recordtypes = sorted(recordtypes)
339 s = b' '.join(self.recordtypes)
342 s = b' '.join(self.recordtypes)
340 Abort.__init__(
343 Abort.__init__(
341 self,
344 self,
342 _(b'unsupported merge state records: %s') % s,
345 _(b'unsupported merge state records: %s') % s,
343 hint=_(
346 hint=_(
344 b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
347 b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
345 b'more information'
348 b'more information'
346 ),
349 ),
347 )
350 )
348
351
349
352
350 class UnknownVersion(Abort):
353 class UnknownVersion(Abort):
351 """generic exception for aborting from an encounter with an unknown version
354 """generic exception for aborting from an encounter with an unknown version
352 """
355 """
353
356
354 def __init__(self, msg, hint=None, version=None):
357 def __init__(self, msg, hint=None, version=None):
355 self.version = version
358 self.version = version
356 super(UnknownVersion, self).__init__(msg, hint=hint)
359 super(UnknownVersion, self).__init__(msg, hint=hint)
357
360
358
361
359 class LockError(IOError):
362 class LockError(IOError):
360 def __init__(self, errno, strerror, filename, desc):
363 def __init__(self, errno, strerror, filename, desc):
361 IOError.__init__(self, errno, strerror, filename)
364 IOError.__init__(self, errno, strerror, filename)
362 self.desc = desc
365 self.desc = desc
363
366
364 # no __bytes__() because error message is derived from the standard IOError
367 # no __bytes__() because error message is derived from the standard IOError
365
368
366
369
367 class LockHeld(LockError):
370 class LockHeld(LockError):
368 def __init__(self, errno, filename, desc, locker):
371 def __init__(self, errno, filename, desc, locker):
369 LockError.__init__(self, errno, b'Lock held', filename, desc)
372 LockError.__init__(self, errno, b'Lock held', filename, desc)
370 self.locker = locker
373 self.locker = locker
371
374
372
375
373 class LockUnavailable(LockError):
376 class LockUnavailable(LockError):
374 pass
377 pass
375
378
376
379
377 # LockError is for errors while acquiring the lock -- this is unrelated
380 # LockError is for errors while acquiring the lock -- this is unrelated
378 class LockInheritanceContractViolation(RuntimeError):
381 class LockInheritanceContractViolation(RuntimeError):
379 __bytes__ = _tobytes
382 __bytes__ = _tobytes
380
383
381
384
382 class ResponseError(Exception):
385 class ResponseError(Exception):
383 """Raised to print an error with part of output and exit."""
386 """Raised to print an error with part of output and exit."""
384
387
385 __bytes__ = _tobytes
388 __bytes__ = _tobytes
386
389
387
390
388 # derived from KeyboardInterrupt to simplify some breakout code
391 # derived from KeyboardInterrupt to simplify some breakout code
389 class SignalInterrupt(KeyboardInterrupt):
392 class SignalInterrupt(KeyboardInterrupt):
390 """Exception raised on SIGTERM and SIGHUP."""
393 """Exception raised on SIGTERM and SIGHUP."""
391
394
392
395
393 class SignatureError(Exception):
396 class SignatureError(Exception):
394 __bytes__ = _tobytes
397 __bytes__ = _tobytes
395
398
396
399
397 class PushRaced(RuntimeError):
400 class PushRaced(RuntimeError):
398 """An exception raised during unbundling that indicate a push race"""
401 """An exception raised during unbundling that indicate a push race"""
399
402
400 __bytes__ = _tobytes
403 __bytes__ = _tobytes
401
404
402
405
403 class ProgrammingError(Hint, RuntimeError):
406 class ProgrammingError(Hint, RuntimeError):
404 """Raised if a mercurial (core or extension) developer made a mistake"""
407 """Raised if a mercurial (core or extension) developer made a mistake"""
405
408
406 def __init__(self, msg, *args, **kwargs):
409 def __init__(self, msg, *args, **kwargs):
407 # On Python 3, turn the message back into a string since this is
410 # On Python 3, turn the message back into a string since this is
408 # an internal-only error that won't be printed except in a
411 # an internal-only error that won't be printed except in a
409 # stack traces.
412 # stack traces.
410 msg = pycompat.sysstr(msg)
413 msg = pycompat.sysstr(msg)
411 super(ProgrammingError, self).__init__(msg, *args, **kwargs)
414 super(ProgrammingError, self).__init__(msg, *args, **kwargs)
412
415
413 __bytes__ = _tobytes
416 __bytes__ = _tobytes
414
417
415
418
416 class WdirUnsupported(Exception):
419 class WdirUnsupported(Exception):
417 """An exception which is raised when 'wdir()' is not supported"""
420 """An exception which is raised when 'wdir()' is not supported"""
418
421
419 __bytes__ = _tobytes
422 __bytes__ = _tobytes
420
423
421
424
422 # bundle2 related errors
425 # bundle2 related errors
423 class BundleValueError(ValueError):
426 class BundleValueError(ValueError):
424 """error raised when bundle2 cannot be processed"""
427 """error raised when bundle2 cannot be processed"""
425
428
426 __bytes__ = _tobytes
429 __bytes__ = _tobytes
427
430
428
431
429 class BundleUnknownFeatureError(BundleValueError):
432 class BundleUnknownFeatureError(BundleValueError):
430 def __init__(self, parttype=None, params=(), values=()):
433 def __init__(self, parttype=None, params=(), values=()):
431 self.parttype = parttype
434 self.parttype = parttype
432 self.params = params
435 self.params = params
433 self.values = values
436 self.values = values
434 if self.parttype is None:
437 if self.parttype is None:
435 msg = b'Stream Parameter'
438 msg = b'Stream Parameter'
436 else:
439 else:
437 msg = parttype
440 msg = parttype
438 entries = self.params
441 entries = self.params
439 if self.params and self.values:
442 if self.params and self.values:
440 assert len(self.params) == len(self.values)
443 assert len(self.params) == len(self.values)
441 entries = []
444 entries = []
442 for idx, par in enumerate(self.params):
445 for idx, par in enumerate(self.params):
443 val = self.values[idx]
446 val = self.values[idx]
444 if val is None:
447 if val is None:
445 entries.append(val)
448 entries.append(val)
446 else:
449 else:
447 entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
450 entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
448 if entries:
451 if entries:
449 msg = b'%s - %s' % (msg, b', '.join(entries))
452 msg = b'%s - %s' % (msg, b', '.join(entries))
450 ValueError.__init__(self, msg)
453 ValueError.__init__(self, msg)
451
454
452
455
453 class ReadOnlyPartError(RuntimeError):
456 class ReadOnlyPartError(RuntimeError):
454 """error raised when code tries to alter a part being generated"""
457 """error raised when code tries to alter a part being generated"""
455
458
456 __bytes__ = _tobytes
459 __bytes__ = _tobytes
457
460
458
461
459 class PushkeyFailed(Abort):
462 class PushkeyFailed(Abort):
460 """error raised when a pushkey part failed to update a value"""
463 """error raised when a pushkey part failed to update a value"""
461
464
462 def __init__(
465 def __init__(
463 self, partid, namespace=None, key=None, new=None, old=None, ret=None
466 self, partid, namespace=None, key=None, new=None, old=None, ret=None
464 ):
467 ):
465 self.partid = partid
468 self.partid = partid
466 self.namespace = namespace
469 self.namespace = namespace
467 self.key = key
470 self.key = key
468 self.new = new
471 self.new = new
469 self.old = old
472 self.old = old
470 self.ret = ret
473 self.ret = ret
471 # no i18n expected to be processed into a better message
474 # no i18n expected to be processed into a better message
472 Abort.__init__(
475 Abort.__init__(
473 self, b'failed to update value for "%s/%s"' % (namespace, key)
476 self, b'failed to update value for "%s/%s"' % (namespace, key)
474 )
477 )
475
478
476
479
477 class CensoredNodeError(StorageError):
480 class CensoredNodeError(StorageError):
478 """error raised when content verification fails on a censored node
481 """error raised when content verification fails on a censored node
479
482
480 Also contains the tombstone data substituted for the uncensored data.
483 Also contains the tombstone data substituted for the uncensored data.
481 """
484 """
482
485
483 def __init__(self, filename, node, tombstone):
486 def __init__(self, filename, node, tombstone):
484 from .node import short
487 from .node import short
485
488
486 StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
489 StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
487 self.tombstone = tombstone
490 self.tombstone = tombstone
488
491
489
492
490 class CensoredBaseError(StorageError):
493 class CensoredBaseError(StorageError):
491 """error raised when a delta is rejected because its base is censored
494 """error raised when a delta is rejected because its base is censored
492
495
493 A delta based on a censored revision must be formed as single patch
496 A delta based on a censored revision must be formed as single patch
494 operation which replaces the entire base with new content. This ensures
497 operation which replaces the entire base with new content. This ensures
495 the delta may be applied by clones which have not censored the base.
498 the delta may be applied by clones which have not censored the base.
496 """
499 """
497
500
498
501
499 class InvalidBundleSpecification(Exception):
502 class InvalidBundleSpecification(Exception):
500 """error raised when a bundle specification is invalid.
503 """error raised when a bundle specification is invalid.
501
504
502 This is used for syntax errors as opposed to support errors.
505 This is used for syntax errors as opposed to support errors.
503 """
506 """
504
507
505 __bytes__ = _tobytes
508 __bytes__ = _tobytes
506
509
507
510
508 class UnsupportedBundleSpecification(Exception):
511 class UnsupportedBundleSpecification(Exception):
509 """error raised when a bundle specification is not supported."""
512 """error raised when a bundle specification is not supported."""
510
513
511 __bytes__ = _tobytes
514 __bytes__ = _tobytes
512
515
513
516
514 class CorruptedState(Exception):
517 class CorruptedState(Exception):
515 """error raised when a command is not able to read its state from file"""
518 """error raised when a command is not able to read its state from file"""
516
519
517 __bytes__ = _tobytes
520 __bytes__ = _tobytes
518
521
519
522
520 class PeerTransportError(Abort):
523 class PeerTransportError(Abort):
521 """Transport-level I/O error when communicating with a peer repo."""
524 """Transport-level I/O error when communicating with a peer repo."""
522
525
523
526
524 class InMemoryMergeConflictsError(Exception):
527 class InMemoryMergeConflictsError(Exception):
525 """Exception raised when merge conflicts arose during an in-memory merge."""
528 """Exception raised when merge conflicts arose during an in-memory merge."""
526
529
527 __bytes__ = _tobytes
530 __bytes__ = _tobytes
528
531
529
532
530 class WireprotoCommandError(Exception):
533 class WireprotoCommandError(Exception):
531 """Represents an error during execution of a wire protocol command.
534 """Represents an error during execution of a wire protocol command.
532
535
533 Should only be thrown by wire protocol version 2 commands.
536 Should only be thrown by wire protocol version 2 commands.
534
537
535 The error is a formatter string and an optional iterable of arguments.
538 The error is a formatter string and an optional iterable of arguments.
536 """
539 """
537
540
538 def __init__(self, message, args=None):
541 def __init__(self, message, args=None):
539 self.message = message
542 self.message = message
540 self.messageargs = args
543 self.messageargs = args
@@ -1,2330 +1,2324 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import os
12 import os
13 import posixpath
13 import posixpath
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28 from .pycompat import getattr
28 from .pycompat import getattr
29 from .thirdparty import attr
29 from .thirdparty import attr
30 from . import (
30 from . import (
31 copies as copiesmod,
31 copies as copiesmod,
32 encoding,
32 encoding,
33 error,
33 error,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 obsutil,
36 obsutil,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 policy,
39 policy,
40 pycompat,
40 pycompat,
41 requirements as requirementsmod,
41 requirements as requirementsmod,
42 revsetlang,
42 revsetlang,
43 similar,
43 similar,
44 smartset,
44 smartset,
45 url,
45 url,
46 util,
46 util,
47 vfs,
47 vfs,
48 )
48 )
49
49
50 from .utils import (
50 from .utils import (
51 hashutil,
51 hashutil,
52 procutil,
52 procutil,
53 stringutil,
53 stringutil,
54 )
54 )
55
55
56 if pycompat.iswindows:
56 if pycompat.iswindows:
57 from . import scmwindows as scmplatform
57 from . import scmwindows as scmplatform
58 else:
58 else:
59 from . import scmposix as scmplatform
59 from . import scmposix as scmplatform
60
60
61 parsers = policy.importmod('parsers')
61 parsers = policy.importmod('parsers')
62 rustrevlog = policy.importrust('revlog')
62 rustrevlog = policy.importrust('revlog')
63
63
64 termsize = scmplatform.termsize
64 termsize = scmplatform.termsize
65
65
66
66
67 @attr.s(slots=True, repr=False)
67 @attr.s(slots=True, repr=False)
68 class status(object):
68 class status(object):
69 '''Struct with a list of files per status.
69 '''Struct with a list of files per status.
70
70
71 The 'deleted', 'unknown' and 'ignored' properties are only
71 The 'deleted', 'unknown' and 'ignored' properties are only
72 relevant to the working copy.
72 relevant to the working copy.
73 '''
73 '''
74
74
75 modified = attr.ib(default=attr.Factory(list))
75 modified = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
77 removed = attr.ib(default=attr.Factory(list))
77 removed = attr.ib(default=attr.Factory(list))
78 deleted = attr.ib(default=attr.Factory(list))
78 deleted = attr.ib(default=attr.Factory(list))
79 unknown = attr.ib(default=attr.Factory(list))
79 unknown = attr.ib(default=attr.Factory(list))
80 ignored = attr.ib(default=attr.Factory(list))
80 ignored = attr.ib(default=attr.Factory(list))
81 clean = attr.ib(default=attr.Factory(list))
81 clean = attr.ib(default=attr.Factory(list))
82
82
83 def __iter__(self):
83 def __iter__(self):
84 yield self.modified
84 yield self.modified
85 yield self.added
85 yield self.added
86 yield self.removed
86 yield self.removed
87 yield self.deleted
87 yield self.deleted
88 yield self.unknown
88 yield self.unknown
89 yield self.ignored
89 yield self.ignored
90 yield self.clean
90 yield self.clean
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return (
93 return (
94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
95 r'unknown=%s, ignored=%s, clean=%s>'
95 r'unknown=%s, ignored=%s, clean=%s>'
96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
97
97
98
98
99 def itersubrepos(ctx1, ctx2):
99 def itersubrepos(ctx1, ctx2):
100 """find subrepos in ctx1 or ctx2"""
100 """find subrepos in ctx1 or ctx2"""
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # has been modified (in ctx2) but not yet committed (in ctx1).
103 # has been modified (in ctx2) but not yet committed (in ctx1).
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106
106
107 missing = set()
107 missing = set()
108
108
109 for subpath in ctx2.substate:
109 for subpath in ctx2.substate:
110 if subpath not in ctx1.substate:
110 if subpath not in ctx1.substate:
111 del subpaths[subpath]
111 del subpaths[subpath]
112 missing.add(subpath)
112 missing.add(subpath)
113
113
114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
115 yield subpath, ctx.sub(subpath)
115 yield subpath, ctx.sub(subpath)
116
116
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # status and diff will have an accurate result when it does
118 # status and diff will have an accurate result when it does
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # against itself.
120 # against itself.
121 for subpath in missing:
121 for subpath in missing:
122 yield subpath, ctx2.nullsub(subpath, ctx1)
122 yield subpath, ctx2.nullsub(subpath, ctx1)
123
123
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 '''
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
132 ctx = repo[n]
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
134 secretlist.append(n)
135
135
136 if secretlist:
136 if secretlist:
137 ui.status(
137 ui.status(
138 _(b"no changes found (ignored %d secret changesets)\n")
138 _(b"no changes found (ignored %d secret changesets)\n")
139 % len(secretlist)
139 % len(secretlist)
140 )
140 )
141 else:
141 else:
142 ui.status(_(b"no changes found\n"))
142 ui.status(_(b"no changes found\n"))
143
143
144
144
145 def formatparse(write, inst):
145 def formatparse(write, inst):
146 if inst.location is not None:
146 if inst.location is not None:
147 write(
147 write(
148 _(b"hg: parse error at %s: %s\n")
148 _(b"hg: parse error at %s: %s\n")
149 % (pycompat.bytestr(inst.location), inst.message)
149 % (pycompat.bytestr(inst.location), inst.message)
150 )
150 )
151 else:
151 else:
152 write(_(b"hg: parse error: %s\n") % inst.message)
152 write(_(b"hg: parse error: %s\n") % inst.message)
153 if isinstance(inst, error.UnknownIdentifier):
153 if inst.hint:
154 # make sure to check fileset first, as revset can invoke fileset
155 similar = error.getsimilar(inst.symbols, inst.function)
156 hint = error.similarity_hint(similar)
157 if hint:
158 write(b"(%s)\n" % hint)
159 elif inst.hint:
160 write(_(b"(%s)\n") % inst.hint)
154 write(_(b"(%s)\n") % inst.hint)
161
155
162
156
163 def callcatch(ui, func):
157 def callcatch(ui, func):
164 """call func() with global exception handling
158 """call func() with global exception handling
165
159
166 return func() if no exception happens. otherwise do some error handling
160 return func() if no exception happens. otherwise do some error handling
167 and return an exit code accordingly. does not handle all exceptions.
161 and return an exit code accordingly. does not handle all exceptions.
168 """
162 """
169 coarse_exit_code = -1
163 coarse_exit_code = -1
170 detailed_exit_code = -1
164 detailed_exit_code = -1
171 try:
165 try:
172 try:
166 try:
173 return func()
167 return func()
174 except: # re-raises
168 except: # re-raises
175 ui.traceback()
169 ui.traceback()
176 raise
170 raise
177 # Global exception handling, alphabetically
171 # Global exception handling, alphabetically
178 # Mercurial-specific first, followed by built-in and library exceptions
172 # Mercurial-specific first, followed by built-in and library exceptions
179 except error.LockHeld as inst:
173 except error.LockHeld as inst:
180 detailed_exit_code = 20
174 detailed_exit_code = 20
181 if inst.errno == errno.ETIMEDOUT:
175 if inst.errno == errno.ETIMEDOUT:
182 reason = _(b'timed out waiting for lock held by %r') % (
176 reason = _(b'timed out waiting for lock held by %r') % (
183 pycompat.bytestr(inst.locker)
177 pycompat.bytestr(inst.locker)
184 )
178 )
185 else:
179 else:
186 reason = _(b'lock held by %r') % inst.locker
180 reason = _(b'lock held by %r') % inst.locker
187 ui.error(
181 ui.error(
188 _(b"abort: %s: %s\n")
182 _(b"abort: %s: %s\n")
189 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
183 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
190 )
184 )
191 if not inst.locker:
185 if not inst.locker:
192 ui.error(_(b"(lock might be very busy)\n"))
186 ui.error(_(b"(lock might be very busy)\n"))
193 except error.LockUnavailable as inst:
187 except error.LockUnavailable as inst:
194 detailed_exit_code = 20
188 detailed_exit_code = 20
195 ui.error(
189 ui.error(
196 _(b"abort: could not lock %s: %s\n")
190 _(b"abort: could not lock %s: %s\n")
197 % (
191 % (
198 inst.desc or stringutil.forcebytestr(inst.filename),
192 inst.desc or stringutil.forcebytestr(inst.filename),
199 encoding.strtolocal(inst.strerror),
193 encoding.strtolocal(inst.strerror),
200 )
194 )
201 )
195 )
202 except error.OutOfBandError as inst:
196 except error.OutOfBandError as inst:
203 detailed_exit_code = 100
197 detailed_exit_code = 100
204 if inst.args:
198 if inst.args:
205 msg = _(b"abort: remote error:\n")
199 msg = _(b"abort: remote error:\n")
206 else:
200 else:
207 msg = _(b"abort: remote error\n")
201 msg = _(b"abort: remote error\n")
208 ui.error(msg)
202 ui.error(msg)
209 if inst.args:
203 if inst.args:
210 ui.error(b''.join(inst.args))
204 ui.error(b''.join(inst.args))
211 if inst.hint:
205 if inst.hint:
212 ui.error(b'(%s)\n' % inst.hint)
206 ui.error(b'(%s)\n' % inst.hint)
213 except error.RepoError as inst:
207 except error.RepoError as inst:
214 ui.error(_(b"abort: %s!\n") % inst)
208 ui.error(_(b"abort: %s!\n") % inst)
215 if inst.hint:
209 if inst.hint:
216 ui.error(_(b"(%s)\n") % inst.hint)
210 ui.error(_(b"(%s)\n") % inst.hint)
217 except error.ResponseError as inst:
211 except error.ResponseError as inst:
218 ui.error(_(b"abort: %s") % inst.args[0])
212 ui.error(_(b"abort: %s") % inst.args[0])
219 msg = inst.args[1]
213 msg = inst.args[1]
220 if isinstance(msg, type(u'')):
214 if isinstance(msg, type(u'')):
221 msg = pycompat.sysbytes(msg)
215 msg = pycompat.sysbytes(msg)
222 if not isinstance(msg, bytes):
216 if not isinstance(msg, bytes):
223 ui.error(b" %r\n" % (msg,))
217 ui.error(b" %r\n" % (msg,))
224 elif not msg:
218 elif not msg:
225 ui.error(_(b" empty string\n"))
219 ui.error(_(b" empty string\n"))
226 else:
220 else:
227 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
221 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
228 except error.CensoredNodeError as inst:
222 except error.CensoredNodeError as inst:
229 ui.error(_(b"abort: file censored %s!\n") % inst)
223 ui.error(_(b"abort: file censored %s!\n") % inst)
230 except error.StorageError as inst:
224 except error.StorageError as inst:
231 ui.error(_(b"abort: %s!\n") % inst)
225 ui.error(_(b"abort: %s!\n") % inst)
232 if inst.hint:
226 if inst.hint:
233 ui.error(_(b"(%s)\n") % inst.hint)
227 ui.error(_(b"(%s)\n") % inst.hint)
234 except error.InterventionRequired as inst:
228 except error.InterventionRequired as inst:
235 ui.error(b"%s\n" % inst)
229 ui.error(b"%s\n" % inst)
236 if inst.hint:
230 if inst.hint:
237 ui.error(_(b"(%s)\n") % inst.hint)
231 ui.error(_(b"(%s)\n") % inst.hint)
238 detailed_exit_code = 240
232 detailed_exit_code = 240
239 coarse_exit_code = 1
233 coarse_exit_code = 1
240 except error.WdirUnsupported:
234 except error.WdirUnsupported:
241 ui.error(_(b"abort: working directory revision cannot be specified\n"))
235 ui.error(_(b"abort: working directory revision cannot be specified\n"))
242 except error.Abort as inst:
236 except error.Abort as inst:
243 if isinstance(inst, error.InputError):
237 if isinstance(inst, error.InputError):
244 detailed_exit_code = 10
238 detailed_exit_code = 10
245 elif isinstance(inst, error.StateError):
239 elif isinstance(inst, error.StateError):
246 detailed_exit_code = 20
240 detailed_exit_code = 20
247 elif isinstance(inst, error.ConfigError):
241 elif isinstance(inst, error.ConfigError):
248 detailed_exit_code = 30
242 detailed_exit_code = 30
249 elif isinstance(inst, error.CanceledError):
243 elif isinstance(inst, error.CanceledError):
250 detailed_exit_code = 250
244 detailed_exit_code = 250
251 ui.error(_(b"abort: %s\n") % inst.message)
245 ui.error(_(b"abort: %s\n") % inst.message)
252 if inst.hint:
246 if inst.hint:
253 ui.error(_(b"(%s)\n") % inst.hint)
247 ui.error(_(b"(%s)\n") % inst.hint)
254 except error.WorkerError as inst:
248 except error.WorkerError as inst:
255 # Don't print a message -- the worker already should have
249 # Don't print a message -- the worker already should have
256 return inst.status_code
250 return inst.status_code
257 except ImportError as inst:
251 except ImportError as inst:
258 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
252 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
259 m = stringutil.forcebytestr(inst).split()[-1]
253 m = stringutil.forcebytestr(inst).split()[-1]
260 if m in b"mpatch bdiff".split():
254 if m in b"mpatch bdiff".split():
261 ui.error(_(b"(did you forget to compile extensions?)\n"))
255 ui.error(_(b"(did you forget to compile extensions?)\n"))
262 elif m in b"zlib".split():
256 elif m in b"zlib".split():
263 ui.error(_(b"(is your Python install correct?)\n"))
257 ui.error(_(b"(is your Python install correct?)\n"))
264 except util.urlerr.httperror as inst:
258 except util.urlerr.httperror as inst:
265 detailed_exit_code = 100
259 detailed_exit_code = 100
266 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
260 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
267 except util.urlerr.urlerror as inst:
261 except util.urlerr.urlerror as inst:
268 detailed_exit_code = 100
262 detailed_exit_code = 100
269 try: # usually it is in the form (errno, strerror)
263 try: # usually it is in the form (errno, strerror)
270 reason = inst.reason.args[1]
264 reason = inst.reason.args[1]
271 except (AttributeError, IndexError):
265 except (AttributeError, IndexError):
272 # it might be anything, for example a string
266 # it might be anything, for example a string
273 reason = inst.reason
267 reason = inst.reason
274 if isinstance(reason, pycompat.unicode):
268 if isinstance(reason, pycompat.unicode):
275 # SSLError of Python 2.7.9 contains a unicode
269 # SSLError of Python 2.7.9 contains a unicode
276 reason = encoding.unitolocal(reason)
270 reason = encoding.unitolocal(reason)
277 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
271 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
278 except (IOError, OSError) as inst:
272 except (IOError, OSError) as inst:
279 if (
273 if (
280 util.safehasattr(inst, b"args")
274 util.safehasattr(inst, b"args")
281 and inst.args
275 and inst.args
282 and inst.args[0] == errno.EPIPE
276 and inst.args[0] == errno.EPIPE
283 ):
277 ):
284 pass
278 pass
285 elif getattr(inst, "strerror", None): # common IOError or OSError
279 elif getattr(inst, "strerror", None): # common IOError or OSError
286 if getattr(inst, "filename", None) is not None:
280 if getattr(inst, "filename", None) is not None:
287 ui.error(
281 ui.error(
288 _(b"abort: %s: '%s'\n")
282 _(b"abort: %s: '%s'\n")
289 % (
283 % (
290 encoding.strtolocal(inst.strerror),
284 encoding.strtolocal(inst.strerror),
291 stringutil.forcebytestr(inst.filename),
285 stringutil.forcebytestr(inst.filename),
292 )
286 )
293 )
287 )
294 else:
288 else:
295 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
289 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
296 else: # suspicious IOError
290 else: # suspicious IOError
297 raise
291 raise
298 except MemoryError:
292 except MemoryError:
299 ui.error(_(b"abort: out of memory\n"))
293 ui.error(_(b"abort: out of memory\n"))
300 except SystemExit as inst:
294 except SystemExit as inst:
301 # Commands shouldn't sys.exit directly, but give a return code.
295 # Commands shouldn't sys.exit directly, but give a return code.
302 # Just in case catch this and and pass exit code to caller.
296 # Just in case catch this and and pass exit code to caller.
303 detailed_exit_code = 254
297 detailed_exit_code = 254
304 coarse_exit_code = inst.code
298 coarse_exit_code = inst.code
305
299
306 if ui.configbool(b'ui', b'detailed-exit-code'):
300 if ui.configbool(b'ui', b'detailed-exit-code'):
307 return detailed_exit_code
301 return detailed_exit_code
308 else:
302 else:
309 return coarse_exit_code
303 return coarse_exit_code
310
304
311
305
312 def checknewlabel(repo, lbl, kind):
306 def checknewlabel(repo, lbl, kind):
313 # Do not use the "kind" parameter in ui output.
307 # Do not use the "kind" parameter in ui output.
314 # It makes strings difficult to translate.
308 # It makes strings difficult to translate.
315 if lbl in [b'tip', b'.', b'null']:
309 if lbl in [b'tip', b'.', b'null']:
316 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
310 raise error.InputError(_(b"the name '%s' is reserved") % lbl)
317 for c in (b':', b'\0', b'\n', b'\r'):
311 for c in (b':', b'\0', b'\n', b'\r'):
318 if c in lbl:
312 if c in lbl:
319 raise error.InputError(
313 raise error.InputError(
320 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
314 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
321 )
315 )
322 try:
316 try:
323 int(lbl)
317 int(lbl)
324 raise error.InputError(_(b"cannot use an integer as a name"))
318 raise error.InputError(_(b"cannot use an integer as a name"))
325 except ValueError:
319 except ValueError:
326 pass
320 pass
327 if lbl.strip() != lbl:
321 if lbl.strip() != lbl:
328 raise error.InputError(
322 raise error.InputError(
329 _(b"leading or trailing whitespace in name %r") % lbl
323 _(b"leading or trailing whitespace in name %r") % lbl
330 )
324 )
331
325
332
326
333 def checkfilename(f):
327 def checkfilename(f):
334 '''Check that the filename f is an acceptable filename for a tracked file'''
328 '''Check that the filename f is an acceptable filename for a tracked file'''
335 if b'\r' in f or b'\n' in f:
329 if b'\r' in f or b'\n' in f:
336 raise error.InputError(
330 raise error.InputError(
337 _(b"'\\n' and '\\r' disallowed in filenames: %r")
331 _(b"'\\n' and '\\r' disallowed in filenames: %r")
338 % pycompat.bytestr(f)
332 % pycompat.bytestr(f)
339 )
333 )
340
334
341
335
342 def checkportable(ui, f):
336 def checkportable(ui, f):
343 '''Check if filename f is portable and warn or abort depending on config'''
337 '''Check if filename f is portable and warn or abort depending on config'''
344 checkfilename(f)
338 checkfilename(f)
345 abort, warn = checkportabilityalert(ui)
339 abort, warn = checkportabilityalert(ui)
346 if abort or warn:
340 if abort or warn:
347 msg = util.checkwinfilename(f)
341 msg = util.checkwinfilename(f)
348 if msg:
342 if msg:
349 msg = b"%s: %s" % (msg, procutil.shellquote(f))
343 msg = b"%s: %s" % (msg, procutil.shellquote(f))
350 if abort:
344 if abort:
351 raise error.InputError(msg)
345 raise error.InputError(msg)
352 ui.warn(_(b"warning: %s\n") % msg)
346 ui.warn(_(b"warning: %s\n") % msg)
353
347
354
348
355 def checkportabilityalert(ui):
349 def checkportabilityalert(ui):
356 '''check if the user's config requests nothing, a warning, or abort for
350 '''check if the user's config requests nothing, a warning, or abort for
357 non-portable filenames'''
351 non-portable filenames'''
358 val = ui.config(b'ui', b'portablefilenames')
352 val = ui.config(b'ui', b'portablefilenames')
359 lval = val.lower()
353 lval = val.lower()
360 bval = stringutil.parsebool(val)
354 bval = stringutil.parsebool(val)
361 abort = pycompat.iswindows or lval == b'abort'
355 abort = pycompat.iswindows or lval == b'abort'
362 warn = bval or lval == b'warn'
356 warn = bval or lval == b'warn'
363 if bval is None and not (warn or abort or lval == b'ignore'):
357 if bval is None and not (warn or abort or lval == b'ignore'):
364 raise error.ConfigError(
358 raise error.ConfigError(
365 _(b"ui.portablefilenames value is invalid ('%s')") % val
359 _(b"ui.portablefilenames value is invalid ('%s')") % val
366 )
360 )
367 return abort, warn
361 return abort, warn
368
362
369
363
370 class casecollisionauditor(object):
364 class casecollisionauditor(object):
371 def __init__(self, ui, abort, dirstate):
365 def __init__(self, ui, abort, dirstate):
372 self._ui = ui
366 self._ui = ui
373 self._abort = abort
367 self._abort = abort
374 allfiles = b'\0'.join(dirstate)
368 allfiles = b'\0'.join(dirstate)
375 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
369 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
376 self._dirstate = dirstate
370 self._dirstate = dirstate
377 # The purpose of _newfiles is so that we don't complain about
371 # The purpose of _newfiles is so that we don't complain about
378 # case collisions if someone were to call this object with the
372 # case collisions if someone were to call this object with the
379 # same filename twice.
373 # same filename twice.
380 self._newfiles = set()
374 self._newfiles = set()
381
375
382 def __call__(self, f):
376 def __call__(self, f):
383 if f in self._newfiles:
377 if f in self._newfiles:
384 return
378 return
385 fl = encoding.lower(f)
379 fl = encoding.lower(f)
386 if fl in self._loweredfiles and f not in self._dirstate:
380 if fl in self._loweredfiles and f not in self._dirstate:
387 msg = _(b'possible case-folding collision for %s') % f
381 msg = _(b'possible case-folding collision for %s') % f
388 if self._abort:
382 if self._abort:
389 raise error.Abort(msg)
383 raise error.Abort(msg)
390 self._ui.warn(_(b"warning: %s\n") % msg)
384 self._ui.warn(_(b"warning: %s\n") % msg)
391 self._loweredfiles.add(fl)
385 self._loweredfiles.add(fl)
392 self._newfiles.add(f)
386 self._newfiles.add(f)
393
387
394
388
395 def filteredhash(repo, maxrev):
389 def filteredhash(repo, maxrev):
396 """build hash of filtered revisions in the current repoview.
390 """build hash of filtered revisions in the current repoview.
397
391
398 Multiple caches perform up-to-date validation by checking that the
392 Multiple caches perform up-to-date validation by checking that the
399 tiprev and tipnode stored in the cache file match the current repository.
393 tiprev and tipnode stored in the cache file match the current repository.
400 However, this is not sufficient for validating repoviews because the set
394 However, this is not sufficient for validating repoviews because the set
401 of revisions in the view may change without the repository tiprev and
395 of revisions in the view may change without the repository tiprev and
402 tipnode changing.
396 tipnode changing.
403
397
404 This function hashes all the revs filtered from the view and returns
398 This function hashes all the revs filtered from the view and returns
405 that SHA-1 digest.
399 that SHA-1 digest.
406 """
400 """
407 cl = repo.changelog
401 cl = repo.changelog
408 if not cl.filteredrevs:
402 if not cl.filteredrevs:
409 return None
403 return None
410 key = cl._filteredrevs_hashcache.get(maxrev)
404 key = cl._filteredrevs_hashcache.get(maxrev)
411 if not key:
405 if not key:
412 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
406 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
413 if revs:
407 if revs:
414 s = hashutil.sha1()
408 s = hashutil.sha1()
415 for rev in revs:
409 for rev in revs:
416 s.update(b'%d;' % rev)
410 s.update(b'%d;' % rev)
417 key = s.digest()
411 key = s.digest()
418 cl._filteredrevs_hashcache[maxrev] = key
412 cl._filteredrevs_hashcache[maxrev] = key
419 return key
413 return key
420
414
421
415
422 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
416 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
423 '''yield every hg repository under path, always recursively.
417 '''yield every hg repository under path, always recursively.
424 The recurse flag will only control recursion into repo working dirs'''
418 The recurse flag will only control recursion into repo working dirs'''
425
419
426 def errhandler(err):
420 def errhandler(err):
427 if err.filename == path:
421 if err.filename == path:
428 raise err
422 raise err
429
423
430 samestat = getattr(os.path, 'samestat', None)
424 samestat = getattr(os.path, 'samestat', None)
431 if followsym and samestat is not None:
425 if followsym and samestat is not None:
432
426
433 def adddir(dirlst, dirname):
427 def adddir(dirlst, dirname):
434 dirstat = os.stat(dirname)
428 dirstat = os.stat(dirname)
435 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
429 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
436 if not match:
430 if not match:
437 dirlst.append(dirstat)
431 dirlst.append(dirstat)
438 return not match
432 return not match
439
433
440 else:
434 else:
441 followsym = False
435 followsym = False
442
436
443 if (seen_dirs is None) and followsym:
437 if (seen_dirs is None) and followsym:
444 seen_dirs = []
438 seen_dirs = []
445 adddir(seen_dirs, path)
439 adddir(seen_dirs, path)
446 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
440 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
447 dirs.sort()
441 dirs.sort()
448 if b'.hg' in dirs:
442 if b'.hg' in dirs:
449 yield root # found a repository
443 yield root # found a repository
450 qroot = os.path.join(root, b'.hg', b'patches')
444 qroot = os.path.join(root, b'.hg', b'patches')
451 if os.path.isdir(os.path.join(qroot, b'.hg')):
445 if os.path.isdir(os.path.join(qroot, b'.hg')):
452 yield qroot # we have a patch queue repo here
446 yield qroot # we have a patch queue repo here
453 if recurse:
447 if recurse:
454 # avoid recursing inside the .hg directory
448 # avoid recursing inside the .hg directory
455 dirs.remove(b'.hg')
449 dirs.remove(b'.hg')
456 else:
450 else:
457 dirs[:] = [] # don't descend further
451 dirs[:] = [] # don't descend further
458 elif followsym:
452 elif followsym:
459 newdirs = []
453 newdirs = []
460 for d in dirs:
454 for d in dirs:
461 fname = os.path.join(root, d)
455 fname = os.path.join(root, d)
462 if adddir(seen_dirs, fname):
456 if adddir(seen_dirs, fname):
463 if os.path.islink(fname):
457 if os.path.islink(fname):
464 for hgname in walkrepos(fname, True, seen_dirs):
458 for hgname in walkrepos(fname, True, seen_dirs):
465 yield hgname
459 yield hgname
466 else:
460 else:
467 newdirs.append(d)
461 newdirs.append(d)
468 dirs[:] = newdirs
462 dirs[:] = newdirs
469
463
470
464
471 def binnode(ctx):
465 def binnode(ctx):
472 """Return binary node id for a given basectx"""
466 """Return binary node id for a given basectx"""
473 node = ctx.node()
467 node = ctx.node()
474 if node is None:
468 if node is None:
475 return wdirid
469 return wdirid
476 return node
470 return node
477
471
478
472
479 def intrev(ctx):
473 def intrev(ctx):
480 """Return integer for a given basectx that can be used in comparison or
474 """Return integer for a given basectx that can be used in comparison or
481 arithmetic operation"""
475 arithmetic operation"""
482 rev = ctx.rev()
476 rev = ctx.rev()
483 if rev is None:
477 if rev is None:
484 return wdirrev
478 return wdirrev
485 return rev
479 return rev
486
480
487
481
488 def formatchangeid(ctx):
482 def formatchangeid(ctx):
489 """Format changectx as '{rev}:{node|formatnode}', which is the default
483 """Format changectx as '{rev}:{node|formatnode}', which is the default
490 template provided by logcmdutil.changesettemplater"""
484 template provided by logcmdutil.changesettemplater"""
491 repo = ctx.repo()
485 repo = ctx.repo()
492 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
486 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
493
487
494
488
495 def formatrevnode(ui, rev, node):
489 def formatrevnode(ui, rev, node):
496 """Format given revision and node depending on the current verbosity"""
490 """Format given revision and node depending on the current verbosity"""
497 if ui.debugflag:
491 if ui.debugflag:
498 hexfunc = hex
492 hexfunc = hex
499 else:
493 else:
500 hexfunc = short
494 hexfunc = short
501 return b'%d:%s' % (rev, hexfunc(node))
495 return b'%d:%s' % (rev, hexfunc(node))
502
496
503
497
504 def resolvehexnodeidprefix(repo, prefix):
498 def resolvehexnodeidprefix(repo, prefix):
505 if prefix.startswith(b'x'):
499 if prefix.startswith(b'x'):
506 prefix = prefix[1:]
500 prefix = prefix[1:]
507 try:
501 try:
508 # Uses unfiltered repo because it's faster when prefix is ambiguous/
502 # Uses unfiltered repo because it's faster when prefix is ambiguous/
509 # This matches the shortesthexnodeidprefix() function below.
503 # This matches the shortesthexnodeidprefix() function below.
510 node = repo.unfiltered().changelog._partialmatch(prefix)
504 node = repo.unfiltered().changelog._partialmatch(prefix)
511 except error.AmbiguousPrefixLookupError:
505 except error.AmbiguousPrefixLookupError:
512 revset = repo.ui.config(
506 revset = repo.ui.config(
513 b'experimental', b'revisions.disambiguatewithin'
507 b'experimental', b'revisions.disambiguatewithin'
514 )
508 )
515 if revset:
509 if revset:
516 # Clear config to avoid infinite recursion
510 # Clear config to avoid infinite recursion
517 configoverrides = {
511 configoverrides = {
518 (b'experimental', b'revisions.disambiguatewithin'): None
512 (b'experimental', b'revisions.disambiguatewithin'): None
519 }
513 }
520 with repo.ui.configoverride(configoverrides):
514 with repo.ui.configoverride(configoverrides):
521 revs = repo.anyrevs([revset], user=True)
515 revs = repo.anyrevs([revset], user=True)
522 matches = []
516 matches = []
523 for rev in revs:
517 for rev in revs:
524 node = repo.changelog.node(rev)
518 node = repo.changelog.node(rev)
525 if hex(node).startswith(prefix):
519 if hex(node).startswith(prefix):
526 matches.append(node)
520 matches.append(node)
527 if len(matches) == 1:
521 if len(matches) == 1:
528 return matches[0]
522 return matches[0]
529 raise
523 raise
530 if node is None:
524 if node is None:
531 return
525 return
532 repo.changelog.rev(node) # make sure node isn't filtered
526 repo.changelog.rev(node) # make sure node isn't filtered
533 return node
527 return node
534
528
535
529
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number

    Returns True only when ``prefix`` parses as a plain decimal integer,
    has no leading zero (b'0' itself is a valid revnum), and falls within
    the repository's revision range.
    """
    try:
        asint = int(prefix)
    except ValueError:
        # not numeric at all, so it can't be a revnum
        return False
    # A leading zero never reads as a revnum -- except for b'0' itself,
    # which *is* a valid revision number and still needs disambiguation.
    if prefix != b'0' and prefix[0:1] == b'0':
        return False
    # Only values below the repo length are plausible revision numbers.
    return asint < len(repo)
549
543
550
544
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    # a zero or negative minlength makes no sense; clamp it to 1
    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            # mark revnum-looking prefixes with a leading 'x' instead of
            # lengthening them
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        # otherwise, extend the prefix until it can no longer be mistaken
        # for a revision number
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        # only disambiguate within the configured revset; reuse the
        # resolved revset across calls via the caller-provided cache
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapped need to give access to its internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                # fast path: the nodetree knows the shortest unique length
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # slow path (no C nodetree available): grow the prefix until it
            # matches exactly one node within the disambiguation revset
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        # the node is unknown in the (unfiltered) changelog
        raise error.RepoLookupError()
623
617
624
618
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        # the symbol did not resolve to any revision
        return False
    return True
636
630
637
631
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        # passing a non-bytes symbol is a caller bug, not a lookup failure
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        # fast path for the special names handled directly by the repo
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        # next, try to read the symbol as a revision number
        try:
            r = int(symbol)
            # reject non-canonical decimal forms (e.g. b'010', b'+1')
            # that int() accepts but that should not name a revnum
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                # negative revnums count back from the tip
                r += l
            # binds as: r < 0 or (r >= l and r != wdirrev) -- out-of-range
            # revnums are rejected except the wdirrev sentinel, which may
            # raise WdirUnsupported below when looked up
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # propagate so the outer handler can build a "hidden/filtered
            # revision" error message
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # a 40-char symbol may be a full binary nodeid in hex form
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try the symbol as a (possibly short) hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the symbol named the working directory; return its context
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        # the revision exists but is filtered out of this repo view;
        # rebuild a more helpful error (e.g. "hidden revision ...")
        raise _filterederror(repo, symbol)
703
697
704
698
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith(b'visible'):
        # some other filter (e.g. 'served') hides this changeset
        msg = _(b"filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # the 'visible' family of filters hides obsolete changesets; look the
    # changeset up in the unfiltered repo to explain why it is hidden
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)

    if ctx.obsolete():
        # enrich the message with the reason that made this changeset
        # not visible
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _(b"hidden revision '%s'") % changeid

    hint = _(b'use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
729
723
730
724
def revsingle(repo, revspec, default=b'.', localalias=None):
    """Return the context of the last revision matched by ``revspec``.

    An empty spec falls back to ``default``; the integer 0 is a valid
    spec, not an empty one. Aborts when the spec matches no revision.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_(b'empty revision set'))
    return repo[matched.last()]
739
733
740
734
def _pairspec(revspec):
    """Report whether ``revspec``'s top-level operator is a range form."""
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in (b'range', b'rangepre', b'rangepost', b'rangeall')
749
743
750
744
def revpair(repo, revs):
    """Resolve ``revs`` to a (first, second) pair of contexts.

    With no specs, returns ('.', working directory). Aborts when the
    union of specs is empty, or when multiple specs collapse to a single
    revision because one side of the range was empty.
    """
    if not revs:
        return repo[b'.'], repo[None]

    resolved = revrange(repo, revs)
    if not resolved:
        raise error.Abort(_(b'empty revision range'))

    first = resolved.first()
    second = resolved.last()

    collapsed = first == second
    if (
        collapsed
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        # several specs were given but at least one matched nothing
        raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if collapsed and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
775
769
776
770
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # turn bare integers into revnum specs; pass strings through as-is
    allspecs = [
        revsetlang.formatspec(b'%d', spec) if isinstance(spec, int) else spec
        for spec in specs
    ]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
804
798
805
799
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an endless stream of window sizes.

    Sizes start at ``windowsize`` and double on each step until reaching
    ``sizelimit``, after which the same value repeats forever.
    """
    size = windowsize
    while size < sizelimit:
        yield size
        size *= 2
    while True:
        yield size
811
805
812
806
def walkchangerevs(repo, revs, makefilematcher, prepare):
    '''Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    if not revs:
        return []
    # bind repo.__getitem__ once; change(rev) returns the context for rev
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # pull up to windowsize revs from the iterator, in caller order
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            # gather pass: call prepare() in ascending (forward) order
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            # yield pass: emit contexts in the caller-requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
853
847
854
848
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a real merge: both parents matter
        return parents
    if repo.ui.debugflag:
        # debug output always shows two parents, padding with null
        return [parents[0], repo[nullrev]]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # the parent immediately precedes this revision: nothing to show
        return []
    return parents
870
864
871
865
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    # decide whether to produce cwd-relative paths
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # this branch would work even if cwd == b'' (ie cwd = repo
            # root), but its generality makes the returned function slower
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)
    # repo-relative output: either keep forward slashes or localize them
    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    return util.localpath
910
904
911
905
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''

    def subrelpathfn(f):
        # prepend the subrepo path before delegating to the outer uipathfn
        return uipathfn(posixpath.join(subpath, f))

    return subrelpathfn
915
909
916
910
def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    if pats:
        return True
    return bool(opts.get(b'include') or opts.get(b'exclude'))
924
918
925
919
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns are passed through untouched
            expanded.append(kindpat)
            continue
        try:
            matched = glob.glob(pat)
        except re.error:
            # glob can choke on some patterns; treat them literally
            matched = [pat]
        if matched:
            expanded.extend(matched)
        else:
            # no filesystem match: keep the original pattern
            expanded.append(kindpat)
    return expanded
944
938
945
939
def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    if badfn is None:
        # default callback: warn about bad matches on the repo's ui
        def badfn(f, msg):
            ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    if m.always():
        # the matcher matches everything, so no patterns were effective
        pats = []
    return m, pats
977
971
978
972
def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), discarding the effective pattern list
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
984
978
985
979
def matchall(repo):
    '''Return a matcher that will efficiently match everything.

    ``repo`` is accepted for signature parity with the other matcher
    helpers in this module but is not consulted.
    '''
    m = matchmod.always()
    return m
989
983
990
984
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.

    ``repo`` is accepted for signature parity with the other matcher
    helpers in this module but is not consulted.
    '''
    m = matchmod.exact(files, badfn=badfn)
    return m
994
988
995
989
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # a plain path: canonicalize it against the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # a kinded pattern must match exactly one file in the revision
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
1009
1003
1010
1004
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if origbackuppath:
        # the configured path is interpreted relative to the working copy
        return vfs.vfs(repo.wvfs.join(origbackuppath))
    return None
1019
1013
1020
1014
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no ui.origbackuppath configured: default to <filepath>.orig
        # next to the file in the working copy
        return repo.wjoin(filepath + b".orig")

    # ensure the backup directory exists and is a real directory
    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                # only the first (deepest) conflicting file needs removing;
                # everything above it is created by makedirs below
                break

        origvfs.makedirs(origbackupdir)

    # a directory sitting where the backup file should go must be removed
    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
1055
1049
1056
1050
1057 class _containsnode(object):
1051 class _containsnode(object):
1058 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1052 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1059
1053
1060 def __init__(self, repo, revcontainer):
1054 def __init__(self, repo, revcontainer):
1061 self._torev = repo.changelog.rev
1055 self._torev = repo.changelog.rev
1062 self._revcontains = revcontainer.__contains__
1056 self._revcontains = revcontainer.__contains__
1063
1057
1064 def __contains__(self, node):
1058 def __contains__(self, node):
1065 return self._revcontains(self._torev(node))
1059 return self._revcontains(self._torev(node))
1066
1060
1067
1061
def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when we are actually fixing phases.
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        # plain iterable of nodes: they are replaced by nothing
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            # explicit 'moves' entries win over computed ones
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # map each new node to the old nodes it replaces
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process in rev order so parent phases are computed first
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (bookmark, None) means deletion
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assume we do not have "unstable" nodes above the cleaned ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )
1249
1243
1250
1244
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    """Add new files and remove missing ones, recursing into subrepos.

    Returns 1 if any explicitly-requested file was rejected or a subrepo
    addremove failed, otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    # normalize percentage to the 0.0-1.0 ratio used by similar.findrenames
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        # only report files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    # print a status line per file that will be added or removed
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
1318
1312
1319
1313
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: badfn closes over 'rejected' before it is bound; this works
    # because the closure is only invoked after the assignment below.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # 1 signals that an explicitly named file could not be processed
    for f in rejected:
        if f in m.files():
            return 1
    return 0
1353
1347
1354
1348
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    # classify by dirstate letter ('?' unknown, 'r' removed, 'a' added)
    # combined with whether the file exists on disk (truthy stat 'st')
    for abs, st in pycompat.iteritems(walkresults):
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1389
1383
1390
1384
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.

    Returns {new: old} for pairs scoring at least 'similarity' (0.0-1.0).
    A similarity of 0 disables rename detection entirely.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames
1412
1406
1413
1407
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # All dirstate mutations happen under a single working-copy lock.
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for newfile, oldfile in pycompat.iteritems(renames):
            wctx.copy(oldfile, newfile)
1423
1417
1424
1418
def getrenamedfn(repo, endrev=None):
    """Return a function getrenamed(fn, rev) -> copy source or None."""
    if copiesmod.usechangesetcentricalgo(repo):
        # changeset-centric copy data: read directly from the changeset

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    # filelog-centric path: cache rename info per file, keyed by linkrev
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1469
1463
1470
1464
def getcopiesfn(repo, endrev=None):
    """Return a function copiesfn(ctx) -> sorted [(dst, src)] copy pairs."""
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        # fall back to the filelog-based lookup
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn
1495
1489
1496
1490
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            # source was only added in the working copy: no committed
            # revision exists to record copy data against
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1519
1513
1520
1514
def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    # snapshot working-dir copies before reparenting clears them
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), nullid)
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    # chain copies through the old parent where possible
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            # copy info only makes sense for files added on top of newctx
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()
1566
1560
1567
1561
def filterrequirements(requirements):
    """ filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT not in requirements:
        # without share-safe, everything lives in .hg/requires
        return requirements, None
    workingdir = set()
    store = set()
    for req in requirements:
        if req in requirementsmod.WORKING_DIR_REQUIREMENTS:
            workingdir.add(req)
        else:
            store.add(req)
    return workingdir, store
1585
1579
1586
1580
def istreemanifest(repo):
    """ returns whether the repository is using treemanifest or not """
    reqs = repo.requirements
    return requirementsmod.TREEMANIFEST_REQUIREMENT in reqs
1590
1584
1591
1585
1592 def writereporequirements(repo, requirements=None):
1586 def writereporequirements(repo, requirements=None):
1593 """ writes requirements for the repo to .hg/requires """
1587 """ writes requirements for the repo to .hg/requires """
1594 if requirements:
1588 if requirements:
1595 repo.requirements = requirements
1589 repo.requirements = requirements
1596 wcreq, storereq = filterrequirements(repo.requirements)
1590 wcreq, storereq = filterrequirements(repo.requirements)
1597 if wcreq is not None:
1591 if wcreq is not None:
1598 writerequires(repo.vfs, wcreq)
1592 writerequires(repo.vfs, wcreq)
1599 if storereq is not None:
1593 if storereq is not None:
1600 writerequires(repo.svfs, storereq)
1594 writerequires(repo.svfs, storereq)
1601
1595
1602
1596
1603 def writerequires(opener, requirements):
1597 def writerequires(opener, requirements):
1604 with opener(b'requires', b'w', atomictemp=True) as fp:
1598 with opener(b'requires', b'w', atomictemp=True) as fp:
1605 for r in sorted(requirements):
1599 for r in sorted(requirements):
1606 fp.write(b"%s\n" % r)
1600 fp.write(b"%s\n" % r)
1607
1601
1608
1602
1609 class filecachesubentry(object):
1603 class filecachesubentry(object):
1610 def __init__(self, path, stat):
1604 def __init__(self, path, stat):
1611 self.path = path
1605 self.path = path
1612 self.cachestat = None
1606 self.cachestat = None
1613 self._cacheable = None
1607 self._cacheable = None
1614
1608
1615 if stat:
1609 if stat:
1616 self.cachestat = filecachesubentry.stat(self.path)
1610 self.cachestat = filecachesubentry.stat(self.path)
1617
1611
1618 if self.cachestat:
1612 if self.cachestat:
1619 self._cacheable = self.cachestat.cacheable()
1613 self._cacheable = self.cachestat.cacheable()
1620 else:
1614 else:
1621 # None means we don't know yet
1615 # None means we don't know yet
1622 self._cacheable = None
1616 self._cacheable = None
1623
1617
1624 def refresh(self):
1618 def refresh(self):
1625 if self.cacheable():
1619 if self.cacheable():
1626 self.cachestat = filecachesubentry.stat(self.path)
1620 self.cachestat = filecachesubentry.stat(self.path)
1627
1621
1628 def cacheable(self):
1622 def cacheable(self):
1629 if self._cacheable is not None:
1623 if self._cacheable is not None:
1630 return self._cacheable
1624 return self._cacheable
1631
1625
1632 # we don't know yet, assume it is for now
1626 # we don't know yet, assume it is for now
1633 return True
1627 return True
1634
1628
1635 def changed(self):
1629 def changed(self):
1636 # no point in going further if we can't cache it
1630 # no point in going further if we can't cache it
1637 if not self.cacheable():
1631 if not self.cacheable():
1638 return True
1632 return True
1639
1633
1640 newstat = filecachesubentry.stat(self.path)
1634 newstat = filecachesubentry.stat(self.path)
1641
1635
1642 # we may not know if it's cacheable yet, check again now
1636 # we may not know if it's cacheable yet, check again now
1643 if newstat and self._cacheable is None:
1637 if newstat and self._cacheable is None:
1644 self._cacheable = newstat.cacheable()
1638 self._cacheable = newstat.cacheable()
1645
1639
1646 # check again
1640 # check again
1647 if not self._cacheable:
1641 if not self._cacheable:
1648 return True
1642 return True
1649
1643
1650 if self.cachestat != newstat:
1644 if self.cachestat != newstat:
1651 self.cachestat = newstat
1645 self.cachestat = newstat
1652 return True
1646 return True
1653 else:
1647 else:
1654 return False
1648 return False
1655
1649
1656 @staticmethod
1650 @staticmethod
1657 def stat(path):
1651 def stat(path):
1658 try:
1652 try:
1659 return util.cachestat(path)
1653 return util.cachestat(path)
1660 except OSError as e:
1654 except OSError as e:
1661 if e.errno != errno.ENOENT:
1655 if e.errno != errno.ENOENT:
1662 raise
1656 raise
1663
1657
1664
1658
1665 class filecacheentry(object):
1659 class filecacheentry(object):
1666 def __init__(self, paths, stat=True):
1660 def __init__(self, paths, stat=True):
1667 self._entries = []
1661 self._entries = []
1668 for path in paths:
1662 for path in paths:
1669 self._entries.append(filecachesubentry(path, stat))
1663 self._entries.append(filecachesubentry(path, stat))
1670
1664
1671 def changed(self):
1665 def changed(self):
1672 '''true if any entry has changed'''
1666 '''true if any entry has changed'''
1673 for entry in self._entries:
1667 for entry in self._entries:
1674 if entry.changed():
1668 if entry.changed():
1675 return True
1669 return True
1676 return False
1670 return False
1677
1671
1678 def refresh(self):
1672 def refresh(self):
1679 for entry in self._entries:
1673 for entry in self._entries:
1680 entry.refresh()
1674 entry.refresh()
1681
1675
1682
1676
1683 class filecache(object):
1677 class filecache(object):
1684 """A property like decorator that tracks files under .hg/ for updates.
1678 """A property like decorator that tracks files under .hg/ for updates.
1685
1679
1686 On first access, the files defined as arguments are stat()ed and the
1680 On first access, the files defined as arguments are stat()ed and the
1687 results cached. The decorated function is called. The results are stashed
1681 results cached. The decorated function is called. The results are stashed
1688 away in a ``_filecache`` dict on the object whose method is decorated.
1682 away in a ``_filecache`` dict on the object whose method is decorated.
1689
1683
1690 On subsequent access, the cached result is used as it is set to the
1684 On subsequent access, the cached result is used as it is set to the
1691 instance dictionary.
1685 instance dictionary.
1692
1686
1693 On external property set/delete operations, the caller must update the
1687 On external property set/delete operations, the caller must update the
1694 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1688 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1695 instead of directly setting <attr>.
1689 instead of directly setting <attr>.
1696
1690
1697 When using the property API, the cached data is always used if available.
1691 When using the property API, the cached data is always used if available.
1698 No stat() is performed to check if the file has changed.
1692 No stat() is performed to check if the file has changed.
1699
1693
1700 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1694 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1701 can populate an entry before the property's getter is called. In this case,
1695 can populate an entry before the property's getter is called. In this case,
1702 entries in ``_filecache`` will be used during property operations,
1696 entries in ``_filecache`` will be used during property operations,
1703 if available. If the underlying file changes, it is up to external callers
1697 if available. If the underlying file changes, it is up to external callers
1704 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1698 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1705 method result as well as possibly calling ``del obj._filecache[attr]`` to
1699 method result as well as possibly calling ``del obj._filecache[attr]`` to
1706 remove the ``filecacheentry``.
1700 remove the ``filecacheentry``.
1707 """
1701 """
1708
1702
1709 def __init__(self, *paths):
1703 def __init__(self, *paths):
1710 self.paths = paths
1704 self.paths = paths
1711
1705
1712 def join(self, obj, fname):
1706 def join(self, obj, fname):
1713 """Used to compute the runtime path of a cached file.
1707 """Used to compute the runtime path of a cached file.
1714
1708
1715 Users should subclass filecache and provide their own version of this
1709 Users should subclass filecache and provide their own version of this
1716 function to call the appropriate join function on 'obj' (an instance
1710 function to call the appropriate join function on 'obj' (an instance
1717 of the class that its member function was decorated).
1711 of the class that its member function was decorated).
1718 """
1712 """
1719 raise NotImplementedError
1713 raise NotImplementedError
1720
1714
1721 def __call__(self, func):
1715 def __call__(self, func):
1722 self.func = func
1716 self.func = func
1723 self.sname = func.__name__
1717 self.sname = func.__name__
1724 self.name = pycompat.sysbytes(self.sname)
1718 self.name = pycompat.sysbytes(self.sname)
1725 return self
1719 return self
1726
1720
1727 def __get__(self, obj, type=None):
1721 def __get__(self, obj, type=None):
1728 # if accessed on the class, return the descriptor itself.
1722 # if accessed on the class, return the descriptor itself.
1729 if obj is None:
1723 if obj is None:
1730 return self
1724 return self
1731
1725
1732 assert self.sname not in obj.__dict__
1726 assert self.sname not in obj.__dict__
1733
1727
1734 entry = obj._filecache.get(self.name)
1728 entry = obj._filecache.get(self.name)
1735
1729
1736 if entry:
1730 if entry:
1737 if entry.changed():
1731 if entry.changed():
1738 entry.obj = self.func(obj)
1732 entry.obj = self.func(obj)
1739 else:
1733 else:
1740 paths = [self.join(obj, path) for path in self.paths]
1734 paths = [self.join(obj, path) for path in self.paths]
1741
1735
1742 # We stat -before- creating the object so our cache doesn't lie if
1736 # We stat -before- creating the object so our cache doesn't lie if
1743 # a writer modified between the time we read and stat
1737 # a writer modified between the time we read and stat
1744 entry = filecacheentry(paths, True)
1738 entry = filecacheentry(paths, True)
1745 entry.obj = self.func(obj)
1739 entry.obj = self.func(obj)
1746
1740
1747 obj._filecache[self.name] = entry
1741 obj._filecache[self.name] = entry
1748
1742
1749 obj.__dict__[self.sname] = entry.obj
1743 obj.__dict__[self.sname] = entry.obj
1750 return entry.obj
1744 return entry.obj
1751
1745
1752 # don't implement __set__(), which would make __dict__ lookup as slow as
1746 # don't implement __set__(), which would make __dict__ lookup as slow as
1753 # function call.
1747 # function call.
1754
1748
1755 def set(self, obj, value):
1749 def set(self, obj, value):
1756 if self.name not in obj._filecache:
1750 if self.name not in obj._filecache:
1757 # we add an entry for the missing value because X in __dict__
1751 # we add an entry for the missing value because X in __dict__
1758 # implies X in _filecache
1752 # implies X in _filecache
1759 paths = [self.join(obj, path) for path in self.paths]
1753 paths = [self.join(obj, path) for path in self.paths]
1760 ce = filecacheentry(paths, False)
1754 ce = filecacheentry(paths, False)
1761 obj._filecache[self.name] = ce
1755 obj._filecache[self.name] = ce
1762 else:
1756 else:
1763 ce = obj._filecache[self.name]
1757 ce = obj._filecache[self.name]
1764
1758
1765 ce.obj = value # update cached copy
1759 ce.obj = value # update cached copy
1766 obj.__dict__[self.sname] = value # update copy returned by obj.x
1760 obj.__dict__[self.sname] = value # update copy returned by obj.x
1767
1761
1768
1762
1769 def extdatasource(repo, source):
1763 def extdatasource(repo, source):
1770 """Gather a map of rev -> value dict from the specified source
1764 """Gather a map of rev -> value dict from the specified source
1771
1765
1772 A source spec is treated as a URL, with a special case shell: type
1766 A source spec is treated as a URL, with a special case shell: type
1773 for parsing the output from a shell command.
1767 for parsing the output from a shell command.
1774
1768
1775 The data is parsed as a series of newline-separated records where
1769 The data is parsed as a series of newline-separated records where
1776 each record is a revision specifier optionally followed by a space
1770 each record is a revision specifier optionally followed by a space
1777 and a freeform string value. If the revision is known locally, it
1771 and a freeform string value. If the revision is known locally, it
1778 is converted to a rev, otherwise the record is skipped.
1772 is converted to a rev, otherwise the record is skipped.
1779
1773
1780 Note that both key and value are treated as UTF-8 and converted to
1774 Note that both key and value are treated as UTF-8 and converted to
1781 the local encoding. This allows uniformity between local and
1775 the local encoding. This allows uniformity between local and
1782 remote data sources.
1776 remote data sources.
1783 """
1777 """
1784
1778
1785 spec = repo.ui.config(b"extdata", source)
1779 spec = repo.ui.config(b"extdata", source)
1786 if not spec:
1780 if not spec:
1787 raise error.Abort(_(b"unknown extdata source '%s'") % source)
1781 raise error.Abort(_(b"unknown extdata source '%s'") % source)
1788
1782
1789 data = {}
1783 data = {}
1790 src = proc = None
1784 src = proc = None
1791 try:
1785 try:
1792 if spec.startswith(b"shell:"):
1786 if spec.startswith(b"shell:"):
1793 # external commands should be run relative to the repo root
1787 # external commands should be run relative to the repo root
1794 cmd = spec[6:]
1788 cmd = spec[6:]
1795 proc = subprocess.Popen(
1789 proc = subprocess.Popen(
1796 procutil.tonativestr(cmd),
1790 procutil.tonativestr(cmd),
1797 shell=True,
1791 shell=True,
1798 bufsize=-1,
1792 bufsize=-1,
1799 close_fds=procutil.closefds,
1793 close_fds=procutil.closefds,
1800 stdout=subprocess.PIPE,
1794 stdout=subprocess.PIPE,
1801 cwd=procutil.tonativestr(repo.root),
1795 cwd=procutil.tonativestr(repo.root),
1802 )
1796 )
1803 src = proc.stdout
1797 src = proc.stdout
1804 else:
1798 else:
1805 # treat as a URL or file
1799 # treat as a URL or file
1806 src = url.open(repo.ui, spec)
1800 src = url.open(repo.ui, spec)
1807 for l in src:
1801 for l in src:
1808 if b" " in l:
1802 if b" " in l:
1809 k, v = l.strip().split(b" ", 1)
1803 k, v = l.strip().split(b" ", 1)
1810 else:
1804 else:
1811 k, v = l.strip(), b""
1805 k, v = l.strip(), b""
1812
1806
1813 k = encoding.tolocal(k)
1807 k = encoding.tolocal(k)
1814 try:
1808 try:
1815 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1809 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1816 except (error.LookupError, error.RepoLookupError):
1810 except (error.LookupError, error.RepoLookupError):
1817 pass # we ignore data for nodes that don't exist locally
1811 pass # we ignore data for nodes that don't exist locally
1818 finally:
1812 finally:
1819 if proc:
1813 if proc:
1820 try:
1814 try:
1821 proc.communicate()
1815 proc.communicate()
1822 except ValueError:
1816 except ValueError:
1823 # This happens if we started iterating src and then
1817 # This happens if we started iterating src and then
1824 # get a parse error on a line. It should be safe to ignore.
1818 # get a parse error on a line. It should be safe to ignore.
1825 pass
1819 pass
1826 if src:
1820 if src:
1827 src.close()
1821 src.close()
1828 if proc and proc.returncode != 0:
1822 if proc and proc.returncode != 0:
1829 raise error.Abort(
1823 raise error.Abort(
1830 _(b"extdata command '%s' failed: %s")
1824 _(b"extdata command '%s' failed: %s")
1831 % (cmd, procutil.explainexit(proc.returncode))
1825 % (cmd, procutil.explainexit(proc.returncode))
1832 )
1826 )
1833
1827
1834 return data
1828 return data
1835
1829
1836
1830
1837 class progress(object):
1831 class progress(object):
1838 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1832 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1839 self.ui = ui
1833 self.ui = ui
1840 self.pos = 0
1834 self.pos = 0
1841 self.topic = topic
1835 self.topic = topic
1842 self.unit = unit
1836 self.unit = unit
1843 self.total = total
1837 self.total = total
1844 self.debug = ui.configbool(b'progress', b'debug')
1838 self.debug = ui.configbool(b'progress', b'debug')
1845 self._updatebar = updatebar
1839 self._updatebar = updatebar
1846
1840
1847 def __enter__(self):
1841 def __enter__(self):
1848 return self
1842 return self
1849
1843
1850 def __exit__(self, exc_type, exc_value, exc_tb):
1844 def __exit__(self, exc_type, exc_value, exc_tb):
1851 self.complete()
1845 self.complete()
1852
1846
1853 def update(self, pos, item=b"", total=None):
1847 def update(self, pos, item=b"", total=None):
1854 assert pos is not None
1848 assert pos is not None
1855 if total:
1849 if total:
1856 self.total = total
1850 self.total = total
1857 self.pos = pos
1851 self.pos = pos
1858 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1852 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1859 if self.debug:
1853 if self.debug:
1860 self._printdebug(item)
1854 self._printdebug(item)
1861
1855
1862 def increment(self, step=1, item=b"", total=None):
1856 def increment(self, step=1, item=b"", total=None):
1863 self.update(self.pos + step, item, total)
1857 self.update(self.pos + step, item, total)
1864
1858
1865 def complete(self):
1859 def complete(self):
1866 self.pos = None
1860 self.pos = None
1867 self.unit = b""
1861 self.unit = b""
1868 self.total = None
1862 self.total = None
1869 self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
1863 self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
1870
1864
1871 def _printdebug(self, item):
1865 def _printdebug(self, item):
1872 unit = b''
1866 unit = b''
1873 if self.unit:
1867 if self.unit:
1874 unit = b' ' + self.unit
1868 unit = b' ' + self.unit
1875 if item:
1869 if item:
1876 item = b' ' + item
1870 item = b' ' + item
1877
1871
1878 if self.total:
1872 if self.total:
1879 pct = 100.0 * self.pos / self.total
1873 pct = 100.0 * self.pos / self.total
1880 self.ui.debug(
1874 self.ui.debug(
1881 b'%s:%s %d/%d%s (%4.2f%%)\n'
1875 b'%s:%s %d/%d%s (%4.2f%%)\n'
1882 % (self.topic, item, self.pos, self.total, unit, pct)
1876 % (self.topic, item, self.pos, self.total, unit, pct)
1883 )
1877 )
1884 else:
1878 else:
1885 self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1879 self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1886
1880
1887
1881
1888 def gdinitconfig(ui):
1882 def gdinitconfig(ui):
1889 """helper function to know if a repo should be created as general delta
1883 """helper function to know if a repo should be created as general delta
1890 """
1884 """
1891 # experimental config: format.generaldelta
1885 # experimental config: format.generaldelta
1892 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1886 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1893 b'format', b'usegeneraldelta'
1887 b'format', b'usegeneraldelta'
1894 )
1888 )
1895
1889
1896
1890
1897 def gddeltaconfig(ui):
1891 def gddeltaconfig(ui):
1898 """helper function to know if incoming delta should be optimised
1892 """helper function to know if incoming delta should be optimised
1899 """
1893 """
1900 # experimental config: format.generaldelta
1894 # experimental config: format.generaldelta
1901 return ui.configbool(b'format', b'generaldelta')
1895 return ui.configbool(b'format', b'generaldelta')
1902
1896
1903
1897
1904 class simplekeyvaluefile(object):
1898 class simplekeyvaluefile(object):
1905 """A simple file with key=value lines
1899 """A simple file with key=value lines
1906
1900
1907 Keys must be alphanumerics and start with a letter, values must not
1901 Keys must be alphanumerics and start with a letter, values must not
1908 contain '\n' characters"""
1902 contain '\n' characters"""
1909
1903
1910 firstlinekey = b'__firstline'
1904 firstlinekey = b'__firstline'
1911
1905
1912 def __init__(self, vfs, path, keys=None):
1906 def __init__(self, vfs, path, keys=None):
1913 self.vfs = vfs
1907 self.vfs = vfs
1914 self.path = path
1908 self.path = path
1915
1909
1916 def read(self, firstlinenonkeyval=False):
1910 def read(self, firstlinenonkeyval=False):
1917 """Read the contents of a simple key-value file
1911 """Read the contents of a simple key-value file
1918
1912
1919 'firstlinenonkeyval' indicates whether the first line of file should
1913 'firstlinenonkeyval' indicates whether the first line of file should
1920 be treated as a key-value pair or reuturned fully under the
1914 be treated as a key-value pair or reuturned fully under the
1921 __firstline key."""
1915 __firstline key."""
1922 lines = self.vfs.readlines(self.path)
1916 lines = self.vfs.readlines(self.path)
1923 d = {}
1917 d = {}
1924 if firstlinenonkeyval:
1918 if firstlinenonkeyval:
1925 if not lines:
1919 if not lines:
1926 e = _(b"empty simplekeyvalue file")
1920 e = _(b"empty simplekeyvalue file")
1927 raise error.CorruptedState(e)
1921 raise error.CorruptedState(e)
1928 # we don't want to include '\n' in the __firstline
1922 # we don't want to include '\n' in the __firstline
1929 d[self.firstlinekey] = lines[0][:-1]
1923 d[self.firstlinekey] = lines[0][:-1]
1930 del lines[0]
1924 del lines[0]
1931
1925
1932 try:
1926 try:
1933 # the 'if line.strip()' part prevents us from failing on empty
1927 # the 'if line.strip()' part prevents us from failing on empty
1934 # lines which only contain '\n' therefore are not skipped
1928 # lines which only contain '\n' therefore are not skipped
1935 # by 'if line'
1929 # by 'if line'
1936 updatedict = dict(
1930 updatedict = dict(
1937 line[:-1].split(b'=', 1) for line in lines if line.strip()
1931 line[:-1].split(b'=', 1) for line in lines if line.strip()
1938 )
1932 )
1939 if self.firstlinekey in updatedict:
1933 if self.firstlinekey in updatedict:
1940 e = _(b"%r can't be used as a key")
1934 e = _(b"%r can't be used as a key")
1941 raise error.CorruptedState(e % self.firstlinekey)
1935 raise error.CorruptedState(e % self.firstlinekey)
1942 d.update(updatedict)
1936 d.update(updatedict)
1943 except ValueError as e:
1937 except ValueError as e:
1944 raise error.CorruptedState(stringutil.forcebytestr(e))
1938 raise error.CorruptedState(stringutil.forcebytestr(e))
1945 return d
1939 return d
1946
1940
1947 def write(self, data, firstline=None):
1941 def write(self, data, firstline=None):
1948 """Write key=>value mapping to a file
1942 """Write key=>value mapping to a file
1949 data is a dict. Keys must be alphanumerical and start with a letter.
1943 data is a dict. Keys must be alphanumerical and start with a letter.
1950 Values must not contain newline characters.
1944 Values must not contain newline characters.
1951
1945
1952 If 'firstline' is not None, it is written to file before
1946 If 'firstline' is not None, it is written to file before
1953 everything else, as it is, not in a key=value form"""
1947 everything else, as it is, not in a key=value form"""
1954 lines = []
1948 lines = []
1955 if firstline is not None:
1949 if firstline is not None:
1956 lines.append(b'%s\n' % firstline)
1950 lines.append(b'%s\n' % firstline)
1957
1951
1958 for k, v in data.items():
1952 for k, v in data.items():
1959 if k == self.firstlinekey:
1953 if k == self.firstlinekey:
1960 e = b"key name '%s' is reserved" % self.firstlinekey
1954 e = b"key name '%s' is reserved" % self.firstlinekey
1961 raise error.ProgrammingError(e)
1955 raise error.ProgrammingError(e)
1962 if not k[0:1].isalpha():
1956 if not k[0:1].isalpha():
1963 e = b"keys must start with a letter in a key-value file"
1957 e = b"keys must start with a letter in a key-value file"
1964 raise error.ProgrammingError(e)
1958 raise error.ProgrammingError(e)
1965 if not k.isalnum():
1959 if not k.isalnum():
1966 e = b"invalid key name in a simple key-value file"
1960 e = b"invalid key name in a simple key-value file"
1967 raise error.ProgrammingError(e)
1961 raise error.ProgrammingError(e)
1968 if b'\n' in v:
1962 if b'\n' in v:
1969 e = b"invalid value in a simple key-value file"
1963 e = b"invalid value in a simple key-value file"
1970 raise error.ProgrammingError(e)
1964 raise error.ProgrammingError(e)
1971 lines.append(b"%s=%s\n" % (k, v))
1965 lines.append(b"%s=%s\n" % (k, v))
1972 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1966 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1973 fp.write(b''.join(lines))
1967 fp.write(b''.join(lines))
1974
1968
1975
1969
1976 _reportobsoletedsource = [
1970 _reportobsoletedsource = [
1977 b'debugobsolete',
1971 b'debugobsolete',
1978 b'pull',
1972 b'pull',
1979 b'push',
1973 b'push',
1980 b'serve',
1974 b'serve',
1981 b'unbundle',
1975 b'unbundle',
1982 ]
1976 ]
1983
1977
1984 _reportnewcssource = [
1978 _reportnewcssource = [
1985 b'pull',
1979 b'pull',
1986 b'unbundle',
1980 b'unbundle',
1987 ]
1981 ]
1988
1982
1989
1983
1990 def prefetchfiles(repo, revmatches):
1984 def prefetchfiles(repo, revmatches):
1991 """Invokes the registered file prefetch functions, allowing extensions to
1985 """Invokes the registered file prefetch functions, allowing extensions to
1992 ensure the corresponding files are available locally, before the command
1986 ensure the corresponding files are available locally, before the command
1993 uses them.
1987 uses them.
1994
1988
1995 Args:
1989 Args:
1996 revmatches: a list of (revision, match) tuples to indicate the files to
1990 revmatches: a list of (revision, match) tuples to indicate the files to
1997 fetch at each revision. If any of the match elements is None, it matches
1991 fetch at each revision. If any of the match elements is None, it matches
1998 all files.
1992 all files.
1999 """
1993 """
2000
1994
2001 def _matcher(m):
1995 def _matcher(m):
2002 if m:
1996 if m:
2003 assert isinstance(m, matchmod.basematcher)
1997 assert isinstance(m, matchmod.basematcher)
2004 # The command itself will complain about files that don't exist, so
1998 # The command itself will complain about files that don't exist, so
2005 # don't duplicate the message.
1999 # don't duplicate the message.
2006 return matchmod.badmatch(m, lambda fn, msg: None)
2000 return matchmod.badmatch(m, lambda fn, msg: None)
2007 else:
2001 else:
2008 return matchall(repo)
2002 return matchall(repo)
2009
2003
2010 revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]
2004 revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]
2011
2005
2012 fileprefetchhooks(repo, revbadmatches)
2006 fileprefetchhooks(repo, revbadmatches)
2013
2007
2014
2008
2015 # a list of (repo, revs, match) prefetch functions
2009 # a list of (repo, revs, match) prefetch functions
2016 fileprefetchhooks = util.hooks()
2010 fileprefetchhooks = util.hooks()
2017
2011
2018 # A marker that tells the evolve extension to suppress its own reporting
2012 # A marker that tells the evolve extension to suppress its own reporting
2019 _reportstroubledchangesets = True
2013 _reportstroubledchangesets = True
2020
2014
2021
2015
2022 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
2016 def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
2023 """register a callback to issue a summary after the transaction is closed
2017 """register a callback to issue a summary after the transaction is closed
2024
2018
2025 If as_validator is true, then the callbacks are registered as transaction
2019 If as_validator is true, then the callbacks are registered as transaction
2026 validators instead
2020 validators instead
2027 """
2021 """
2028
2022
2029 def txmatch(sources):
2023 def txmatch(sources):
2030 return any(txnname.startswith(source) for source in sources)
2024 return any(txnname.startswith(source) for source in sources)
2031
2025
2032 categories = []
2026 categories = []
2033
2027
2034 def reportsummary(func):
2028 def reportsummary(func):
2035 """decorator for report callbacks."""
2029 """decorator for report callbacks."""
2036 # The repoview life cycle is shorter than the one of the actual
2030 # The repoview life cycle is shorter than the one of the actual
2037 # underlying repository. So the filtered object can die before the
2031 # underlying repository. So the filtered object can die before the
2038 # weakref is used leading to troubles. We keep a reference to the
2032 # weakref is used leading to troubles. We keep a reference to the
2039 # unfiltered object and restore the filtering when retrieving the
2033 # unfiltered object and restore the filtering when retrieving the
2040 # repository through the weakref.
2034 # repository through the weakref.
2041 filtername = repo.filtername
2035 filtername = repo.filtername
2042 reporef = weakref.ref(repo.unfiltered())
2036 reporef = weakref.ref(repo.unfiltered())
2043
2037
2044 def wrapped(tr):
2038 def wrapped(tr):
2045 repo = reporef()
2039 repo = reporef()
2046 if filtername:
2040 if filtername:
2047 assert repo is not None # help pytype
2041 assert repo is not None # help pytype
2048 repo = repo.filtered(filtername)
2042 repo = repo.filtered(filtername)
2049 func(repo, tr)
2043 func(repo, tr)
2050
2044
2051 newcat = b'%02i-txnreport' % len(categories)
2045 newcat = b'%02i-txnreport' % len(categories)
2052 if as_validator:
2046 if as_validator:
2053 otr.addvalidator(newcat, wrapped)
2047 otr.addvalidator(newcat, wrapped)
2054 else:
2048 else:
2055 otr.addpostclose(newcat, wrapped)
2049 otr.addpostclose(newcat, wrapped)
2056 categories.append(newcat)
2050 categories.append(newcat)
2057 return wrapped
2051 return wrapped
2058
2052
2059 @reportsummary
2053 @reportsummary
2060 def reportchangegroup(repo, tr):
2054 def reportchangegroup(repo, tr):
2061 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2055 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2062 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2056 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2063 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2057 cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2064 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2058 cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2065 if cgchangesets or cgrevisions or cgfiles:
2059 if cgchangesets or cgrevisions or cgfiles:
2066 htext = b""
2060 htext = b""
2067 if cgheads:
2061 if cgheads:
2068 htext = _(b" (%+d heads)") % cgheads
2062 htext = _(b" (%+d heads)") % cgheads
2069 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2063 msg = _(b"added %d changesets with %d changes to %d files%s\n")
2070 if as_validator:
2064 if as_validator:
2071 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2065 msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2072 assert repo is not None # help pytype
2066 assert repo is not None # help pytype
2073 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2067 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2074
2068
2075 if txmatch(_reportobsoletedsource):
2069 if txmatch(_reportobsoletedsource):
2076
2070
2077 @reportsummary
2071 @reportsummary
2078 def reportobsoleted(repo, tr):
2072 def reportobsoleted(repo, tr):
2079 obsoleted = obsutil.getobsoleted(repo, tr)
2073 obsoleted = obsutil.getobsoleted(repo, tr)
2080 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2074 newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2081 if newmarkers:
2075 if newmarkers:
2082 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2076 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2083 if obsoleted:
2077 if obsoleted:
2084 msg = _(b'obsoleted %i changesets\n')
2078 msg = _(b'obsoleted %i changesets\n')
2085 if as_validator:
2079 if as_validator:
2086 msg = _(b'obsoleting %i changesets\n')
2080 msg = _(b'obsoleting %i changesets\n')
2087 repo.ui.status(msg % len(obsoleted))
2081 repo.ui.status(msg % len(obsoleted))
2088
2082
2089 if obsolete.isenabled(
2083 if obsolete.isenabled(
2090 repo, obsolete.createmarkersopt
2084 repo, obsolete.createmarkersopt
2091 ) and repo.ui.configbool(
2085 ) and repo.ui.configbool(
2092 b'experimental', b'evolution.report-instabilities'
2086 b'experimental', b'evolution.report-instabilities'
2093 ):
2087 ):
2094 instabilitytypes = [
2088 instabilitytypes = [
2095 (b'orphan', b'orphan'),
2089 (b'orphan', b'orphan'),
2096 (b'phase-divergent', b'phasedivergent'),
2090 (b'phase-divergent', b'phasedivergent'),
2097 (b'content-divergent', b'contentdivergent'),
2091 (b'content-divergent', b'contentdivergent'),
2098 ]
2092 ]
2099
2093
2100 def getinstabilitycounts(repo):
2094 def getinstabilitycounts(repo):
2101 filtered = repo.changelog.filteredrevs
2095 filtered = repo.changelog.filteredrevs
2102 counts = {}
2096 counts = {}
2103 for instability, revset in instabilitytypes:
2097 for instability, revset in instabilitytypes:
2104 counts[instability] = len(
2098 counts[instability] = len(
2105 set(obsolete.getrevs(repo, revset)) - filtered
2099 set(obsolete.getrevs(repo, revset)) - filtered
2106 )
2100 )
2107 return counts
2101 return counts
2108
2102
2109 oldinstabilitycounts = getinstabilitycounts(repo)
2103 oldinstabilitycounts = getinstabilitycounts(repo)
2110
2104
2111 @reportsummary
2105 @reportsummary
2112 def reportnewinstabilities(repo, tr):
2106 def reportnewinstabilities(repo, tr):
2113 newinstabilitycounts = getinstabilitycounts(repo)
2107 newinstabilitycounts = getinstabilitycounts(repo)
2114 for instability, revset in instabilitytypes:
2108 for instability, revset in instabilitytypes:
2115 delta = (
2109 delta = (
2116 newinstabilitycounts[instability]
2110 newinstabilitycounts[instability]
2117 - oldinstabilitycounts[instability]
2111 - oldinstabilitycounts[instability]
2118 )
2112 )
2119 msg = getinstabilitymessage(delta, instability)
2113 msg = getinstabilitymessage(delta, instability)
2120 if msg:
2114 if msg:
2121 repo.ui.warn(msg)
2115 repo.ui.warn(msg)
2122
2116
2123 if txmatch(_reportnewcssource):
2117 if txmatch(_reportnewcssource):
2124
2118
2125 @reportsummary
2119 @reportsummary
2126 def reportnewcs(repo, tr):
2120 def reportnewcs(repo, tr):
2127 """Report the range of new revisions pulled/unbundled."""
2121 """Report the range of new revisions pulled/unbundled."""
2128 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2122 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2129 unfi = repo.unfiltered()
2123 unfi = repo.unfiltered()
2130 if origrepolen >= len(unfi):
2124 if origrepolen >= len(unfi):
2131 return
2125 return
2132
2126
2133 # Compute the bounds of new visible revisions' range.
2127 # Compute the bounds of new visible revisions' range.
2134 revs = smartset.spanset(repo, start=origrepolen)
2128 revs = smartset.spanset(repo, start=origrepolen)
2135 if revs:
2129 if revs:
2136 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2130 minrev, maxrev = repo[revs.min()], repo[revs.max()]
2137
2131
2138 if minrev == maxrev:
2132 if minrev == maxrev:
2139 revrange = minrev
2133 revrange = minrev
2140 else:
2134 else:
2141 revrange = b'%s:%s' % (minrev, maxrev)
2135 revrange = b'%s:%s' % (minrev, maxrev)
2142 draft = len(repo.revs(b'%ld and draft()', revs))
2136 draft = len(repo.revs(b'%ld and draft()', revs))
2143 secret = len(repo.revs(b'%ld and secret()', revs))
2137 secret = len(repo.revs(b'%ld and secret()', revs))
2144 if not (draft or secret):
2138 if not (draft or secret):
2145 msg = _(b'new changesets %s\n') % revrange
2139 msg = _(b'new changesets %s\n') % revrange
2146 elif draft and secret:
2140 elif draft and secret:
2147 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2141 msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2148 msg %= (revrange, draft, secret)
2142 msg %= (revrange, draft, secret)
2149 elif draft:
2143 elif draft:
2150 msg = _(b'new changesets %s (%d drafts)\n')
2144 msg = _(b'new changesets %s (%d drafts)\n')
2151 msg %= (revrange, draft)
2145 msg %= (revrange, draft)
2152 elif secret:
2146 elif secret:
2153 msg = _(b'new changesets %s (%d secrets)\n')
2147 msg = _(b'new changesets %s (%d secrets)\n')
2154 msg %= (revrange, secret)
2148 msg %= (revrange, secret)
2155 else:
2149 else:
2156 errormsg = b'entered unreachable condition'
2150 errormsg = b'entered unreachable condition'
2157 raise error.ProgrammingError(errormsg)
2151 raise error.ProgrammingError(errormsg)
2158 repo.ui.status(msg)
2152 repo.ui.status(msg)
2159
2153
2160 # search new changesets directly pulled as obsolete
2154 # search new changesets directly pulled as obsolete
2161 duplicates = tr.changes.get(b'revduplicates', ())
2155 duplicates = tr.changes.get(b'revduplicates', ())
2162 obsadded = unfi.revs(
2156 obsadded = unfi.revs(
2163 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2157 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2164 )
2158 )
2165 cl = repo.changelog
2159 cl = repo.changelog
2166 extinctadded = [r for r in obsadded if r not in cl]
2160 extinctadded = [r for r in obsadded if r not in cl]
2167 if extinctadded:
2161 if extinctadded:
2168 # They are not just obsolete, but obsolete and invisible
2162 # They are not just obsolete, but obsolete and invisible
2169 # we call them "extinct" internally but the terms have not been
2163 # we call them "extinct" internally but the terms have not been
2170 # exposed to users.
2164 # exposed to users.
2171 msg = b'(%d other changesets obsolete on arrival)\n'
2165 msg = b'(%d other changesets obsolete on arrival)\n'
2172 repo.ui.status(msg % len(extinctadded))
2166 repo.ui.status(msg % len(extinctadded))
2173
2167
2174 @reportsummary
2168 @reportsummary
2175 def reportphasechanges(repo, tr):
2169 def reportphasechanges(repo, tr):
2176 """Report statistics of phase changes for changesets pre-existing
2170 """Report statistics of phase changes for changesets pre-existing
2177 pull/unbundle.
2171 pull/unbundle.
2178 """
2172 """
2179 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2173 origrepolen = tr.changes.get(b'origrepolen', len(repo))
2180 published = []
2174 published = []
2181 for revs, (old, new) in tr.changes.get(b'phases', []):
2175 for revs, (old, new) in tr.changes.get(b'phases', []):
2182 if new != phases.public:
2176 if new != phases.public:
2183 continue
2177 continue
2184 published.extend(rev for rev in revs if rev < origrepolen)
2178 published.extend(rev for rev in revs if rev < origrepolen)
2185 if not published:
2179 if not published:
2186 return
2180 return
2187 msg = _(b'%d local changesets published\n')
2181 msg = _(b'%d local changesets published\n')
2188 if as_validator:
2182 if as_validator:
2189 msg = _(b'%d local changesets will be published\n')
2183 msg = _(b'%d local changesets will be published\n')
2190 repo.ui.status(msg % len(published))
2184 repo.ui.status(msg % len(published))
2191
2185
2192
2186
def getinstabilitymessage(delta, instability):
    """Return the warning message for newly introduced instabilities.

    Kept as a standalone function so that extensions can wrap it to show
    extra information, e.g. hints on how to fix the instabilities.
    Returns None when *delta* is not positive (nothing to warn about).
    """
    if delta <= 0:
        # no new instabilities of this kind -> no message
        return None
    return _(b'%i new %s changesets\n') % (delta, instability)
2200
2194
2201
2195
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of *nodes* as space-separated short hashes.

    All nodes are listed when there are at most *maxnumnodes* of them or
    when the ui is verbose; otherwise only the first *maxnumnodes* are
    shown, followed by an "and N others" suffix.
    """
    shorthashes = [short(h) for h in nodes]
    if repo.ui.verbose or len(shorthashes) <= maxnumnodes:
        return b' '.join(shorthashes)
    shown = b' '.join(shorthashes[:maxnumnodes])
    hidden = len(shorthashes) - maxnumnodes
    return _(b"%s and %d others") % (shown, hidden)
2207
2201
2208
2202
def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """Abort if any named branch ends up with more than one visible head.

    *accountclosed* makes closed heads count as well. The check is skipped
    entirely for strip/repair transactions, which legitimately pass through
    intermediate multi-head states.
    """
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branch
    branchmap = visible.branchmap()
    for branch in branchmap:
        heads = branchmap.branchheads(branch, closed=accountclosed)
        if len(heads) <= 1:
            continue
        msg = _(b'rejecting multiple heads on branch "%s"') % branch
        hint = _(b'%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
2225
2219
2226
2220
def wrapconvertsink(sink):
    """Hook point letting extensions wrap convcmd.convertsink()'s result.

    Called on every sink before it is used, whether or not the convert
    extension was formally loaded. The default implementation is the
    identity: the sink is returned unchanged.
    """
    return sink
2232
2226
2233
2227
def unhidehashlikerevs(repo, specs, hiddentype):
    """Give access to hidden changesets named by hash/revnum in *specs*.

    hiddentype selects the behaviour:
      - b'warn':   warn while unhiding changesets
      - b'nowarn': don't warn while unhiding changesets

    Returns a repo object (possibly a new filtered view) in which the
    requested changesets are unhidden.
    """
    # feature gate: requires a filtered repo and the directaccess knob
    # (short-circuit: config is only consulted when a filter is active)
    if not (
        repo.filtername
        and repo.ui.configbool(b'experimental', b'directaccess')
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    # collect every hash-like symbol appearing in the user-supplied specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:
            # bad spec: will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[r]) for r in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use new filtername to separate branch/tags cache until we can
    # disbale these cache when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)
2282
2276
2283
2277
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    # cl is the filtered changelog: a rev present in unficl but absent from
    # cl is hidden in the current view
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            # first, try to read the symbol as a plain revision number
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    # revnum-based direct access is disabled: drop this
                    # symbol entirely (it is not retried as a hex prefix)
                    continue
                else:
                    if n not in cl:
                        # a valid revnum missing from the filtered view is
                        # hidden: record it and move on
                        revs.add(n)
                        continue
        except ValueError:
            # not an integer: fall through to nodeid-prefix resolution
            pass

        # otherwise, resolve the symbol as a (possibly abbreviated) hex
        # nodeid against the unfiltered repo
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # resolved node exists but is hidden in the filtered view
                revs.add(rev)

    return revs
2317
2311
2318
2312
def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark.

    These are the ancestors of the bookmark, minus anything also reachable
    from a non-bookmarked head or from a different bookmark.
    """
    expr = (
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))"
    )
    return repo.revs(expr, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now