scmutil: make shortest() respect disambiguation revset...
Martin von Zweigbergk
r38879:6f7c9527 default
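This change teaches shortest() to honor the experimental
experimental.revisions.disambiguatewithin revset: a shortened hash now only
needs to be unambiguous among the revisions in that revset (and must not be
readable as a revision number). A sketch of the resulting behavior (the hash
values are invented):

  $ hg log -r . -T '{shortest(node, 1)}\n' \
        --config "experimental.revisions.disambiguatewithin=not public()"
  1f

A prefix like 1f may match several nodes repo-wide, but as long as it matches
exactly one revision inside 'not public()' it is accepted as the shortest form.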
@@ -1,1718 +1,1733 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
50 50 if pycompat.iswindows:
51 51 from . import scmwindows as scmplatform
52 52 else:
53 53 from . import scmposix as scmplatform
54 54
55 55 termsize = scmplatform.termsize
56 56
57 57 class status(tuple):
58 58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
59 59 and 'ignored' properties are only relevant to the working copy.
60 60 '''
61 61
62 62 __slots__ = ()
63 63
64 64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
65 65 clean):
66 66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
67 67 ignored, clean))
68 68
69 69 @property
70 70 def modified(self):
71 71 '''files that have been modified'''
72 72 return self[0]
73 73
74 74 @property
75 75 def added(self):
76 76 '''files that have been added'''
77 77 return self[1]
78 78
79 79 @property
80 80 def removed(self):
81 81 '''files that have been removed'''
82 82 return self[2]
83 83
84 84 @property
85 85 def deleted(self):
86 86 '''files that are in the dirstate, but have been deleted from the
87 87 working copy (aka "missing")
88 88 '''
89 89 return self[3]
90 90
91 91 @property
92 92 def unknown(self):
93 93 '''files not in the dirstate that are not ignored'''
94 94 return self[4]
95 95
96 96 @property
97 97 def ignored(self):
98 98 '''files not in the dirstate that are ignored (by _dirignore())'''
99 99 return self[5]
100 100
101 101 @property
102 102 def clean(self):
103 103 '''files that have not been modified'''
104 104 return self[6]
105 105
106 106 def __repr__(self, *args, **kwargs):
107 107 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
108 108 r'unknown=%s, ignored=%s, clean=%s>') %
109 109 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
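# A minimal sketch of consuming the status tuple (file names invented):
#   st = status([b'modified.txt'], [], [], [], [], [], [b'clean.txt'])
#   st.modified -> [b'modified.txt']; st.clean -> [b'clean.txt']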
110 110
111 111 def itersubrepos(ctx1, ctx2):
112 112 """find subrepos in ctx1 or ctx2"""
113 113 # Create a (subpath, ctx) mapping where we prefer subpaths from
114 114 # ctx1. The subpaths from ctx2 are important when the .hgsub file
115 115 # has been modified (in ctx2) but not yet committed (in ctx1).
116 116 subpaths = dict.fromkeys(ctx2.substate, ctx2)
117 117 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
118 118
119 119 missing = set()
120 120
121 121 for subpath in ctx2.substate:
122 122 if subpath not in ctx1.substate:
123 123 del subpaths[subpath]
124 124 missing.add(subpath)
125 125
126 126 for subpath, ctx in sorted(subpaths.iteritems()):
127 127 yield subpath, ctx.sub(subpath)
128 128
129 129 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
130 130 # status and diff will have an accurate result when it does
131 131 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
132 132 # against itself.
133 133 for subpath in missing:
134 134 yield subpath, ctx2.nullsub(subpath, ctx1)
135 135
136 136 def nochangesfound(ui, repo, excluded=None):
137 137 '''Report no changes for push/pull. 'excluded' is None or a list of
138 138 nodes excluded from the push/pull.
139 139 '''
140 140 secretlist = []
141 141 if excluded:
142 142 for n in excluded:
143 143 ctx = repo[n]
144 144 if ctx.phase() >= phases.secret and not ctx.extinct():
145 145 secretlist.append(n)
146 146
147 147 if secretlist:
148 148 ui.status(_("no changes found (ignored %d secret changesets)\n")
149 149 % len(secretlist))
150 150 else:
151 151 ui.status(_("no changes found\n"))
152 152
153 153 def callcatch(ui, func):
154 154 """call func() with global exception handling
155 155
156 156 return func() if no exception happens. otherwise do some error handling
157 157 and return an exit code accordingly. does not handle all exceptions.
158 158 """
159 159 try:
160 160 try:
161 161 return func()
162 162 except: # re-raises
163 163 ui.traceback()
164 164 raise
165 165 # Global exception handling, alphabetically
166 166 # Mercurial-specific first, followed by built-in and library exceptions
167 167 except error.LockHeld as inst:
168 168 if inst.errno == errno.ETIMEDOUT:
169 169 reason = _('timed out waiting for lock held by %r') % inst.locker
170 170 else:
171 171 reason = _('lock held by %r') % inst.locker
172 172 ui.error(_("abort: %s: %s\n") % (
173 173 inst.desc or stringutil.forcebytestr(inst.filename), reason))
174 174 if not inst.locker:
175 175 ui.error(_("(lock might be very busy)\n"))
176 176 except error.LockUnavailable as inst:
177 177 ui.error(_("abort: could not lock %s: %s\n") %
178 178 (inst.desc or stringutil.forcebytestr(inst.filename),
179 179 encoding.strtolocal(inst.strerror)))
180 180 except error.OutOfBandError as inst:
181 181 if inst.args:
182 182 msg = _("abort: remote error:\n")
183 183 else:
184 184 msg = _("abort: remote error\n")
185 185 ui.error(msg)
186 186 if inst.args:
187 187 ui.error(''.join(inst.args))
188 188 if inst.hint:
189 189 ui.error('(%s)\n' % inst.hint)
190 190 except error.RepoError as inst:
191 191 ui.error(_("abort: %s!\n") % inst)
192 192 if inst.hint:
193 193 ui.error(_("(%s)\n") % inst.hint)
194 194 except error.ResponseError as inst:
195 195 ui.error(_("abort: %s") % inst.args[0])
196 196 msg = inst.args[1]
197 197 if isinstance(msg, type(u'')):
198 198 msg = pycompat.sysbytes(msg)
199 199 if not isinstance(msg, bytes):
200 200 ui.error(" %r\n" % (msg,))
201 201 elif not msg:
202 202 ui.error(_(" empty string\n"))
203 203 else:
204 204 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
205 205 except error.CensoredNodeError as inst:
206 206 ui.error(_("abort: file censored %s!\n") % inst)
207 207 except error.RevlogError as inst:
208 208 ui.error(_("abort: %s!\n") % inst)
209 209 except error.InterventionRequired as inst:
210 210 ui.error("%s\n" % inst)
211 211 if inst.hint:
212 212 ui.error(_("(%s)\n") % inst.hint)
213 213 return 1
214 214 except error.WdirUnsupported:
215 215 ui.error(_("abort: working directory revision cannot be specified\n"))
216 216 except error.Abort as inst:
217 217 ui.error(_("abort: %s\n") % inst)
218 218 if inst.hint:
219 219 ui.error(_("(%s)\n") % inst.hint)
220 220 except ImportError as inst:
221 221 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
222 222 m = stringutil.forcebytestr(inst).split()[-1]
223 223 if m in "mpatch bdiff".split():
224 224 ui.error(_("(did you forget to compile extensions?)\n"))
225 225 elif m in "zlib".split():
226 226 ui.error(_("(is your Python install correct?)\n"))
227 227 except IOError as inst:
228 228 if util.safehasattr(inst, "code"):
229 229 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
230 230 elif util.safehasattr(inst, "reason"):
231 231 try: # usually it is in the form (errno, strerror)
232 232 reason = inst.reason.args[1]
233 233 except (AttributeError, IndexError):
234 234 # it might be anything, for example a string
235 235 reason = inst.reason
236 236 if isinstance(reason, pycompat.unicode):
237 237 # SSLError of Python 2.7.9 contains a unicode
238 238 reason = encoding.unitolocal(reason)
239 239 ui.error(_("abort: error: %s\n") % reason)
240 240 elif (util.safehasattr(inst, "args")
241 241 and inst.args and inst.args[0] == errno.EPIPE):
242 242 pass
243 243 elif getattr(inst, "strerror", None):
244 244 if getattr(inst, "filename", None):
245 245 ui.error(_("abort: %s: %s\n") % (
246 246 encoding.strtolocal(inst.strerror),
247 247 stringutil.forcebytestr(inst.filename)))
248 248 else:
249 249 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
250 250 else:
251 251 raise
252 252 except OSError as inst:
253 253 if getattr(inst, "filename", None) is not None:
254 254 ui.error(_("abort: %s: '%s'\n") % (
255 255 encoding.strtolocal(inst.strerror),
256 256 stringutil.forcebytestr(inst.filename)))
257 257 else:
258 258 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
259 259 except MemoryError:
260 260 ui.error(_("abort: out of memory\n"))
261 261 except SystemExit as inst:
262 262 # Commands shouldn't sys.exit directly, but give a return code.
263 263 # Just in case catch this and and pass exit code to caller.
264 264 return inst.code
265 265 except socket.error as inst:
266 266 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
267 267
268 268 return -1
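# Sketch of typical use (the command callable is invented; dispatch wires
# this up in a similar way):
#   ret = callcatch(ui, lambda: runcommand(ui, repo))
# error.Abort and friends print "abort: ..." and yield -1, while
# error.InterventionRequired yields 1 and SystemExit passes its code through.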
269 269
270 270 def checknewlabel(repo, lbl, kind):
271 271 # Do not use the "kind" parameter in ui output.
272 272 # It makes strings difficult to translate.
273 273 if lbl in ['tip', '.', 'null']:
274 274 raise error.Abort(_("the name '%s' is reserved") % lbl)
275 275 for c in (':', '\0', '\n', '\r'):
276 276 if c in lbl:
277 277 raise error.Abort(
278 278 _("%r cannot be used in a name") % pycompat.bytestr(c))
279 279 try:
280 280 int(lbl)
281 281 raise error.Abort(_("cannot use an integer as a name"))
282 282 except ValueError:
283 283 pass
284 284 if lbl.strip() != lbl:
285 285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286 286
287 287 def checkfilename(f):
288 288 '''Check that the filename f is an acceptable filename for a tracked file'''
289 289 if '\r' in f or '\n' in f:
290 290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
291 291 % pycompat.bytestr(f))
292 292
293 293 def checkportable(ui, f):
294 294 '''Check if filename f is portable and warn or abort depending on config'''
295 295 checkfilename(f)
296 296 abort, warn = checkportabilityalert(ui)
297 297 if abort or warn:
298 298 msg = util.checkwinfilename(f)
299 299 if msg:
300 300 msg = "%s: %s" % (msg, procutil.shellquote(f))
301 301 if abort:
302 302 raise error.Abort(msg)
303 303 ui.warn(_("warning: %s\n") % msg)
304 304
305 305 def checkportabilityalert(ui):
306 306 '''check if the user's config requests nothing, a warning, or abort for
307 307 non-portable filenames'''
308 308 val = ui.config('ui', 'portablefilenames')
309 309 lval = val.lower()
310 310 bval = stringutil.parsebool(val)
311 311 abort = pycompat.iswindows or lval == 'abort'
312 312 warn = bval or lval == 'warn'
313 313 if bval is None and not (warn or abort or lval == 'ignore'):
314 314 raise error.ConfigError(
315 315 _("ui.portablefilenames value is invalid ('%s')") % val)
316 316 return abort, warn
317 317
318 318 class casecollisionauditor(object):
319 319 def __init__(self, ui, abort, dirstate):
320 320 self._ui = ui
321 321 self._abort = abort
322 322 allfiles = '\0'.join(dirstate._map)
323 323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
324 324 self._dirstate = dirstate
325 325 # The purpose of _newfiles is so that we don't complain about
326 326 # case collisions if someone were to call this object with the
327 327 # same filename twice.
328 328 self._newfiles = set()
329 329
330 330 def __call__(self, f):
331 331 if f in self._newfiles:
332 332 return
333 333 fl = encoding.lower(f)
334 334 if fl in self._loweredfiles and f not in self._dirstate:
335 335 msg = _('possible case-folding collision for %s') % f
336 336 if self._abort:
337 337 raise error.Abort(msg)
338 338 self._ui.warn(_("warning: %s\n") % msg)
339 339 self._loweredfiles.add(fl)
340 340 self._newfiles.add(f)
341 341
342 342 def filteredhash(repo, maxrev):
343 343 """build hash of filtered revisions in the current repoview.
344 344
345 345 Multiple caches perform up-to-date validation by checking that the
346 346 tiprev and tipnode stored in the cache file match the current repository.
347 347 However, this is not sufficient for validating repoviews because the set
348 348 of revisions in the view may change without the repository tiprev and
349 349 tipnode changing.
350 350
351 351 This function hashes all the revs filtered from the view and returns
352 352 that SHA-1 digest.
353 353 """
354 354 cl = repo.changelog
355 355 if not cl.filteredrevs:
356 356 return None
357 357 key = None
358 358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
359 359 if revs:
360 360 s = hashlib.sha1()
361 361 for rev in revs:
362 362 s.update('%d;' % rev)
363 363 key = s.digest()
364 364 return key
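# Sketch of the key derivation (rev numbers invented): a view filtering
# revs {2, 5} with maxrev >= 5 hashes the byte string '2;5;', so a view
# filtering only {2} gets a different key and stale caches are detected.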
365 365
366 366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
367 367 '''yield every hg repository under path, always recursively.
368 368 The recurse flag will only control recursion into repo working dirs'''
369 369 def errhandler(err):
370 370 if err.filename == path:
371 371 raise err
372 372 samestat = getattr(os.path, 'samestat', None)
373 373 if followsym and samestat is not None:
374 374 def adddir(dirlst, dirname):
375 375 dirstat = os.stat(dirname)
376 376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
377 377 if not match:
378 378 dirlst.append(dirstat)
379 379 return not match
380 380 else:
381 381 followsym = False
382 382
383 383 if (seen_dirs is None) and followsym:
384 384 seen_dirs = []
385 385 adddir(seen_dirs, path)
386 386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
387 387 dirs.sort()
388 388 if '.hg' in dirs:
389 389 yield root # found a repository
390 390 qroot = os.path.join(root, '.hg', 'patches')
391 391 if os.path.isdir(os.path.join(qroot, '.hg')):
392 392 yield qroot # we have a patch queue repo here
393 393 if recurse:
394 394 # avoid recursing inside the .hg directory
395 395 dirs.remove('.hg')
396 396 else:
397 397 dirs[:] = [] # don't descend further
398 398 elif followsym:
399 399 newdirs = []
400 400 for d in dirs:
401 401 fname = os.path.join(root, d)
402 402 if adddir(seen_dirs, fname):
403 403 if os.path.islink(fname):
404 404 for hgname in walkrepos(fname, True, seen_dirs):
405 405 yield hgname
406 406 else:
407 407 newdirs.append(d)
408 408 dirs[:] = newdirs
409 409
410 410 def binnode(ctx):
411 411 """Return binary node id for a given basectx"""
412 412 node = ctx.node()
413 413 if node is None:
414 414 return wdirid
415 415 return node
416 416
417 417 def intrev(ctx):
418 418 """Return integer for a given basectx that can be used in comparison or
419 419 arithmetic operation"""
420 420 rev = ctx.rev()
421 421 if rev is None:
422 422 return wdirrev
423 423 return rev
424 424
425 425 def formatchangeid(ctx):
426 426 """Format changectx as '{rev}:{node|formatnode}', which is the default
427 427 template provided by logcmdutil.changesettemplater"""
428 428 repo = ctx.repo()
429 429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
430 430
431 431 def formatrevnode(ui, rev, node):
432 432 """Format given revision and node depending on the current verbosity"""
433 433 if ui.debugflag:
434 434 hexfunc = hex
435 435 else:
436 436 hexfunc = short
437 437 return '%d:%s' % (rev, hexfunc(node))
438 438
439 439 def resolvehexnodeidprefix(repo, prefix):
440 440 try:
441 441 # Uses unfiltered repo because it's faster when prefix is ambiguous.
442 442 # This matches the shortesthexnodeidprefix() function below.
443 443 node = repo.unfiltered().changelog._partialmatch(prefix)
444 444 except error.AmbiguousPrefixLookupError:
445 445 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
446 446 if revset:
447 447 # Clear config to avoid infinite recursion
448 448 configoverrides = {('experimental',
449 449 'revisions.disambiguatewithin'): None}
450 450 with repo.ui.configoverride(configoverrides):
451 451 revs = repo.anyrevs([revset], user=True)
452 452 matches = []
453 453 for rev in revs:
454 454 node = repo.changelog.node(rev)
455 455 if hex(node).startswith(prefix):
456 456 matches.append(node)
457 457 if len(matches) == 1:
458 458 return matches[0]
459 459 raise
460 460 if node is None:
461 461 return
462 462 repo.changelog.rev(node) # make sure node isn't filtered
463 463 return node
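# Illustrative behavior (hashes invented): if '1f' matches two nodes
# repo-wide, _partialmatch() raises AmbiguousPrefixLookupError; when
# experimental.revisions.disambiguatewithin is set (e.g. to 'not public()')
# and exactly one candidate is in that revset, that node is returned
# instead of propagating the error.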
464 464
465 465 def shortesthexnodeidprefix(repo, node, minlength=1):
466 466 """Find the shortest unambiguous prefix that matches hexnode."""
467 467 # _partialmatch() of filtered changelog could take O(len(repo)) time,
468 468 # which would be unacceptably slow. so we look for hash collision in
469 469 # unfiltered space, which means some hashes may be slightly longer.
470 470 cl = repo.unfiltered().changelog
471 471
472 472 def isrev(prefix):
473 473 try:
474 474 i = int(prefix)
475 475 # if we are a pure int, then starting with zero will not be
476 476 # confused as a rev; nor, obviously, will an int larger
477 477 # than the value of the tip rev
478 478 if prefix[0:1] == b'0' or i > len(cl):
479 479 return False
480 480 return True
481 481 except ValueError:
482 482 return False
483 483
484 484 def disambiguate(prefix):
485 485 """Disambiguate against revnums."""
486 486 hexnode = hex(node)
487 487 for length in range(len(prefix), len(hexnode) + 1):
488 488 prefix = hexnode[:length]
489 489 if not isrev(prefix):
490 490 return prefix
491 491
492 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
493 if revset:
494 revs = repo.anyrevs([revset], user=True)
495 if cl.rev(node) in revs:
496 hexnode = hex(node)
497 for length in range(minlength, len(hexnode) + 1):
498 matches = []
499 prefix = hexnode[:length]
500 for rev in revs:
501 otherhexnode = repo[rev].hex()
502 if prefix == otherhexnode[:length]:
503 matches.append(otherhexnode)
504 if len(matches) == 1:
505 return disambiguate(prefix)
506
492 507 try:
493 508 return disambiguate(cl.shortest(node, minlength))
494 509 except error.LookupError:
495 510 raise error.RepoLookupError()
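# Sketch of the new lookup (hashes invented): for a node 1f3a... inside the
# configured revset, the loop above returns the shortest prefix of at least
# minlength that no other revset member shares, e.g. '1f', and
# disambiguate() only extends it if it could be read as a revnum. Nodes
# outside the revset still fall through to cl.shortest(), which is unique
# across the whole unfiltered repo.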
496 511
497 512 def isrevsymbol(repo, symbol):
498 513 """Checks if a symbol exists in the repo.
499 514
500 515 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
501 516 symbol is an ambiguous nodeid prefix.
502 517 """
503 518 try:
504 519 revsymbol(repo, symbol)
505 520 return True
506 521 except error.RepoLookupError:
507 522 return False
508 523
509 524 def revsymbol(repo, symbol):
510 525 """Returns a context given a single revision symbol (as string).
511 526
512 527 This is similar to revsingle(), but accepts only a single revision symbol,
513 528 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
514 529 not "max(public())".
515 530 """
516 531 if not isinstance(symbol, bytes):
517 532 msg = ("symbol (%s of type %s) was not a string, did you mean "
518 533 "repo[symbol]?" % (symbol, type(symbol)))
519 534 raise error.ProgrammingError(msg)
520 535 try:
521 536 if symbol in ('.', 'tip', 'null'):
522 537 return repo[symbol]
523 538
524 539 try:
525 540 r = int(symbol)
526 541 if '%d' % r != symbol:
527 542 raise ValueError
528 543 l = len(repo.changelog)
529 544 if r < 0:
530 545 r += l
531 546 if r < 0 or r >= l and r != wdirrev:
532 547 raise ValueError
533 548 return repo[r]
534 549 except error.FilteredIndexError:
535 550 raise
536 551 except (ValueError, OverflowError, IndexError):
537 552 pass
538 553
539 554 if len(symbol) == 40:
540 555 try:
541 556 node = bin(symbol)
542 557 rev = repo.changelog.rev(node)
543 558 return repo[rev]
544 559 except error.FilteredLookupError:
545 560 raise
546 561 except (TypeError, LookupError):
547 562 pass
548 563
549 564 # look up bookmarks through the name interface
550 565 try:
551 566 node = repo.names.singlenode(repo, symbol)
552 567 rev = repo.changelog.rev(node)
553 568 return repo[rev]
554 569 except KeyError:
555 570 pass
556 571
557 572 node = resolvehexnodeidprefix(repo, symbol)
558 573 if node is not None:
559 574 rev = repo.changelog.rev(node)
560 575 return repo[rev]
561 576
562 577 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
563 578
564 579 except error.WdirUnsupported:
565 580 return repo[None]
566 581 except (error.FilteredIndexError, error.FilteredLookupError,
567 582 error.FilteredRepoLookupError):
568 583 raise _filterederror(repo, symbol)
569 584
570 585 def _filterederror(repo, changeid):
571 586 """build an exception to be raised about a filtered changeid
572 587
573 588 This is extracted in a function to help extensions (eg: evolve) to
574 589 experiment with various message variants."""
575 590 if repo.filtername.startswith('visible'):
576 591
577 592 # Check if the changeset is obsolete
578 593 unfilteredrepo = repo.unfiltered()
579 594 ctx = revsymbol(unfilteredrepo, changeid)
580 595
581 596 # If the changeset is obsolete, enrich the message with the reason
582 597 # that made this changeset not visible
583 598 if ctx.obsolete():
584 599 msg = obsutil._getfilteredreason(repo, changeid, ctx)
585 600 else:
586 601 msg = _("hidden revision '%s'") % changeid
587 602
588 603 hint = _('use --hidden to access hidden revisions')
589 604
590 605 return error.FilteredRepoLookupError(msg, hint=hint)
591 606 msg = _("filtered revision '%s' (not in '%s' subset)")
592 607 msg %= (changeid, repo.filtername)
593 608 return error.FilteredRepoLookupError(msg)
594 609
595 610 def revsingle(repo, revspec, default='.', localalias=None):
596 611 if not revspec and revspec != 0:
597 612 return repo[default]
598 613
599 614 l = revrange(repo, [revspec], localalias=localalias)
600 615 if not l:
601 616 raise error.Abort(_('empty revision set'))
602 617 return repo[l.last()]
603 618
604 619 def _pairspec(revspec):
605 620 tree = revsetlang.parse(revspec)
606 621 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
607 622
608 623 def revpair(repo, revs):
609 624 if not revs:
610 625 return repo['.'], repo[None]
611 626
612 627 l = revrange(repo, revs)
613 628
614 629 if not l:
615 630 first = second = None
616 631 elif l.isascending():
617 632 first = l.min()
618 633 second = l.max()
619 634 elif l.isdescending():
620 635 first = l.max()
621 636 second = l.min()
622 637 else:
623 638 first = l.first()
624 639 second = l.last()
625 640
626 641 if first is None:
627 642 raise error.Abort(_('empty revision range'))
628 643 if (first == second and len(revs) >= 2
629 644 and not all(revrange(repo, [r]) for r in revs)):
630 645 raise error.Abort(_('empty revision on one side of range'))
631 646
632 647 # if top-level is range expression, the result must always be a pair
633 648 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
634 649 return repo[first], repo[None]
635 650
636 651 return repo[first], repo[second]
637 652
638 653 def revrange(repo, specs, localalias=None):
639 654 """Execute 1 to many revsets and return the union.
640 655
641 656 This is the preferred mechanism for executing revsets using user-specified
642 657 config options, such as revset aliases.
643 658
644 659 The revsets specified by ``specs`` will be executed via a chained ``OR``
645 660 expression. If ``specs`` is empty, an empty result is returned.
646 661
647 662 ``specs`` can contain integers, in which case they are assumed to be
648 663 revision numbers.
649 664
650 665 It is assumed the revsets are already formatted. If you have arguments
651 666 that need to be expanded in the revset, call ``revsetlang.formatspec()``
652 667 and pass the result as an element of ``specs``.
653 668
654 669 Specifying a single revset is allowed.
655 670
656 671 Returns a ``revset.abstractsmartset`` which is a list-like interface over
657 672 integer revisions.
658 673 """
659 674 allspecs = []
660 675 for spec in specs:
661 676 if isinstance(spec, int):
662 677 spec = revsetlang.formatspec('rev(%d)', spec)
663 678 allspecs.append(spec)
664 679 return repo.anyrevs(allspecs, user=True, localalias=localalias)
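# Illustrative call (the revset strings are invented):
#   revs = revrange(repo, [b'::.', revsetlang.formatspec(b'%d:%d', 0, 5)])
# executes both revsets as one OR expression and returns the union as a
# smartset; plain integers in specs would be wrapped as rev(N) first.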
665 680
666 681 def meaningfulparents(repo, ctx):
667 682 """Return list of meaningful (or all if debug) parentrevs for rev.
668 683
669 684 For merges (two non-nullrev revisions) both parents are meaningful.
670 685 Otherwise the first parent revision is considered meaningful if it
671 686 is not the preceding revision.
672 687 """
673 688 parents = ctx.parents()
674 689 if len(parents) > 1:
675 690 return parents
676 691 if repo.ui.debugflag:
677 692 return [parents[0], repo['null']]
678 693 if parents[0].rev() >= intrev(ctx) - 1:
679 694 return []
680 695 return parents
681 696
682 697 def expandpats(pats):
683 698 '''Expand bare globs when running on windows.
684 699 On posix we assume it has already been done by sh.'''
685 700 if not util.expandglobs:
686 701 return list(pats)
687 702 ret = []
688 703 for kindpat in pats:
689 704 kind, pat = matchmod._patsplit(kindpat, None)
690 705 if kind is None:
691 706 try:
692 707 globbed = glob.glob(pat)
693 708 except re.error:
694 709 globbed = [pat]
695 710 if globbed:
696 711 ret.extend(globbed)
697 712 continue
698 713 ret.append(kindpat)
699 714 return ret
700 715
701 716 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
702 717 badfn=None):
703 718 '''Return a matcher and the patterns that were used.
704 719 The matcher will warn about bad matches, unless an alternate badfn callback
705 720 is provided.'''
706 721 if pats == ("",):
707 722 pats = []
708 723 if opts is None:
709 724 opts = {}
710 725 if not globbed and default == 'relpath':
711 726 pats = expandpats(pats or [])
712 727
713 728 def bad(f, msg):
714 729 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
715 730
716 731 if badfn is None:
717 732 badfn = bad
718 733
719 734 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
720 735 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
721 736
722 737 if m.always():
723 738 pats = []
724 739 return m, pats
725 740
726 741 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
727 742 badfn=None):
728 743 '''Return a matcher that will warn about bad matches.'''
729 744 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
730 745
731 746 def matchall(repo):
732 747 '''Return a matcher that will efficiently match everything.'''
733 748 return matchmod.always(repo.root, repo.getcwd())
734 749
735 750 def matchfiles(repo, files, badfn=None):
736 751 '''Return a matcher that will efficiently match exactly these files.'''
737 752 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
738 753
739 754 def parsefollowlinespattern(repo, rev, pat, msg):
740 755 """Return a file name from `pat` pattern suitable for usage in followlines
741 756 logic.
742 757 """
743 758 if not matchmod.patkind(pat):
744 759 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
745 760 else:
746 761 ctx = repo[rev]
747 762 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
748 763 files = [f for f in ctx if m(f)]
749 764 if len(files) != 1:
750 765 raise error.ParseError(msg)
751 766 return files[0]
752 767
753 768 def origpath(ui, repo, filepath):
754 769 '''customize where .orig files are created
755 770
756 771 Fetch user defined path from config file: [ui] origbackuppath = <path>
757 772 Fall back to default (filepath with .orig suffix) if not specified
758 773 '''
759 774 origbackuppath = ui.config('ui', 'origbackuppath')
760 775 if not origbackuppath:
761 776 return filepath + ".orig"
762 777
763 778 # Convert filepath from an absolute path into a path inside the repo.
764 779 filepathfromroot = util.normpath(os.path.relpath(filepath,
765 780 start=repo.root))
766 781
767 782 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
768 783 origbackupdir = origvfs.dirname(filepathfromroot)
769 784 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
770 785 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
771 786
772 787 # Remove any files that conflict with the backup file's path
773 788 for f in reversed(list(util.finddirs(filepathfromroot))):
774 789 if origvfs.isfileorlink(f):
775 790 ui.note(_('removing conflicting file: %s\n')
776 791 % origvfs.join(f))
777 792 origvfs.unlink(f)
778 793 break
779 794
780 795 origvfs.makedirs(origbackupdir)
781 796
782 797 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
783 798 ui.note(_('removing conflicting directory: %s\n')
784 799 % origvfs.join(filepathfromroot))
785 800 origvfs.rmtree(filepathfromroot, forcibly=True)
786 801
787 802 return origvfs.join(filepathfromroot)
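# Example mapping (paths invented): with "[ui] origbackuppath=.hg/origbackups",
# origpath(ui, repo, b'dir/file') returns the path of
# .hg/origbackups/dir/file instead of dir/file.orig, creating directories
# and removing conflicting files or links along the way.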
788 803
789 804 class _containsnode(object):
790 805 """proxy __contains__(node) to container.__contains__ which accepts revs"""
791 806
792 807 def __init__(self, repo, revcontainer):
793 808 self._torev = repo.changelog.rev
794 809 self._revcontains = revcontainer.__contains__
795 810
796 811 def __contains__(self, node):
797 812 return self._revcontains(self._torev(node))
798 813
799 814 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
800 815 fixphase=False, targetphase=None, backup=True):
801 816 """do common cleanups when old nodes are replaced by new nodes
802 817
803 818 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
804 819 (we might also want to move working directory parent in the future)
805 820
806 821 By default, bookmark moves are calculated automatically from 'replacements',
807 822 but 'moves' can be used to override that. Also, 'moves' may include
808 823 additional bookmark moves that should not have associated obsmarkers.
809 824
810 825 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
811 826 have replacements. operation is a string, like "rebase".
812 827
813 828 metadata is a dictionary containing metadata to be stored in obsmarker if
814 829 obsolescence is enabled.
815 830 """
816 831 assert fixphase or targetphase is None
817 832 if not replacements and not moves:
818 833 return
819 834
820 835 # translate mapping's other forms
821 836 if not util.safehasattr(replacements, 'items'):
822 837 replacements = {n: () for n in replacements}
823 838
824 839 # Calculate bookmark movements
825 840 if moves is None:
826 841 moves = {}
827 842 # Unfiltered repo is needed since nodes in replacements might be hidden.
828 843 unfi = repo.unfiltered()
829 844 for oldnode, newnodes in replacements.items():
830 845 if oldnode in moves:
831 846 continue
832 847 if len(newnodes) > 1:
833 848 # usually a split, take the one with biggest rev number
834 849 newnode = next(unfi.set('max(%ln)', newnodes)).node()
835 850 elif len(newnodes) == 0:
836 851 # move bookmark backwards
837 852 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
838 853 list(replacements)))
839 854 if roots:
840 855 newnode = roots[0].node()
841 856 else:
842 857 newnode = nullid
843 858 else:
844 859 newnode = newnodes[0]
845 860 moves[oldnode] = newnode
846 861
847 862 allnewnodes = [n for ns in replacements.values() for n in ns]
848 863 toretract = {}
849 864 toadvance = {}
850 865 if fixphase:
851 866 precursors = {}
852 867 for oldnode, newnodes in replacements.items():
853 868 for newnode in newnodes:
854 869 precursors.setdefault(newnode, []).append(oldnode)
855 870
856 871 allnewnodes.sort(key=lambda n: unfi[n].rev())
857 872 newphases = {}
858 873 def phase(ctx):
859 874 return newphases.get(ctx.node(), ctx.phase())
860 875 for newnode in allnewnodes:
861 876 ctx = unfi[newnode]
862 877 parentphase = max(phase(p) for p in ctx.parents())
863 878 if targetphase is None:
864 879 oldphase = max(unfi[oldnode].phase()
865 880 for oldnode in precursors[newnode])
866 881 newphase = max(oldphase, parentphase)
867 882 else:
868 883 newphase = max(targetphase, parentphase)
869 884 newphases[newnode] = newphase
870 885 if newphase > ctx.phase():
871 886 toretract.setdefault(newphase, []).append(newnode)
872 887 elif newphase < ctx.phase():
873 888 toadvance.setdefault(newphase, []).append(newnode)
874 889
875 890 with repo.transaction('cleanup') as tr:
876 891 # Move bookmarks
877 892 bmarks = repo._bookmarks
878 893 bmarkchanges = []
879 894 for oldnode, newnode in moves.items():
880 895 oldbmarks = repo.nodebookmarks(oldnode)
881 896 if not oldbmarks:
882 897 continue
883 898 from . import bookmarks # avoid import cycle
884 899 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
885 900 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
886 901 hex(oldnode), hex(newnode)))
887 902 # Delete divergent bookmarks being parents of related newnodes
888 903 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
889 904 allnewnodes, newnode, oldnode)
890 905 deletenodes = _containsnode(repo, deleterevs)
891 906 for name in oldbmarks:
892 907 bmarkchanges.append((name, newnode))
893 908 for b in bookmarks.divergent2delete(repo, deletenodes, name):
894 909 bmarkchanges.append((b, None))
895 910
896 911 if bmarkchanges:
897 912 bmarks.applychanges(repo, tr, bmarkchanges)
898 913
899 914 for phase, nodes in toretract.items():
900 915 phases.retractboundary(repo, tr, phase, nodes)
901 916 for phase, nodes in toadvance.items():
902 917 phases.advanceboundary(repo, tr, phase, nodes)
903 918
904 919 # Obsolete or strip nodes
905 920 if obsolete.isenabled(repo, obsolete.createmarkersopt):
906 921 # If a node is already obsoleted, and we want to obsolete it
907 922 # without a successor, skip that obsolete request since it's
908 923 # unnecessary. That's the "if s or not isobs(n)" check below.
909 924 # Also sort the nodes in topological order; that might be useful for
910 925 # some obsstore logic.
911 926 # NOTE: the filtering and sorting might belong to createmarkers.
912 927 isobs = unfi.obsstore.successors.__contains__
913 928 torev = unfi.changelog.rev
914 929 sortfunc = lambda ns: torev(ns[0])
915 930 rels = [(unfi[n], tuple(unfi[m] for m in s))
916 931 for n, s in sorted(replacements.items(), key=sortfunc)
917 932 if s or not isobs(n)]
918 933 if rels:
919 934 obsolete.createmarkers(repo, rels, operation=operation,
920 935 metadata=metadata)
921 936 else:
922 937 from . import repair # avoid import cycle
923 938 tostrip = list(replacements)
924 939 if tostrip:
925 940 repair.delayedstrip(repo.ui, repo, tostrip, operation,
926 941 backup=backup)
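# Illustrative call (node names invented): after an amend-like rewrite,
#   cleanupnodes(repo, {oldnode: [newnode]}, b'amend')
# moves any bookmarks from oldnode to newnode and either records an
# obsmarker (when createmarkers is enabled) or delay-strips oldnode.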
927 942
928 943 def addremove(repo, matcher, prefix, opts=None):
929 944 if opts is None:
930 945 opts = {}
931 946 m = matcher
932 947 dry_run = opts.get('dry_run')
933 948 try:
934 949 similarity = float(opts.get('similarity') or 0)
935 950 except ValueError:
936 951 raise error.Abort(_('similarity must be a number'))
937 952 if similarity < 0 or similarity > 100:
938 953 raise error.Abort(_('similarity must be between 0 and 100'))
939 954 similarity /= 100.0
940 955
941 956 ret = 0
942 957 join = lambda f: os.path.join(prefix, f)
943 958
944 959 wctx = repo[None]
945 960 for subpath in sorted(wctx.substate):
946 961 submatch = matchmod.subdirmatcher(subpath, m)
947 962 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
948 963 sub = wctx.sub(subpath)
949 964 try:
950 965 if sub.addremove(submatch, prefix, opts):
951 966 ret = 1
952 967 except error.LookupError:
953 968 repo.ui.status(_("skipping missing subrepository: %s\n")
954 969 % join(subpath))
955 970
956 971 rejected = []
957 972 def badfn(f, msg):
958 973 if f in m.files():
959 974 m.bad(f, msg)
960 975 rejected.append(f)
961 976
962 977 badmatch = matchmod.badmatch(m, badfn)
963 978 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
964 979 badmatch)
965 980
966 981 unknownset = set(unknown + forgotten)
967 982 toprint = unknownset.copy()
968 983 toprint.update(deleted)
969 984 for abs in sorted(toprint):
970 985 if repo.ui.verbose or not m.exact(abs):
971 986 if abs in unknownset:
972 987 status = _('adding %s\n') % m.uipath(abs)
973 988 else:
974 989 status = _('removing %s\n') % m.uipath(abs)
975 990 repo.ui.status(status)
976 991
977 992 renames = _findrenames(repo, m, added + unknown, removed + deleted,
978 993 similarity)
979 994
980 995 if not dry_run:
981 996 _markchanges(repo, unknown + forgotten, deleted, renames)
982 997
983 998 for f in rejected:
984 999 if f in m.files():
985 1000 return 1
986 1001 return ret
987 1002
988 1003 def marktouched(repo, files, similarity=0.0):
989 1004 '''Assert that files have somehow been operated upon. files are relative to
990 1005 the repo root.'''
991 1006 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
992 1007 rejected = []
993 1008
994 1009 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
995 1010
996 1011 if repo.ui.verbose:
997 1012 unknownset = set(unknown + forgotten)
998 1013 toprint = unknownset.copy()
999 1014 toprint.update(deleted)
1000 1015 for abs in sorted(toprint):
1001 1016 if abs in unknownset:
1002 1017 status = _('adding %s\n') % abs
1003 1018 else:
1004 1019 status = _('removing %s\n') % abs
1005 1020 repo.ui.status(status)
1006 1021
1007 1022 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1008 1023 similarity)
1009 1024
1010 1025 _markchanges(repo, unknown + forgotten, deleted, renames)
1011 1026
1012 1027 for f in rejected:
1013 1028 if f in m.files():
1014 1029 return 1
1015 1030 return 0
1016 1031
1017 1032 def _interestingfiles(repo, matcher):
1018 1033 '''Walk dirstate with matcher, looking for files that addremove would care
1019 1034 about.
1020 1035
1021 1036 This is different from dirstate.status because it doesn't care about
1022 1037 whether files are modified or clean.'''
1023 1038 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1024 1039 audit_path = pathutil.pathauditor(repo.root, cached=True)
1025 1040
1026 1041 ctx = repo[None]
1027 1042 dirstate = repo.dirstate
1028 1043 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1029 1044 unknown=True, ignored=False, full=False)
1030 1045 for abs, st in walkresults.iteritems():
1031 1046 dstate = dirstate[abs]
1032 1047 if dstate == '?' and audit_path.check(abs):
1033 1048 unknown.append(abs)
1034 1049 elif dstate != 'r' and not st:
1035 1050 deleted.append(abs)
1036 1051 elif dstate == 'r' and st:
1037 1052 forgotten.append(abs)
1038 1053 # for finding renames
1039 1054 elif dstate == 'r' and not st:
1040 1055 removed.append(abs)
1041 1056 elif dstate == 'a':
1042 1057 added.append(abs)
1043 1058
1044 1059 return added, unknown, deleted, removed, forgotten
1045 1060
1046 1061 def _findrenames(repo, matcher, added, removed, similarity):
1047 1062 '''Find renames from removed files to added ones.'''
1048 1063 renames = {}
1049 1064 if similarity > 0:
1050 1065 for old, new, score in similar.findrenames(repo, added, removed,
1051 1066 similarity):
1052 1067 if (repo.ui.verbose or not matcher.exact(old)
1053 1068 or not matcher.exact(new)):
1054 1069 repo.ui.status(_('recording removal of %s as rename to %s '
1055 1070 '(%d%% similar)\n') %
1056 1071 (matcher.rel(old), matcher.rel(new),
1057 1072 score * 100))
1058 1073 renames[new] = old
1059 1074 return renames
1060 1075
1061 1076 def _markchanges(repo, unknown, deleted, renames):
1062 1077 '''Marks the files in unknown as added, the files in deleted as removed,
1063 1078 and the files in renames as copied.'''
1064 1079 wctx = repo[None]
1065 1080 with repo.wlock():
1066 1081 wctx.forget(deleted)
1067 1082 wctx.add(unknown)
1068 1083 for new, old in renames.iteritems():
1069 1084 wctx.copy(old, new)
1070 1085
1071 1086 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1072 1087 """Update the dirstate to reflect the intent of copying src to dst. For
1073 1088 different reasons it might not end with dst being marked as copied from src.
1074 1089 """
1075 1090 origsrc = repo.dirstate.copied(src) or src
1076 1091 if dst == origsrc: # copying back a copy?
1077 1092 if repo.dirstate[dst] not in 'mn' and not dryrun:
1078 1093 repo.dirstate.normallookup(dst)
1079 1094 else:
1080 1095 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1081 1096 if not ui.quiet:
1082 1097 ui.warn(_("%s has not been committed yet, so no copy "
1083 1098 "data will be stored for %s.\n")
1084 1099 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1085 1100 if repo.dirstate[dst] in '?r' and not dryrun:
1086 1101 wctx.add([dst])
1087 1102 elif not dryrun:
1088 1103 wctx.copy(origsrc, dst)
1089 1104
1090 1105 def readrequires(opener, supported):
1091 1106 '''Reads and parses .hg/requires and checks if all entries found
1092 1107 are in the list of supported features.'''
1093 1108 requirements = set(opener.read("requires").splitlines())
1094 1109 missings = []
1095 1110 for r in requirements:
1096 1111 if r not in supported:
1097 1112 if not r or not r[0:1].isalnum():
1098 1113 raise error.RequirementError(_(".hg/requires file is corrupt"))
1099 1114 missings.append(r)
1100 1115 missings.sort()
1101 1116 if missings:
1102 1117 raise error.RequirementError(
1103 1118 _("repository requires features unknown to this Mercurial: %s")
1104 1119 % " ".join(missings),
1105 1120 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1106 1121 " for more information"))
1107 1122 return requirements
1108 1123
1109 1124 def writerequires(opener, requirements):
1110 1125 with opener('requires', 'w') as fp:
1111 1126 for r in sorted(requirements):
1112 1127 fp.write("%s\n" % r)
1113 1128
1114 1129 class filecachesubentry(object):
1115 1130 def __init__(self, path, stat):
1116 1131 self.path = path
1117 1132 self.cachestat = None
1118 1133 self._cacheable = None
1119 1134
1120 1135 if stat:
1121 1136 self.cachestat = filecachesubentry.stat(self.path)
1122 1137
1123 1138 if self.cachestat:
1124 1139 self._cacheable = self.cachestat.cacheable()
1125 1140 else:
1126 1141 # None means we don't know yet
1127 1142 self._cacheable = None
1128 1143
1129 1144 def refresh(self):
1130 1145 if self.cacheable():
1131 1146 self.cachestat = filecachesubentry.stat(self.path)
1132 1147
1133 1148 def cacheable(self):
1134 1149 if self._cacheable is not None:
1135 1150 return self._cacheable
1136 1151
1137 1152 # we don't know yet, assume it is for now
1138 1153 return True
1139 1154
1140 1155 def changed(self):
1141 1156 # no point in going further if we can't cache it
1142 1157 if not self.cacheable():
1143 1158 return True
1144 1159
1145 1160 newstat = filecachesubentry.stat(self.path)
1146 1161
1147 1162 # we may not know if it's cacheable yet, check again now
1148 1163 if newstat and self._cacheable is None:
1149 1164 self._cacheable = newstat.cacheable()
1150 1165
1151 1166 # check again
1152 1167 if not self._cacheable:
1153 1168 return True
1154 1169
1155 1170 if self.cachestat != newstat:
1156 1171 self.cachestat = newstat
1157 1172 return True
1158 1173 else:
1159 1174 return False
1160 1175
1161 1176 @staticmethod
1162 1177 def stat(path):
1163 1178 try:
1164 1179 return util.cachestat(path)
1165 1180 except OSError as e:
1166 1181 if e.errno != errno.ENOENT:
1167 1182 raise
1168 1183
1169 1184 class filecacheentry(object):
1170 1185 def __init__(self, paths, stat=True):
1171 1186 self._entries = []
1172 1187 for path in paths:
1173 1188 self._entries.append(filecachesubentry(path, stat))
1174 1189
1175 1190 def changed(self):
1176 1191 '''true if any entry has changed'''
1177 1192 for entry in self._entries:
1178 1193 if entry.changed():
1179 1194 return True
1180 1195 return False
1181 1196
1182 1197 def refresh(self):
1183 1198 for entry in self._entries:
1184 1199 entry.refresh()
1185 1200
1186 1201 class filecache(object):
1187 1202 """A property like decorator that tracks files under .hg/ for updates.
1188 1203
1189 1204 On first access, the files defined as arguments are stat()ed and the
1190 1205 results cached. The decorated function is called. The results are stashed
1191 1206 away in a ``_filecache`` dict on the object whose method is decorated.
1192 1207
1193 1208 On subsequent access, the cached result is returned.
1194 1209
1195 1210 On external property set operations, stat() calls are performed and the new
1196 1211 value is cached.
1197 1212
1198 1213 On property delete operations, cached data is removed.
1199 1214
1200 1215 When using the property API, cached data is always returned, if available:
1201 1216 no stat() is performed to check if the file has changed and if the function
1202 1217 needs to be called to reflect file changes.
1203 1218
1204 1219 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1205 1220 can populate an entry before the property's getter is called. In this case,
1206 1221 entries in ``_filecache`` will be used during property operations,
1207 1222 if available. If the underlying file changes, it is up to external callers
1208 1223 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1209 1224 method result as well as possibly calling ``del obj._filecache[attr]`` to
1210 1225 remove the ``filecacheentry``.
1211 1226 """
1212 1227
1213 1228 def __init__(self, *paths):
1214 1229 self.paths = paths
1215 1230
1216 1231 def join(self, obj, fname):
1217 1232 """Used to compute the runtime path of a cached file.
1218 1233
1219 1234 Users should subclass filecache and provide their own version of this
1220 1235 function to call the appropriate join function on 'obj' (an instance
1221 1236 of the class that its member function was decorated).
1222 1237 """
1223 1238 raise NotImplementedError
1224 1239
1225 1240 def __call__(self, func):
1226 1241 self.func = func
1227 1242 self.sname = func.__name__
1228 1243 self.name = pycompat.sysbytes(self.sname)
1229 1244 return self
1230 1245
1231 1246 def __get__(self, obj, type=None):
1232 1247 # if accessed on the class, return the descriptor itself.
1233 1248 if obj is None:
1234 1249 return self
1235 1250 # do we need to check if the file changed?
1236 1251 if self.sname in obj.__dict__:
1237 1252 assert self.name in obj._filecache, self.name
1238 1253 return obj.__dict__[self.sname]
1239 1254
1240 1255 entry = obj._filecache.get(self.name)
1241 1256
1242 1257 if entry:
1243 1258 if entry.changed():
1244 1259 entry.obj = self.func(obj)
1245 1260 else:
1246 1261 paths = [self.join(obj, path) for path in self.paths]
1247 1262
1248 1263 # We stat -before- creating the object so our cache doesn't lie if
1249 1264 # a writer modified the file between the time we read and stat
1250 1265 entry = filecacheentry(paths, True)
1251 1266 entry.obj = self.func(obj)
1252 1267
1253 1268 obj._filecache[self.name] = entry
1254 1269
1255 1270 obj.__dict__[self.sname] = entry.obj
1256 1271 return entry.obj
1257 1272
1258 1273 def __set__(self, obj, value):
1259 1274 if self.name not in obj._filecache:
1260 1275 # we add an entry for the missing value because X in __dict__
1261 1276 # implies X in _filecache
1262 1277 paths = [self.join(obj, path) for path in self.paths]
1263 1278 ce = filecacheentry(paths, False)
1264 1279 obj._filecache[self.name] = ce
1265 1280 else:
1266 1281 ce = obj._filecache[self.name]
1267 1282
1268 1283 ce.obj = value # update cached copy
1269 1284 obj.__dict__[self.sname] = value # update copy returned by obj.x
1270 1285
1271 1286 def __delete__(self, obj):
1272 1287 try:
1273 1288 del obj.__dict__[self.sname]
1274 1289 except KeyError:
1275 1290 raise AttributeError(self.sname)
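# A minimal subclass sketch, loosely modeled on localrepo's repofilecache
# (the property body is invented):
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#   class repo(object):
#       @repofilecache(b'bookmarks')
#       def _bookmarks(self):
#           return readbookmarks(self)
# First access stats .hg/bookmarks and caches the parsed result; later
# accesses reuse it until the file's stat data changes.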
1276 1291
1277 1292 def extdatasource(repo, source):
1278 1293 """Gather a map of rev -> value dict from the specified source
1279 1294
1280 1295 A source spec is treated as a URL, with a special case shell: type
1281 1296 for parsing the output from a shell command.
1282 1297
1283 1298 The data is parsed as a series of newline-separated records where
1284 1299 each record is a revision specifier optionally followed by a space
1285 1300 and a freeform string value. If the revision is known locally, it
1286 1301 is converted to a rev, otherwise the record is skipped.
1287 1302
1288 1303 Note that both key and value are treated as UTF-8 and converted to
1289 1304 the local encoding. This allows uniformity between local and
1290 1305 remote data sources.
1291 1306 """
1292 1307
1293 1308 spec = repo.ui.config("extdata", source)
1294 1309 if not spec:
1295 1310 raise error.Abort(_("unknown extdata source '%s'") % source)
1296 1311
1297 1312 data = {}
1298 1313 src = proc = None
1299 1314 try:
1300 1315 if spec.startswith("shell:"):
1301 1316 # external commands should be run relative to the repo root
1302 1317 cmd = spec[6:]
1303 1318 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1304 1319 close_fds=procutil.closefds,
1305 1320 stdout=subprocess.PIPE, cwd=repo.root)
1306 1321 src = proc.stdout
1307 1322 else:
1308 1323 # treat as a URL or file
1309 1324 src = url.open(repo.ui, spec)
1310 1325 for l in src:
1311 1326 if " " in l:
1312 1327 k, v = l.strip().split(" ", 1)
1313 1328 else:
1314 1329 k, v = l.strip(), ""
1315 1330
1316 1331 k = encoding.tolocal(k)
1317 1332 try:
1318 1333 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1319 1334 except (error.LookupError, error.RepoLookupError):
1320 1335 pass # we ignore data for nodes that don't exist locally
1321 1336 finally:
1322 1337 if proc:
1323 1338 proc.communicate()
1324 1339 if src:
1325 1340 src.close()
1326 1341 if proc and proc.returncode != 0:
1327 1342 raise error.Abort(_("extdata command '%s' failed: %s")
1328 1343 % (cmd, procutil.explainexit(proc.returncode)))
1329 1344
1330 1345 return data
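# Example source (config and file contents invented):
#   [extdata]
#   bugzilla = shell:cat .hg/bugmap
# where each .hg/bugmap line is "<node> BUG-123"; extdatasource(repo,
# b'bugzilla') then returns {rev: b'BUG-123', ...} for locally known nodes.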
1331 1346
1332 1347 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1333 1348 if lock is None:
1334 1349 raise error.LockInheritanceContractViolation(
1335 1350 'lock can only be inherited while held')
1336 1351 if environ is None:
1337 1352 environ = {}
1338 1353 with lock.inherit() as locker:
1339 1354 environ[envvar] = locker
1340 1355 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1341 1356
1342 1357 def wlocksub(repo, cmd, *args, **kwargs):
1343 1358 """run cmd as a subprocess that allows inheriting repo's wlock
1344 1359
1345 1360 This can only be called while the wlock is held. This takes all the
1346 1361 arguments that ui.system does, and returns the exit code of the
1347 1362 subprocess."""
1348 1363 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1349 1364 **kwargs)
1350 1365
1351 1366 class progress(object):
1352 1367 def __init__(self, ui, topic, unit="", total=None):
1353 1368 self.ui = ui
1354 1369 self.pos = 0
1355 1370 self.topic = topic
1356 1371 self.unit = unit
1357 1372 self.total = total
1358 1373
1359 1374 def __enter__(self):
1360 1375 return self
1361 1376
1362 1377 def __exit__(self, exc_type, exc_value, exc_tb):
1363 1378 self.complete()
1364 1379
1365 1380 def update(self, pos, item="", total=None):
1366 1381 assert pos is not None
1367 1382 if total:
1368 1383 self.total = total
1369 1384 self.pos = pos
1370 1385 self._print(item)
1371 1386
1372 1387 def increment(self, step=1, item="", total=None):
1373 1388 self.update(self.pos + step, item, total)
1374 1389
1375 1390 def complete(self):
1376 1391 self.ui.progress(self.topic, None)
1377 1392
1378 1393 def _print(self, item):
1379 1394 self.ui.progress(self.topic, self.pos, item, self.unit,
1380 1395 self.total)
1381 1396
1382 1397 def gdinitconfig(ui):
1383 1398 """helper function to know if a repo should be created as general delta
1384 1399 """
1385 1400 # experimental config: format.generaldelta
1386 1401 return (ui.configbool('format', 'generaldelta')
1387 1402 or ui.configbool('format', 'usegeneraldelta')
1388 1403 or ui.configbool('format', 'sparse-revlog'))
1389 1404
1390 1405 def gddeltaconfig(ui):
1391 1406 """helper function to know if incoming delta should be optimised
1392 1407 """
1393 1408 # experimental config: format.generaldelta
1394 1409 return ui.configbool('format', 'generaldelta')
1395 1410
1396 1411 class simplekeyvaluefile(object):
1397 1412 """A simple file with key=value lines
1398 1413
1399 1414 Keys must be alphanumerics and start with a letter, values must not
1400 1415 contain '\n' characters"""
1401 1416 firstlinekey = '__firstline'
1402 1417
1403 1418 def __init__(self, vfs, path, keys=None):
1404 1419 self.vfs = vfs
1405 1420 self.path = path
1406 1421
1407 1422 def read(self, firstlinenonkeyval=False):
1408 1423 """Read the contents of a simple key-value file
1409 1424
1410 1425 'firstlinenonkeyval' indicates whether the first line of file should
1411 1426 be treated as a key-value pair or returned fully under the
1412 1427 __firstline key."""
1413 1428 lines = self.vfs.readlines(self.path)
1414 1429 d = {}
1415 1430 if firstlinenonkeyval:
1416 1431 if not lines:
1417 1432 e = _("empty simplekeyvalue file")
1418 1433 raise error.CorruptedState(e)
1419 1434 # we don't want to include '\n' in the __firstline
1420 1435 d[self.firstlinekey] = lines[0][:-1]
1421 1436 del lines[0]
1422 1437
1423 1438 try:
1424 1439 # the 'if line.strip()' part prevents us from failing on empty
1425 1440 # lines which only contain '\n' and are therefore not skipped
1426 1441 # by 'if line'
1427 1442 updatedict = dict(line[:-1].split('=', 1) for line in lines
1428 1443 if line.strip())
1429 1444 if self.firstlinekey in updatedict:
1430 1445 e = _("%r can't be used as a key")
1431 1446 raise error.CorruptedState(e % self.firstlinekey)
1432 1447 d.update(updatedict)
1433 1448 except ValueError as e:
1434 1449 raise error.CorruptedState(str(e))
1435 1450 return d
1436 1451
1437 1452 def write(self, data, firstline=None):
1438 1453 """Write key=>value mapping to a file
1439 1454 data is a dict. Keys must be alphanumerical and start with a letter.
1440 1455 Values must not contain newline characters.
1441 1456
1442 1457 If 'firstline' is not None, it is written to file before
1443 1458 everything else, as it is, not in a key=value form"""
1444 1459 lines = []
1445 1460 if firstline is not None:
1446 1461 lines.append('%s\n' % firstline)
1447 1462
1448 1463 for k, v in data.items():
1449 1464 if k == self.firstlinekey:
1450 1465 e = "key name '%s' is reserved" % self.firstlinekey
1451 1466 raise error.ProgrammingError(e)
1452 1467 if not k[0:1].isalpha():
1453 1468 e = "keys must start with a letter in a key-value file"
1454 1469 raise error.ProgrammingError(e)
1455 1470 if not k.isalnum():
1456 1471 e = "invalid key name in a simple key-value file"
1457 1472 raise error.ProgrammingError(e)
1458 1473 if '\n' in v:
1459 1474 e = "invalid value in a simple key-value file"
1460 1475 raise error.ProgrammingError(e)
1461 1476 lines.append("%s=%s\n" % (k, v))
1462 1477 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1463 1478 fp.write(''.join(lines))
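# Illustrative round trip (path and keys invented):
#   f = simplekeyvaluefile(vfs, b'state')
#   f.write({b'version': b'1'}, firstline=b'header')
# writes "header\nversion=1\n"; f.read(firstlinenonkeyval=True) then
# returns {'__firstline': b'header', b'version': b'1'}.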
1464 1479
1465 1480 _reportobsoletedsource = [
1466 1481 'debugobsolete',
1467 1482 'pull',
1468 1483 'push',
1469 1484 'serve',
1470 1485 'unbundle',
1471 1486 ]
1472 1487
1473 1488 _reportnewcssource = [
1474 1489 'pull',
1475 1490 'unbundle',
1476 1491 ]
1477 1492
1478 1493 def prefetchfiles(repo, revs, match):
1479 1494 """Invokes the registered file prefetch functions, allowing extensions to
1480 1495 ensure the corresponding files are available locally, before the command
1481 1496 uses them."""
1482 1497 if match:
1483 1498 # The command itself will complain about files that don't exist, so
1484 1499 # don't duplicate the message.
1485 1500 match = matchmod.badmatch(match, lambda fn, msg: None)
1486 1501 else:
1487 1502 match = matchall(repo)
1488 1503
1489 1504 fileprefetchhooks(repo, revs, match)
1490 1505
1491 1506 # a list of (repo, revs, match) prefetch functions
1492 1507 fileprefetchhooks = util.hooks()
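# Extensions register a prefetch function like this (a sketch; the function
# name and extension name are invented):
#   def _prefetchfiles(repo, revs, match):
#       pass # fetch contents of files matching 'match' in 'revs'
#   fileprefetchhooks.add('myextension', _prefetchfiles)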
1493 1508
1494 1509 # A marker that tells the evolve extension to suppress its own reporting
1495 1510 _reportstroubledchangesets = True
1496 1511
1497 1512 def registersummarycallback(repo, otr, txnname=''):
1498 1513 """register a callback to issue a summary after the transaction is closed
1499 1514 """
1500 1515 def txmatch(sources):
1501 1516 return any(txnname.startswith(source) for source in sources)
1502 1517
1503 1518 categories = []
1504 1519
1505 1520 def reportsummary(func):
1506 1521 """decorator for report callbacks."""
1507 1522 # The repoview life cycle is shorter than the one of the actual
1508 1523 # underlying repository. So the filtered object can die before the
1509 1524 # weakref is used, leading to trouble. We keep a reference to the
1510 1525 # unfiltered object and restore the filtering when retrieving the
1511 1526 # repository through the weakref.
1512 1527 filtername = repo.filtername
1513 1528 reporef = weakref.ref(repo.unfiltered())
1514 1529 def wrapped(tr):
1515 1530 repo = reporef()
1516 1531 if filtername:
1517 1532 repo = repo.filtered(filtername)
1518 1533 func(repo, tr)
1519 1534 newcat = '%02i-txnreport' % len(categories)
1520 1535 otr.addpostclose(newcat, wrapped)
1521 1536 categories.append(newcat)
1522 1537 return wrapped
1523 1538
1524 1539 if txmatch(_reportobsoletedsource):
1525 1540 @reportsummary
1526 1541 def reportobsoleted(repo, tr):
1527 1542 obsoleted = obsutil.getobsoleted(repo, tr)
1528 1543 if obsoleted:
1529 1544 repo.ui.status(_('obsoleted %i changesets\n')
1530 1545 % len(obsoleted))
1531 1546
1532 1547 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1533 1548 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1534 1549 instabilitytypes = [
1535 1550 ('orphan', 'orphan'),
1536 1551 ('phase-divergent', 'phasedivergent'),
1537 1552 ('content-divergent', 'contentdivergent'),
1538 1553 ]
1539 1554
1540 1555 def getinstabilitycounts(repo):
1541 1556 filtered = repo.changelog.filteredrevs
1542 1557 counts = {}
1543 1558 for instability, revset in instabilitytypes:
1544 1559 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1545 1560 filtered)
1546 1561 return counts
1547 1562
1548 1563 oldinstabilitycounts = getinstabilitycounts(repo)
1549 1564 @reportsummary
1550 1565 def reportnewinstabilities(repo, tr):
1551 1566 newinstabilitycounts = getinstabilitycounts(repo)
1552 1567 for instability, revset in instabilitytypes:
1553 1568 delta = (newinstabilitycounts[instability] -
1554 1569 oldinstabilitycounts[instability])
1555 1570 msg = getinstabilitymessage(delta, instability)
1556 1571 if msg:
1557 1572 repo.ui.warn(msg)
1558 1573
1559 1574 if txmatch(_reportnewcssource):
1560 1575 @reportsummary
1561 1576 def reportnewcs(repo, tr):
1562 1577 """Report the range of new revisions pulled/unbundled."""
1563 1578 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1564 1579 if not newrevs:
1565 1580 return
1566 1581
1567 1582 # Compute the bounds of new revisions' range, excluding obsoletes.
1568 1583 unfi = repo.unfiltered()
1569 1584 revs = unfi.revs('%ld and not obsolete()', newrevs)
1570 1585 if not revs:
1571 1586 # Got only obsoletes.
1572 1587 return
1573 1588 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1574 1589
1575 1590 if minrev == maxrev:
1576 1591 revrange = minrev
1577 1592 else:
1578 1593 revrange = '%s:%s' % (minrev, maxrev)
1579 1594 repo.ui.status(_('new changesets %s\n') % revrange)
1580 1595
1581 1596 @reportsummary
1582 1597 def reportphasechanges(repo, tr):
1583 1598 """Report statistics of phase changes for changesets pre-existing
1584 1599 pull/unbundle.
1585 1600 """
1586 1601 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1587 1602 phasetracking = tr.changes.get('phases', {})
1588 1603 if not phasetracking:
1589 1604 return
1590 1605 published = [
1591 1606 rev for rev, (old, new) in phasetracking.iteritems()
1592 1607 if new == phases.public and rev not in newrevs
1593 1608 ]
1594 1609 if not published:
1595 1610 return
1596 1611 repo.ui.status(_('%d local changesets published\n')
1597 1612 % len(published))
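# Caller-side sketch (illustrative; core wires this up when a transaction
# is opened): register the reporting callbacks right after creating the
# transaction so the summary is printed when it closes.
#
# tr = repo.transaction('pull')
# registersummarycallback(repo, tr, txnname='pull')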
1598 1613
1599 1614 def getinstabilitymessage(delta, instability):
1600 1615 """function to return the message to show warning about new instabilities
1601 1616
1602 1617 exists as a separate function so that extension can wrap to show more
1603 1618 information like how to fix instabilities"""
1604 1619 if delta > 0:
1605 1620 return _('%i new %s changesets\n') % (delta, instability)
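# Extension-side sketch of the wrapping the docstring mentions (all names
# below are invented; extensions.wrapfunction is the stock wrapping helper):
#
# from mercurial import extensions, scmutil
#
# def _withhint(orig, delta, instability):
#     msg = orig(delta, instability)
#     if msg:
#         msg += "hint: how to repair these changesets goes here\n"
#     return msg
#
# def extsetup(ui):
#     extensions.wrapfunction(scmutil, 'getinstabilitymessage', _withhint)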
1606 1621
1607 1622 def nodesummaries(repo, nodes, maxnumnodes=4):
1608 1623 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1609 1624 return ' '.join(short(h) for h in nodes)
1610 1625 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1611 1626 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1612 1627
1613 1628 def enforcesinglehead(repo, tr, desc):
1614 1629 """check that no named branch has multiple heads"""
1615 1630 if desc in ('strip', 'repair'):
1616 1631 # skip the logic during strip
1617 1632 return
1618 1633 visible = repo.filtered('visible')
1619 1634 # possible improvement: we could restrict the check to the affected branches
1620 1635 for name, heads in visible.branchmap().iteritems():
1621 1636 if len(heads) > 1:
1622 1637 msg = _('rejecting multiple heads on branch "%s"')
1623 1638 msg %= name
1624 1639 hint = _('%d heads: %s')
1625 1640 hint %= (len(heads), nodesummaries(repo, heads))
1626 1641 raise error.Abort(msg, hint=hint)
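# Caller-side sketch (illustrative): this check is meant to run as
# transaction validation, e.g. from a pretxnclose-style callback, so the
# Abort above rolls the whole transaction back.
#
# def validator(tr):
#     enforcesinglehead(repo, tr, 'push')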
1627 1642
1628 1643 def wrapconvertsink(sink):
1629 1644 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1630 1645 before it is used, whether or not the convert extension was formally loaded.
1631 1646 """
1632 1647 return sink
1633 1648
1634 1649 def unhidehashlikerevs(repo, specs, hiddentype):
1635 1650 """parse the user specs and unhide changesets whose hash or revision number
1636 1651 is passed.
1637 1652
1638 1653 hiddentype can be: 1) 'warn': warn while unhiding changesets
1639 1654 2) 'nowarn': don't warn while unhiding changesets
1640 1655
1641 1656 returns a repo object with the required changesets unhidden
1642 1657 """
1643 1658 if not repo.filtername or not repo.ui.configbool('experimental',
1644 1659 'directaccess'):
1645 1660 return repo
1646 1661
1647 1662 if repo.filtername not in ('visible', 'visible-hidden'):
1648 1663 return repo
1649 1664
1650 1665 symbols = set()
1651 1666 for spec in specs:
1652 1667 try:
1653 1668 tree = revsetlang.parse(spec)
1654 1669 except error.ParseError: # will be reported by scmutil.revrange()
1655 1670 continue
1656 1671
1657 1672 symbols.update(revsetlang.gethashlikesymbols(tree))
1658 1673
1659 1674 if not symbols:
1660 1675 return repo
1661 1676
1662 1677 revs = _getrevsfromsymbols(repo, symbols)
1663 1678
1664 1679 if not revs:
1665 1680 return repo
1666 1681
1667 1682 if hiddentype == 'warn':
1668 1683 unfi = repo.unfiltered()
1669 1684 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1670 1685 repo.ui.warn(_("warning: accessing hidden changesets for write "
1671 1686 "operation: %s\n") % revstr)
1672 1687
1673 1688 # we have to use a new filtername to separate the branch/tags caches
1674 1689 # until we can disable these caches when revisions are dynamically pinned.
1675 1690 return repo.filtered('visible-hidden', revs)
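# Usage sketch (illustrative; the specs are invented): unhide any hash-like
# hidden changesets named in user input before resolving it. Per the checks
# above, this is a no-op unless experimental.directaccess is enabled.
#
# specs = ['7ba57', '2']
# repo = unhidehashlikerevs(repo, specs, 'warn')
# revs = revrange(repo, specs)  # revrange is defined earlier in this module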
1676 1691
1677 1692 def _getrevsfromsymbols(repo, symbols):
1678 1693 """parse the list of symbols and returns a set of revision numbers of hidden
1679 1694 changesets present in symbols"""
1680 1695 revs = set()
1681 1696 unfi = repo.unfiltered()
1682 1697 unficl = unfi.changelog
1683 1698 cl = repo.changelog
1684 1699 tiprev = len(unficl)
1685 1700 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1686 1701 for s in symbols:
1687 1702 try:
1688 1703 n = int(s)
1689 1704 if n <= tiprev:
1690 1705 if not allowrevnums:
1691 1706 continue
1692 1707 else:
1693 1708 if n not in cl:
1694 1709 revs.add(n)
1695 1710 continue
1696 1711 except ValueError:
1697 1712 pass
1698 1713
1699 1714 try:
1700 1715 s = resolvehexnodeidprefix(unfi, s)
1701 1716 except (error.LookupError, error.WdirUnsupported):
1702 1717 s = None
1703 1718
1704 1719 if s is not None:
1705 1720 rev = unficl.rev(s)
1706 1721 if rev not in cl:
1707 1722 revs.add(rev)
1708 1723
1709 1724 return revs
1710 1725
1711 1726 def bookmarkrevs(repo, mark):
1712 1727 """
1713 1728 Select revisions reachable by a given bookmark
1714 1729 """
1715 1730 return repo.revs("ancestors(bookmark(%s)) - "
1716 1731 "ancestors(head() and not bookmark(%s)) - "
1717 1732 "ancestors(bookmark() and not bookmark(%s))",
1718 1733 mark, mark, mark)
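# Usage sketch for bookmarkrevs (illustrative; the bookmark name is
# invented): the revset above keeps a bookmark's ancestors while dropping
# history reachable from other heads or other bookmarks, i.e. the
# changesets that "belong" to that bookmark:
#
# revs = bookmarkrevs(repo, 'feature')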
@@ -1,37 +1,43 b''
1 1 $ hg init repo
2 2 $ cd repo
3 3
4 4 $ echo 0 > a
5 5 $ hg ci -qAm 0
6 6 $ for i in 5 8 14 43; do
7 7 > hg up -q 0
8 8 > echo $i > a
9 9 > hg ci -qm $i
10 10 > done
11 11 $ cat <<EOF >> .hg/hgrc
12 12 > [alias]
13 13 > l = log -T '{rev}:{shortest(node,1)}\n'
14 14 > EOF
15 15
16 16 $ hg l
17 17 4:7ba5d
18 18 3:7ba57
19 19 2:72
20 20 1:9
21 21 0:b
22 22 $ cat <<EOF >> .hg/hgrc
23 23 > [experimental]
24 24 > revisions.disambiguatewithin=:3
25 25 > EOF
26 $ hg l
27 4:7ba5d
28 3:7b
29 2:72
30 1:9
31 0:b
26 32 9 was unambiguous and still is
27 33 $ hg l -r 9
28 34 1:9
29 35 7 was ambiguous and still is
30 36 $ hg l -r 7
31 37 abort: 00changelog.i@7: ambiguous identifier!
32 38 [255]
33 39 7b is no longer ambiguous
34 40 $ hg l -r 7b
35 3:7ba57
41 3:7b
36 42
37 43 $ cd ..