shortest: never emit 0-length prefix even if unique...
Martin von Zweigbergk
r40439:bf249bb6 default
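The added line below clamps the caller-supplied minimum length in
shortesthexnodeidprefix() so the function never returns a 0-length prefix,
even in a repository where the empty string would technically be a unique
prefix. A minimal sketch of the idea, using a toy stand-in for the real
changelog.shortest() search (shortest_prefix, taken, and the sample hash are
hypothetical illustrations, not Mercurial API):

def shortest_prefix(hexnode, taken, minlength=0):
    # Toy model of the prefix search in shortesthexnodeidprefix():
    # return the shortest prefix of hexnode shared by no hash in `taken`.
    # Without the clamp, minlength=0 lets the loop try length 0, and ''
    # is "unique" whenever nothing else collides -- the case this commit
    # rules out by never considering a 0-length prefix.
    minlength = max(minlength, 1)  # the line this commit adds at scmutil.py:498
    for length in range(minlength, len(hexnode) + 1):
        prefix = hexnode[:length]
        if not any(h != hexnode and h.startswith(prefix) for h in taken):
            return prefix
    return hexnode

# With no competing hashes, the shortest prefix is now 'b', not '':
assert shortest_prefix('bf249bb6', set()) == 'b'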
@@ -1,1804 +1,1806 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 nullrev,
25 25 short,
26 26 wdirid,
27 27 wdirrev,
28 28 )
29 29
30 30 from . import (
31 31 encoding,
32 32 error,
33 33 match as matchmod,
34 34 obsolete,
35 35 obsutil,
36 36 pathutil,
37 37 phases,
38 38 policy,
39 39 pycompat,
40 40 revsetlang,
41 41 similar,
42 42 smartset,
43 43 url,
44 44 util,
45 45 vfs,
46 46 )
47 47
48 48 from .utils import (
49 49 procutil,
50 50 stringutil,
51 51 )
52 52
53 53 if pycompat.iswindows:
54 54 from . import scmwindows as scmplatform
55 55 else:
56 56 from . import scmposix as scmplatform
57 57
58 58 parsers = policy.importmod(r'parsers')
59 59
60 60 termsize = scmplatform.termsize
61 61
62 62 class status(tuple):
63 63 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 64 and 'ignored' properties are only relevant to the working copy.
65 65 '''
66 66
67 67 __slots__ = ()
68 68
69 69 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 70 clean):
71 71 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 72 ignored, clean))
73 73
74 74 @property
75 75 def modified(self):
76 76 '''files that have been modified'''
77 77 return self[0]
78 78
79 79 @property
80 80 def added(self):
81 81 '''files that have been added'''
82 82 return self[1]
83 83
84 84 @property
85 85 def removed(self):
86 86 '''files that have been removed'''
87 87 return self[2]
88 88
89 89 @property
90 90 def deleted(self):
91 91 '''files that are in the dirstate, but have been deleted from the
92 92 working copy (aka "missing")
93 93 '''
94 94 return self[3]
95 95
96 96 @property
97 97 def unknown(self):
98 98 '''files not in the dirstate that are not ignored'''
99 99 return self[4]
100 100
101 101 @property
102 102 def ignored(self):
103 103 '''files not in the dirstate that are ignored (by _dirignore())'''
104 104 return self[5]
105 105
106 106 @property
107 107 def clean(self):
108 108 '''files that have not been modified'''
109 109 return self[6]
110 110
111 111 def __repr__(self, *args, **kwargs):
112 112 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 113 r'unknown=%s, ignored=%s, clean=%s>') %
114 114 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115 115
116 116 def itersubrepos(ctx1, ctx2):
117 117 """find subrepos in ctx1 or ctx2"""
118 118 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 119 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 120 # has been modified (in ctx2) but not yet committed (in ctx1).
121 121 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 122 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123 123
124 124 missing = set()
125 125
126 126 for subpath in ctx2.substate:
127 127 if subpath not in ctx1.substate:
128 128 del subpaths[subpath]
129 129 missing.add(subpath)
130 130
131 131 for subpath, ctx in sorted(subpaths.iteritems()):
132 132 yield subpath, ctx.sub(subpath)
133 133
134 134 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 135 # status and diff will have an accurate result when it does
136 136 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 137 # against itself.
138 138 for subpath in missing:
139 139 yield subpath, ctx2.nullsub(subpath, ctx1)
140 140
141 141 def nochangesfound(ui, repo, excluded=None):
142 142 '''Report no changes for push/pull, excluded is None or a list of
143 143 nodes excluded from the push/pull.
144 144 '''
145 145 secretlist = []
146 146 if excluded:
147 147 for n in excluded:
148 148 ctx = repo[n]
149 149 if ctx.phase() >= phases.secret and not ctx.extinct():
150 150 secretlist.append(n)
151 151
152 152 if secretlist:
153 153 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 154 % len(secretlist))
155 155 else:
156 156 ui.status(_("no changes found\n"))
157 157
158 158 def callcatch(ui, func):
159 159 """call func() with global exception handling
160 160
161 161 return func() if no exception happens. otherwise do some error handling
162 162 and return an exit code accordingly. does not handle all exceptions.
163 163 """
164 164 try:
165 165 try:
166 166 return func()
167 167 except: # re-raises
168 168 ui.traceback()
169 169 raise
170 170 # Global exception handling, alphabetically
171 171 # Mercurial-specific first, followed by built-in and library exceptions
172 172 except error.LockHeld as inst:
173 173 if inst.errno == errno.ETIMEDOUT:
174 174 reason = _('timed out waiting for lock held by %r') % (
175 175 pycompat.bytestr(inst.locker))
176 176 else:
177 177 reason = _('lock held by %r') % inst.locker
178 178 ui.error(_("abort: %s: %s\n") % (
179 179 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 180 if not inst.locker:
181 181 ui.error(_("(lock might be very busy)\n"))
182 182 except error.LockUnavailable as inst:
183 183 ui.error(_("abort: could not lock %s: %s\n") %
184 184 (inst.desc or stringutil.forcebytestr(inst.filename),
185 185 encoding.strtolocal(inst.strerror)))
186 186 except error.OutOfBandError as inst:
187 187 if inst.args:
188 188 msg = _("abort: remote error:\n")
189 189 else:
190 190 msg = _("abort: remote error\n")
191 191 ui.error(msg)
192 192 if inst.args:
193 193 ui.error(''.join(inst.args))
194 194 if inst.hint:
195 195 ui.error('(%s)\n' % inst.hint)
196 196 except error.RepoError as inst:
197 197 ui.error(_("abort: %s!\n") % inst)
198 198 if inst.hint:
199 199 ui.error(_("(%s)\n") % inst.hint)
200 200 except error.ResponseError as inst:
201 201 ui.error(_("abort: %s") % inst.args[0])
202 202 msg = inst.args[1]
203 203 if isinstance(msg, type(u'')):
204 204 msg = pycompat.sysbytes(msg)
205 205 if not isinstance(msg, bytes):
206 206 ui.error(" %r\n" % (msg,))
207 207 elif not msg:
208 208 ui.error(_(" empty string\n"))
209 209 else:
210 210 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 211 except error.CensoredNodeError as inst:
212 212 ui.error(_("abort: file censored %s!\n") % inst)
213 213 except error.StorageError as inst:
214 214 ui.error(_("abort: %s!\n") % inst)
215 215 except error.InterventionRequired as inst:
216 216 ui.error("%s\n" % inst)
217 217 if inst.hint:
218 218 ui.error(_("(%s)\n") % inst.hint)
219 219 return 1
220 220 except error.WdirUnsupported:
221 221 ui.error(_("abort: working directory revision cannot be specified\n"))
222 222 except error.Abort as inst:
223 223 ui.error(_("abort: %s\n") % inst)
224 224 if inst.hint:
225 225 ui.error(_("(%s)\n") % inst.hint)
226 226 except ImportError as inst:
227 227 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
228 228 m = stringutil.forcebytestr(inst).split()[-1]
229 229 if m in "mpatch bdiff".split():
230 230 ui.error(_("(did you forget to compile extensions?)\n"))
231 231 elif m in "zlib".split():
232 232 ui.error(_("(is your Python install correct?)\n"))
233 233 except IOError as inst:
234 234 if util.safehasattr(inst, "code"):
235 235 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
236 236 elif util.safehasattr(inst, "reason"):
237 237 try: # usually it is in the form (errno, strerror)
238 238 reason = inst.reason.args[1]
239 239 except (AttributeError, IndexError):
240 240 # it might be anything, for example a string
241 241 reason = inst.reason
242 242 if isinstance(reason, pycompat.unicode):
243 243 # SSLError of Python 2.7.9 contains a unicode
244 244 reason = encoding.unitolocal(reason)
245 245 ui.error(_("abort: error: %s\n") % reason)
246 246 elif (util.safehasattr(inst, "args")
247 247 and inst.args and inst.args[0] == errno.EPIPE):
248 248 pass
249 249 elif getattr(inst, "strerror", None):
250 250 if getattr(inst, "filename", None):
251 251 ui.error(_("abort: %s: %s\n") % (
252 252 encoding.strtolocal(inst.strerror),
253 253 stringutil.forcebytestr(inst.filename)))
254 254 else:
255 255 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
256 256 else:
257 257 raise
258 258 except OSError as inst:
259 259 if getattr(inst, "filename", None) is not None:
260 260 ui.error(_("abort: %s: '%s'\n") % (
261 261 encoding.strtolocal(inst.strerror),
262 262 stringutil.forcebytestr(inst.filename)))
263 263 else:
264 264 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
265 265 except MemoryError:
266 266 ui.error(_("abort: out of memory\n"))
267 267 except SystemExit as inst:
268 268 # Commands shouldn't sys.exit directly, but give a return code.
269 269 # Just in case, catch this and pass the exit code to the caller.
270 270 return inst.code
271 271 except socket.error as inst:
272 272 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
273 273
274 274 return -1
275 275
276 276 def checknewlabel(repo, lbl, kind):
277 277 # Do not use the "kind" parameter in ui output.
278 278 # It makes strings difficult to translate.
279 279 if lbl in ['tip', '.', 'null']:
280 280 raise error.Abort(_("the name '%s' is reserved") % lbl)
281 281 for c in (':', '\0', '\n', '\r'):
282 282 if c in lbl:
283 283 raise error.Abort(
284 284 _("%r cannot be used in a name") % pycompat.bytestr(c))
285 285 try:
286 286 int(lbl)
287 287 raise error.Abort(_("cannot use an integer as a name"))
288 288 except ValueError:
289 289 pass
290 290 if lbl.strip() != lbl:
291 291 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
292 292
293 293 def checkfilename(f):
294 294 '''Check that the filename f is an acceptable filename for a tracked file'''
295 295 if '\r' in f or '\n' in f:
296 296 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
297 297 % pycompat.bytestr(f))
298 298
299 299 def checkportable(ui, f):
300 300 '''Check if filename f is portable and warn or abort depending on config'''
301 301 checkfilename(f)
302 302 abort, warn = checkportabilityalert(ui)
303 303 if abort or warn:
304 304 msg = util.checkwinfilename(f)
305 305 if msg:
306 306 msg = "%s: %s" % (msg, procutil.shellquote(f))
307 307 if abort:
308 308 raise error.Abort(msg)
309 309 ui.warn(_("warning: %s\n") % msg)
310 310
311 311 def checkportabilityalert(ui):
312 312 '''check if the user's config requests nothing, a warning, or abort for
313 313 non-portable filenames'''
314 314 val = ui.config('ui', 'portablefilenames')
315 315 lval = val.lower()
316 316 bval = stringutil.parsebool(val)
317 317 abort = pycompat.iswindows or lval == 'abort'
318 318 warn = bval or lval == 'warn'
319 319 if bval is None and not (warn or abort or lval == 'ignore'):
320 320 raise error.ConfigError(
321 321 _("ui.portablefilenames value is invalid ('%s')") % val)
322 322 return abort, warn
323 323
324 324 class casecollisionauditor(object):
325 325 def __init__(self, ui, abort, dirstate):
326 326 self._ui = ui
327 327 self._abort = abort
328 328 allfiles = '\0'.join(dirstate._map)
329 329 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
330 330 self._dirstate = dirstate
331 331 # The purpose of _newfiles is so that we don't complain about
332 332 # case collisions if someone were to call this object with the
333 333 # same filename twice.
334 334 self._newfiles = set()
335 335
336 336 def __call__(self, f):
337 337 if f in self._newfiles:
338 338 return
339 339 fl = encoding.lower(f)
340 340 if fl in self._loweredfiles and f not in self._dirstate:
341 341 msg = _('possible case-folding collision for %s') % f
342 342 if self._abort:
343 343 raise error.Abort(msg)
344 344 self._ui.warn(_("warning: %s\n") % msg)
345 345 self._loweredfiles.add(fl)
346 346 self._newfiles.add(f)
347 347
348 348 def filteredhash(repo, maxrev):
349 349 """build hash of filtered revisions in the current repoview.
350 350
351 351 Multiple caches perform up-to-date validation by checking that the
352 352 tiprev and tipnode stored in the cache file match the current repository.
353 353 However, this is not sufficient for validating repoviews because the set
354 354 of revisions in the view may change without the repository tiprev and
355 355 tipnode changing.
356 356
357 357 This function hashes all the revs filtered from the view and returns
358 358 that SHA-1 digest.
359 359 """
360 360 cl = repo.changelog
361 361 if not cl.filteredrevs:
362 362 return None
363 363 key = None
364 364 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
365 365 if revs:
366 366 s = hashlib.sha1()
367 367 for rev in revs:
368 368 s.update('%d;' % rev)
369 369 key = s.digest()
370 370 return key
371 371
372 372 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
373 373 '''yield every hg repository under path, always recursively.
374 374 The recurse flag will only control recursion into repo working dirs'''
375 375 def errhandler(err):
376 376 if err.filename == path:
377 377 raise err
378 378 samestat = getattr(os.path, 'samestat', None)
379 379 if followsym and samestat is not None:
380 380 def adddir(dirlst, dirname):
381 381 dirstat = os.stat(dirname)
382 382 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
383 383 if not match:
384 384 dirlst.append(dirstat)
385 385 return not match
386 386 else:
387 387 followsym = False
388 388
389 389 if (seen_dirs is None) and followsym:
390 390 seen_dirs = []
391 391 adddir(seen_dirs, path)
392 392 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
393 393 dirs.sort()
394 394 if '.hg' in dirs:
395 395 yield root # found a repository
396 396 qroot = os.path.join(root, '.hg', 'patches')
397 397 if os.path.isdir(os.path.join(qroot, '.hg')):
398 398 yield qroot # we have a patch queue repo here
399 399 if recurse:
400 400 # avoid recursing inside the .hg directory
401 401 dirs.remove('.hg')
402 402 else:
403 403 dirs[:] = [] # don't descend further
404 404 elif followsym:
405 405 newdirs = []
406 406 for d in dirs:
407 407 fname = os.path.join(root, d)
408 408 if adddir(seen_dirs, fname):
409 409 if os.path.islink(fname):
410 410 for hgname in walkrepos(fname, True, seen_dirs):
411 411 yield hgname
412 412 else:
413 413 newdirs.append(d)
414 414 dirs[:] = newdirs
415 415
416 416 def binnode(ctx):
417 417 """Return binary node id for a given basectx"""
418 418 node = ctx.node()
419 419 if node is None:
420 420 return wdirid
421 421 return node
422 422
423 423 def intrev(ctx):
424 424 """Return integer for a given basectx that can be used in comparison or
425 425 arithmetic operation"""
426 426 rev = ctx.rev()
427 427 if rev is None:
428 428 return wdirrev
429 429 return rev
430 430
431 431 def formatchangeid(ctx):
432 432 """Format changectx as '{rev}:{node|formatnode}', which is the default
433 433 template provided by logcmdutil.changesettemplater"""
434 434 repo = ctx.repo()
435 435 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
436 436
437 437 def formatrevnode(ui, rev, node):
438 438 """Format given revision and node depending on the current verbosity"""
439 439 if ui.debugflag:
440 440 hexfunc = hex
441 441 else:
442 442 hexfunc = short
443 443 return '%d:%s' % (rev, hexfunc(node))
444 444
445 445 def resolvehexnodeidprefix(repo, prefix):
446 446 if (prefix.startswith('x') and
447 447 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
448 448 prefix = prefix[1:]
449 449 try:
450 450 # Uses unfiltered repo because it's faster when prefix is ambiguous.
451 451 # This matches the shortesthexnodeidprefix() function below.
452 452 node = repo.unfiltered().changelog._partialmatch(prefix)
453 453 except error.AmbiguousPrefixLookupError:
454 454 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
455 455 if revset:
456 456 # Clear config to avoid infinite recursion
457 457 configoverrides = {('experimental',
458 458 'revisions.disambiguatewithin'): None}
459 459 with repo.ui.configoverride(configoverrides):
460 460 revs = repo.anyrevs([revset], user=True)
461 461 matches = []
462 462 for rev in revs:
463 463 node = repo.changelog.node(rev)
464 464 if hex(node).startswith(prefix):
465 465 matches.append(node)
466 466 if len(matches) == 1:
467 467 return matches[0]
468 468 raise
469 469 if node is None:
470 470 return
471 471 repo.changelog.rev(node) # make sure node isn't filtered
472 472 return node
473 473
474 474 def mayberevnum(repo, prefix):
475 475 """Checks if the given prefix may be mistaken for a revision number"""
476 476 try:
477 477 i = int(prefix)
478 478 # if we are a pure int, then starting with zero will not be
479 479 # confused as a rev; or, obviously, if the int is larger
480 480 # than the value of the tip rev. We still need to disambiguate if
481 481 # prefix == '0', since that *is* a valid revnum.
482 482 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
483 483 return False
484 484 return True
485 485 except ValueError:
486 486 return False
487 487
488 488 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
489 489 """Find the shortest unambiguous prefix that matches hexnode.
490 490
491 491 If "cache" is not None, it must be a dictionary that can be used for
492 492 caching between calls to this method.
493 493 """
494 494 # _partialmatch() of filtered changelog could take O(len(repo)) time,
495 495 # which would be unacceptably slow. So we look for hash collisions in
496 496 # unfiltered space, which means some hashes may be slightly longer.
497 497
498 minlength = max(minlength, 1)
499
498 500 def disambiguate(prefix):
499 501 """Disambiguate against revnums."""
500 502 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
501 503 if mayberevnum(repo, prefix):
502 504 return 'x' + prefix
503 505 else:
504 506 return prefix
505 507
506 508 hexnode = hex(node)
507 509 for length in range(len(prefix), len(hexnode) + 1):
508 510 prefix = hexnode[:length]
509 511 if not mayberevnum(repo, prefix):
510 512 return prefix
511 513
512 514 cl = repo.unfiltered().changelog
513 515 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
514 516 if revset:
515 517 revs = None
516 518 if cache is not None:
517 519 revs = cache.get('disambiguationrevset')
518 520 if revs is None:
519 521 revs = repo.anyrevs([revset], user=True)
520 522 if cache is not None:
521 523 cache['disambiguationrevset'] = revs
522 524 if cl.rev(node) in revs:
523 525 hexnode = hex(node)
524 526 nodetree = None
525 527 if cache is not None:
526 528 nodetree = cache.get('disambiguationnodetree')
527 529 if not nodetree:
528 530 try:
529 531 nodetree = parsers.nodetree(cl.index, len(revs))
530 532 except AttributeError:
531 533 # no native nodetree
532 534 pass
533 535 else:
534 536 for r in revs:
535 537 nodetree.insert(r)
536 538 if cache is not None:
537 539 cache['disambiguationnodetree'] = nodetree
538 540 if nodetree is not None:
539 541 length = max(nodetree.shortest(node), minlength)
540 542 prefix = hexnode[:length]
541 543 return disambiguate(prefix)
542 544 for length in range(minlength, len(hexnode) + 1):
543 545 matches = []
544 546 prefix = hexnode[:length]
545 547 for rev in revs:
546 548 otherhexnode = repo[rev].hex()
547 549 if prefix == otherhexnode[:length]:
548 550 matches.append(otherhexnode)
549 551 if len(matches) == 1:
550 552 return disambiguate(prefix)
551 553
552 554 try:
553 555 return disambiguate(cl.shortest(node, minlength))
554 556 except error.LookupError:
555 557 raise error.RepoLookupError()
556 558
557 559 def isrevsymbol(repo, symbol):
558 560 """Checks if a symbol exists in the repo.
559 561
560 562 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
561 563 symbol is an ambiguous nodeid prefix.
562 564 """
563 565 try:
564 566 revsymbol(repo, symbol)
565 567 return True
566 568 except error.RepoLookupError:
567 569 return False
568 570
569 571 def revsymbol(repo, symbol):
570 572 """Returns a context given a single revision symbol (as string).
571 573
572 574 This is similar to revsingle(), but accepts only a single revision symbol,
573 575 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
574 576 not "max(public())".
575 577 """
576 578 if not isinstance(symbol, bytes):
577 579 msg = ("symbol (%s of type %s) was not a string, did you mean "
578 580 "repo[symbol]?" % (symbol, type(symbol)))
579 581 raise error.ProgrammingError(msg)
580 582 try:
581 583 if symbol in ('.', 'tip', 'null'):
582 584 return repo[symbol]
583 585
584 586 try:
585 587 r = int(symbol)
586 588 if '%d' % r != symbol:
587 589 raise ValueError
588 590 l = len(repo.changelog)
589 591 if r < 0:
590 592 r += l
591 593 if r < 0 or r >= l and r != wdirrev:
592 594 raise ValueError
593 595 return repo[r]
594 596 except error.FilteredIndexError:
595 597 raise
596 598 except (ValueError, OverflowError, IndexError):
597 599 pass
598 600
599 601 if len(symbol) == 40:
600 602 try:
601 603 node = bin(symbol)
602 604 rev = repo.changelog.rev(node)
603 605 return repo[rev]
604 606 except error.FilteredLookupError:
605 607 raise
606 608 except (TypeError, LookupError):
607 609 pass
608 610
609 611 # look up bookmarks through the name interface
610 612 try:
611 613 node = repo.names.singlenode(repo, symbol)
612 614 rev = repo.changelog.rev(node)
613 615 return repo[rev]
614 616 except KeyError:
615 617 pass
616 618
617 619 node = resolvehexnodeidprefix(repo, symbol)
618 620 if node is not None:
619 621 rev = repo.changelog.rev(node)
620 622 return repo[rev]
621 623
622 624 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
623 625
624 626 except error.WdirUnsupported:
625 627 return repo[None]
626 628 except (error.FilteredIndexError, error.FilteredLookupError,
627 629 error.FilteredRepoLookupError):
628 630 raise _filterederror(repo, symbol)
629 631
630 632 def _filterederror(repo, changeid):
631 633 """build an exception to be raised about a filtered changeid
632 634
633 635 This is extracted in a function to help extensions (eg: evolve) to
634 636 experiment with various message variants."""
635 637 if repo.filtername.startswith('visible'):
636 638
637 639 # Check if the changeset is obsolete
638 640 unfilteredrepo = repo.unfiltered()
639 641 ctx = revsymbol(unfilteredrepo, changeid)
640 642
641 643 # If the changeset is obsolete, enrich the message with the reason
642 644 # that made this changeset not visible
643 645 if ctx.obsolete():
644 646 msg = obsutil._getfilteredreason(repo, changeid, ctx)
645 647 else:
646 648 msg = _("hidden revision '%s'") % changeid
647 649
648 650 hint = _('use --hidden to access hidden revisions')
649 651
650 652 return error.FilteredRepoLookupError(msg, hint=hint)
651 653 msg = _("filtered revision '%s' (not in '%s' subset)")
652 654 msg %= (changeid, repo.filtername)
653 655 return error.FilteredRepoLookupError(msg)
654 656
655 657 def revsingle(repo, revspec, default='.', localalias=None):
656 658 if not revspec and revspec != 0:
657 659 return repo[default]
658 660
659 661 l = revrange(repo, [revspec], localalias=localalias)
660 662 if not l:
661 663 raise error.Abort(_('empty revision set'))
662 664 return repo[l.last()]
663 665
664 666 def _pairspec(revspec):
665 667 tree = revsetlang.parse(revspec)
666 668 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
667 669
668 670 def revpair(repo, revs):
669 671 if not revs:
670 672 return repo['.'], repo[None]
671 673
672 674 l = revrange(repo, revs)
673 675
674 676 if not l:
675 677 first = second = None
676 678 elif l.isascending():
677 679 first = l.min()
678 680 second = l.max()
679 681 elif l.isdescending():
680 682 first = l.max()
681 683 second = l.min()
682 684 else:
683 685 first = l.first()
684 686 second = l.last()
685 687
686 688 if first is None:
687 689 raise error.Abort(_('empty revision range'))
688 690 if (first == second and len(revs) >= 2
689 691 and not all(revrange(repo, [r]) for r in revs)):
690 692 raise error.Abort(_('empty revision on one side of range'))
691 693
692 694 # if top-level is range expression, the result must always be a pair
693 695 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
694 696 return repo[first], repo[None]
695 697
696 698 return repo[first], repo[second]
697 699
698 700 def revrange(repo, specs, localalias=None):
699 701 """Execute 1 to many revsets and return the union.
700 702
701 703 This is the preferred mechanism for executing revsets using user-specified
702 704 config options, such as revset aliases.
703 705
704 706 The revsets specified by ``specs`` will be executed via a chained ``OR``
705 707 expression. If ``specs`` is empty, an empty result is returned.
706 708
707 709 ``specs`` can contain integers, in which case they are assumed to be
708 710 revision numbers.
709 711
710 712 It is assumed the revsets are already formatted. If you have arguments
711 713 that need to be expanded in the revset, call ``revsetlang.formatspec()``
712 714 and pass the result as an element of ``specs``.
713 715
714 716 Specifying a single revset is allowed.
715 717
716 718 Returns a ``revset.abstractsmartset`` which is a list-like interface over
717 719 integer revisions.
718 720 """
719 721 allspecs = []
720 722 for spec in specs:
721 723 if isinstance(spec, int):
722 724 spec = revsetlang.formatspec('rev(%d)', spec)
723 725 allspecs.append(spec)
724 726 return repo.anyrevs(allspecs, user=True, localalias=localalias)
725 727
726 728 def meaningfulparents(repo, ctx):
727 729 """Return list of meaningful (or all if debug) parentrevs for rev.
728 730
729 731 For merges (two non-nullrev revisions) both parents are meaningful.
730 732 Otherwise the first parent revision is considered meaningful if it
731 733 is not the preceding revision.
732 734 """
733 735 parents = ctx.parents()
734 736 if len(parents) > 1:
735 737 return parents
736 738 if repo.ui.debugflag:
737 739 return [parents[0], repo[nullrev]]
738 740 if parents[0].rev() >= intrev(ctx) - 1:
739 741 return []
740 742 return parents
741 743
742 744 def expandpats(pats):
743 745 '''Expand bare globs when running on windows.
744 746 On posix we assume it has already been done by sh.'''
745 747 if not util.expandglobs:
746 748 return list(pats)
747 749 ret = []
748 750 for kindpat in pats:
749 751 kind, pat = matchmod._patsplit(kindpat, None)
750 752 if kind is None:
751 753 try:
752 754 globbed = glob.glob(pat)
753 755 except re.error:
754 756 globbed = [pat]
755 757 if globbed:
756 758 ret.extend(globbed)
757 759 continue
758 760 ret.append(kindpat)
759 761 return ret
760 762
761 763 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
762 764 badfn=None):
763 765 '''Return a matcher and the patterns that were used.
764 766 The matcher will warn about bad matches, unless an alternate badfn callback
765 767 is provided.'''
766 768 if pats == ("",):
767 769 pats = []
768 770 if opts is None:
769 771 opts = {}
770 772 if not globbed and default == 'relpath':
771 773 pats = expandpats(pats or [])
772 774
773 775 def bad(f, msg):
774 776 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
775 777
776 778 if badfn is None:
777 779 badfn = bad
778 780
779 781 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
780 782 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
781 783
782 784 if m.always():
783 785 pats = []
784 786 return m, pats
785 787
786 788 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
787 789 badfn=None):
788 790 '''Return a matcher that will warn about bad matches.'''
789 791 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
790 792
791 793 def matchall(repo):
792 794 '''Return a matcher that will efficiently match everything.'''
793 795 return matchmod.always(repo.root, repo.getcwd())
794 796
795 797 def matchfiles(repo, files, badfn=None):
796 798 '''Return a matcher that will efficiently match exactly these files.'''
797 799 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
798 800
799 801 def parsefollowlinespattern(repo, rev, pat, msg):
800 802 """Return a file name from `pat` pattern suitable for usage in followlines
801 803 logic.
802 804 """
803 805 if not matchmod.patkind(pat):
804 806 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
805 807 else:
806 808 ctx = repo[rev]
807 809 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
808 810 files = [f for f in ctx if m(f)]
809 811 if len(files) != 1:
810 812 raise error.ParseError(msg)
811 813 return files[0]
812 814
813 815 def origpath(ui, repo, filepath):
814 816 '''customize where .orig files are created
815 817
816 818 Fetch user defined path from config file: [ui] origbackuppath = <path>
817 819 Fall back to default (filepath with .orig suffix) if not specified
818 820 '''
819 821 origbackuppath = ui.config('ui', 'origbackuppath')
820 822 if not origbackuppath:
821 823 return filepath + ".orig"
822 824
823 825 # Convert filepath from an absolute path into a path inside the repo.
824 826 filepathfromroot = util.normpath(os.path.relpath(filepath,
825 827 start=repo.root))
826 828
827 829 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
828 830 origbackupdir = origvfs.dirname(filepathfromroot)
829 831 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
830 832 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
831 833
832 834 # Remove any files that conflict with the backup file's path
833 835 for f in reversed(list(util.finddirs(filepathfromroot))):
834 836 if origvfs.isfileorlink(f):
835 837 ui.note(_('removing conflicting file: %s\n')
836 838 % origvfs.join(f))
837 839 origvfs.unlink(f)
838 840 break
839 841
840 842 origvfs.makedirs(origbackupdir)
841 843
842 844 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
843 845 ui.note(_('removing conflicting directory: %s\n')
844 846 % origvfs.join(filepathfromroot))
845 847 origvfs.rmtree(filepathfromroot, forcibly=True)
846 848
847 849 return origvfs.join(filepathfromroot)
848 850
849 851 class _containsnode(object):
850 852 """proxy __contains__(node) to container.__contains__ which accepts revs"""
851 853
852 854 def __init__(self, repo, revcontainer):
853 855 self._torev = repo.changelog.rev
854 856 self._revcontains = revcontainer.__contains__
855 857
856 858 def __contains__(self, node):
857 859 return self._revcontains(self._torev(node))
858 860
859 861 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
860 862 fixphase=False, targetphase=None, backup=True):
861 863 """do common cleanups when old nodes are replaced by new nodes
862 864
863 865 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
864 866 (we might also want to move working directory parent in the future)
865 867
866 868 By default, bookmark moves are calculated automatically from 'replacements',
867 869 but 'moves' can be used to override that. Also, 'moves' may include
868 870 additional bookmark moves that should not have associated obsmarkers.
869 871
870 872 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
871 873 have replacements. operation is a string, like "rebase".
872 874
873 875 metadata is a dictionary containing metadata to be stored in obsmarker if
874 876 obsolescence is enabled.
875 877 """
876 878 assert fixphase or targetphase is None
877 879 if not replacements and not moves:
878 880 return
879 881
880 882 # translate mapping's other forms
881 883 if not util.safehasattr(replacements, 'items'):
882 884 replacements = {(n,): () for n in replacements}
883 885 else:
884 886 # upgrading non-tuple "source" to tuple ones for BC
885 887 repls = {}
886 888 for key, value in replacements.items():
887 889 if not isinstance(key, tuple):
888 890 key = (key,)
889 891 repls[key] = value
890 892 replacements = repls
891 893
892 894 # Calculate bookmark movements
893 895 if moves is None:
894 896 moves = {}
895 897 # Unfiltered repo is needed since nodes in replacements might be hidden.
896 898 unfi = repo.unfiltered()
897 899 for oldnodes, newnodes in replacements.items():
898 900 for oldnode in oldnodes:
899 901 if oldnode in moves:
900 902 continue
901 903 if len(newnodes) > 1:
902 904 # usually a split, take the one with biggest rev number
903 905 newnode = next(unfi.set('max(%ln)', newnodes)).node()
904 906 elif len(newnodes) == 0:
905 907 # move bookmark backwards
906 908 allreplaced = []
907 909 for rep in replacements:
908 910 allreplaced.extend(rep)
909 911 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
910 912 allreplaced))
911 913 if roots:
912 914 newnode = roots[0].node()
913 915 else:
914 916 newnode = nullid
915 917 else:
916 918 newnode = newnodes[0]
917 919 moves[oldnode] = newnode
918 920
919 921 allnewnodes = [n for ns in replacements.values() for n in ns]
920 922 toretract = {}
921 923 toadvance = {}
922 924 if fixphase:
923 925 precursors = {}
924 926 for oldnodes, newnodes in replacements.items():
925 927 for oldnode in oldnodes:
926 928 for newnode in newnodes:
927 929 precursors.setdefault(newnode, []).append(oldnode)
928 930
929 931 allnewnodes.sort(key=lambda n: unfi[n].rev())
930 932 newphases = {}
931 933 def phase(ctx):
932 934 return newphases.get(ctx.node(), ctx.phase())
933 935 for newnode in allnewnodes:
934 936 ctx = unfi[newnode]
935 937 parentphase = max(phase(p) for p in ctx.parents())
936 938 if targetphase is None:
937 939 oldphase = max(unfi[oldnode].phase()
938 940 for oldnode in precursors[newnode])
939 941 newphase = max(oldphase, parentphase)
940 942 else:
941 943 newphase = max(targetphase, parentphase)
942 944 newphases[newnode] = newphase
943 945 if newphase > ctx.phase():
944 946 toretract.setdefault(newphase, []).append(newnode)
945 947 elif newphase < ctx.phase():
946 948 toadvance.setdefault(newphase, []).append(newnode)
947 949
948 950 with repo.transaction('cleanup') as tr:
949 951 # Move bookmarks
950 952 bmarks = repo._bookmarks
951 953 bmarkchanges = []
952 954 for oldnode, newnode in moves.items():
953 955 oldbmarks = repo.nodebookmarks(oldnode)
954 956 if not oldbmarks:
955 957 continue
956 958 from . import bookmarks # avoid import cycle
957 959 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
958 960 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
959 961 hex(oldnode), hex(newnode)))
960 962 # Delete divergent bookmarks being parents of related newnodes
961 963 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
962 964 allnewnodes, newnode, oldnode)
963 965 deletenodes = _containsnode(repo, deleterevs)
964 966 for name in oldbmarks:
965 967 bmarkchanges.append((name, newnode))
966 968 for b in bookmarks.divergent2delete(repo, deletenodes, name):
967 969 bmarkchanges.append((b, None))
968 970
969 971 if bmarkchanges:
970 972 bmarks.applychanges(repo, tr, bmarkchanges)
971 973
972 974 for phase, nodes in toretract.items():
973 975 phases.retractboundary(repo, tr, phase, nodes)
974 976 for phase, nodes in toadvance.items():
975 977 phases.advanceboundary(repo, tr, phase, nodes)
976 978
977 979 # Obsolete or strip nodes
978 980 if obsolete.isenabled(repo, obsolete.createmarkersopt):
979 981 # If a node is already obsoleted, and we want to obsolete it
980 982 # without a successor, skip that obsolete request since it's
981 983 # unnecessary. That's the "if s or not isobs(n)" check below.
982 984 # Also sort the nodes in topological order, which might be useful for
983 985 # some obsstore logic.
984 986 # NOTE: the sorting might belong to createmarkers.
985 987 torev = unfi.changelog.rev
986 988 sortfunc = lambda ns: torev(ns[0][0])
987 989 rels = []
988 990 for ns, s in sorted(replacements.items(), key=sortfunc):
989 991 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
990 992 rels.append(rel)
991 993 if rels:
992 994 obsolete.createmarkers(repo, rels, operation=operation,
993 995 metadata=metadata)
994 996 else:
995 997 from . import repair # avoid import cycle
996 998 tostrip = list(n for ns in replacements for n in ns)
997 999 if tostrip:
998 1000 repair.delayedstrip(repo.ui, repo, tostrip, operation,
999 1001 backup=backup)
1000 1002
1001 1003 def addremove(repo, matcher, prefix, opts=None):
1002 1004 if opts is None:
1003 1005 opts = {}
1004 1006 m = matcher
1005 1007 dry_run = opts.get('dry_run')
1006 1008 try:
1007 1009 similarity = float(opts.get('similarity') or 0)
1008 1010 except ValueError:
1009 1011 raise error.Abort(_('similarity must be a number'))
1010 1012 if similarity < 0 or similarity > 100:
1011 1013 raise error.Abort(_('similarity must be between 0 and 100'))
1012 1014 similarity /= 100.0
1013 1015
1014 1016 ret = 0
1015 1017 join = lambda f: os.path.join(prefix, f)
1016 1018
1017 1019 wctx = repo[None]
1018 1020 for subpath in sorted(wctx.substate):
1019 1021 submatch = matchmod.subdirmatcher(subpath, m)
1020 1022 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1021 1023 sub = wctx.sub(subpath)
1022 1024 try:
1023 1025 if sub.addremove(submatch, prefix, opts):
1024 1026 ret = 1
1025 1027 except error.LookupError:
1026 1028 repo.ui.status(_("skipping missing subrepository: %s\n")
1027 1029 % join(subpath))
1028 1030
1029 1031 rejected = []
1030 1032 def badfn(f, msg):
1031 1033 if f in m.files():
1032 1034 m.bad(f, msg)
1033 1035 rejected.append(f)
1034 1036
1035 1037 badmatch = matchmod.badmatch(m, badfn)
1036 1038 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1037 1039 badmatch)
1038 1040
1039 1041 unknownset = set(unknown + forgotten)
1040 1042 toprint = unknownset.copy()
1041 1043 toprint.update(deleted)
1042 1044 for abs in sorted(toprint):
1043 1045 if repo.ui.verbose or not m.exact(abs):
1044 1046 if abs in unknownset:
1045 1047 status = _('adding %s\n') % m.uipath(abs)
1046 1048 label = 'ui.addremove.added'
1047 1049 else:
1048 1050 status = _('removing %s\n') % m.uipath(abs)
1049 1051 label = 'ui.addremove.removed'
1050 1052 repo.ui.status(status, label=label)
1051 1053
1052 1054 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1053 1055 similarity)
1054 1056
1055 1057 if not dry_run:
1056 1058 _markchanges(repo, unknown + forgotten, deleted, renames)
1057 1059
1058 1060 for f in rejected:
1059 1061 if f in m.files():
1060 1062 return 1
1061 1063 return ret
1062 1064
1063 1065 def marktouched(repo, files, similarity=0.0):
1064 1066 '''Assert that files have somehow been operated upon. files are relative to
1065 1067 the repo root.'''
1066 1068 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1067 1069 rejected = []
1068 1070
1069 1071 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1070 1072
1071 1073 if repo.ui.verbose:
1072 1074 unknownset = set(unknown + forgotten)
1073 1075 toprint = unknownset.copy()
1074 1076 toprint.update(deleted)
1075 1077 for abs in sorted(toprint):
1076 1078 if abs in unknownset:
1077 1079 status = _('adding %s\n') % abs
1078 1080 else:
1079 1081 status = _('removing %s\n') % abs
1080 1082 repo.ui.status(status)
1081 1083
1082 1084 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1083 1085 similarity)
1084 1086
1085 1087 _markchanges(repo, unknown + forgotten, deleted, renames)
1086 1088
1087 1089 for f in rejected:
1088 1090 if f in m.files():
1089 1091 return 1
1090 1092 return 0
1091 1093
1092 1094 def _interestingfiles(repo, matcher):
1093 1095 '''Walk dirstate with matcher, looking for files that addremove would care
1094 1096 about.
1095 1097
1096 1098 This is different from dirstate.status because it doesn't care about
1097 1099 whether files are modified or clean.'''
1098 1100 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1099 1101 audit_path = pathutil.pathauditor(repo.root, cached=True)
1100 1102
1101 1103 ctx = repo[None]
1102 1104 dirstate = repo.dirstate
1103 1105 matcher = repo.narrowmatch(matcher, includeexact=True)
1104 1106 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1105 1107 unknown=True, ignored=False, full=False)
1106 1108 for abs, st in walkresults.iteritems():
1107 1109 dstate = dirstate[abs]
1108 1110 if dstate == '?' and audit_path.check(abs):
1109 1111 unknown.append(abs)
1110 1112 elif dstate != 'r' and not st:
1111 1113 deleted.append(abs)
1112 1114 elif dstate == 'r' and st:
1113 1115 forgotten.append(abs)
1114 1116 # for finding renames
1115 1117 elif dstate == 'r' and not st:
1116 1118 removed.append(abs)
1117 1119 elif dstate == 'a':
1118 1120 added.append(abs)
1119 1121
1120 1122 return added, unknown, deleted, removed, forgotten
1121 1123
1122 1124 def _findrenames(repo, matcher, added, removed, similarity):
1123 1125 '''Find renames from removed files to added ones.'''
1124 1126 renames = {}
1125 1127 if similarity > 0:
1126 1128 for old, new, score in similar.findrenames(repo, added, removed,
1127 1129 similarity):
1128 1130 if (repo.ui.verbose or not matcher.exact(old)
1129 1131 or not matcher.exact(new)):
1130 1132 repo.ui.status(_('recording removal of %s as rename to %s '
1131 1133 '(%d%% similar)\n') %
1132 1134 (matcher.rel(old), matcher.rel(new),
1133 1135 score * 100))
1134 1136 renames[new] = old
1135 1137 return renames
1136 1138
1137 1139 def _markchanges(repo, unknown, deleted, renames):
1138 1140 '''Marks the files in unknown as added, the files in deleted as removed,
1139 1141 and the files in renames as copied.'''
1140 1142 wctx = repo[None]
1141 1143 with repo.wlock():
1142 1144 wctx.forget(deleted)
1143 1145 wctx.add(unknown)
1144 1146 for new, old in renames.iteritems():
1145 1147 wctx.copy(old, new)
1146 1148
1147 1149 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1148 1150 """Update the dirstate to reflect the intent of copying src to dst. For
1149 1151 different reasons it might not end with dst being marked as copied from src.
1150 1152 """
1151 1153 origsrc = repo.dirstate.copied(src) or src
1152 1154 if dst == origsrc: # copying back a copy?
1153 1155 if repo.dirstate[dst] not in 'mn' and not dryrun:
1154 1156 repo.dirstate.normallookup(dst)
1155 1157 else:
1156 1158 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1157 1159 if not ui.quiet:
1158 1160 ui.warn(_("%s has not been committed yet, so no copy "
1159 1161 "data will be stored for %s.\n")
1160 1162 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1161 1163 if repo.dirstate[dst] in '?r' and not dryrun:
1162 1164 wctx.add([dst])
1163 1165 elif not dryrun:
1164 1166 wctx.copy(origsrc, dst)
1165 1167
1166 1168 def writerequires(opener, requirements):
1167 1169 with opener('requires', 'w') as fp:
1168 1170 for r in sorted(requirements):
1169 1171 fp.write("%s\n" % r)
1170 1172
1171 1173 class filecachesubentry(object):
1172 1174 def __init__(self, path, stat):
1173 1175 self.path = path
1174 1176 self.cachestat = None
1175 1177 self._cacheable = None
1176 1178
1177 1179 if stat:
1178 1180 self.cachestat = filecachesubentry.stat(self.path)
1179 1181
1180 1182 if self.cachestat:
1181 1183 self._cacheable = self.cachestat.cacheable()
1182 1184 else:
1183 1185 # None means we don't know yet
1184 1186 self._cacheable = None
1185 1187
1186 1188 def refresh(self):
1187 1189 if self.cacheable():
1188 1190 self.cachestat = filecachesubentry.stat(self.path)
1189 1191
1190 1192 def cacheable(self):
1191 1193 if self._cacheable is not None:
1192 1194 return self._cacheable
1193 1195
1194 1196 # we don't know yet, assume it is for now
1195 1197 return True
1196 1198
1197 1199 def changed(self):
1198 1200 # no point in going further if we can't cache it
1199 1201 if not self.cacheable():
1200 1202 return True
1201 1203
1202 1204 newstat = filecachesubentry.stat(self.path)
1203 1205
1204 1206 # we may not know if it's cacheable yet, check again now
1205 1207 if newstat and self._cacheable is None:
1206 1208 self._cacheable = newstat.cacheable()
1207 1209
1208 1210 # check again
1209 1211 if not self._cacheable:
1210 1212 return True
1211 1213
1212 1214 if self.cachestat != newstat:
1213 1215 self.cachestat = newstat
1214 1216 return True
1215 1217 else:
1216 1218 return False
1217 1219
1218 1220 @staticmethod
1219 1221 def stat(path):
1220 1222 try:
1221 1223 return util.cachestat(path)
1222 1224 except OSError as e:
1223 1225 if e.errno != errno.ENOENT:
1224 1226 raise
1225 1227
1226 1228 class filecacheentry(object):
1227 1229 def __init__(self, paths, stat=True):
1228 1230 self._entries = []
1229 1231 for path in paths:
1230 1232 self._entries.append(filecachesubentry(path, stat))
1231 1233
1232 1234 def changed(self):
1233 1235 '''true if any entry has changed'''
1234 1236 for entry in self._entries:
1235 1237 if entry.changed():
1236 1238 return True
1237 1239 return False
1238 1240
1239 1241 def refresh(self):
1240 1242 for entry in self._entries:
1241 1243 entry.refresh()
1242 1244
1243 1245 class filecache(object):
1244 1246 """A property like decorator that tracks files under .hg/ for updates.
1245 1247
1246 1248 On first access, the files defined as arguments are stat()ed and the
1247 1249 results cached. The decorated function is called. The results are stashed
1248 1250 away in a ``_filecache`` dict on the object whose method is decorated.
1249 1251
1250 1252 On subsequent access, the cached result is returned.
1251 1253
1252 1254 On external property set operations, stat() calls are performed and the new
1253 1255 value is cached.
1254 1256
1255 1257 On property delete operations, cached data is removed.
1256 1258
1257 1259 When using the property API, cached data is always returned, if available:
1258 1260 no stat() is performed to check if the file has changed and if the function
1259 1261 needs to be called to reflect file changes.
1260 1262
1261 1263 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1262 1264 can populate an entry before the property's getter is called. In this case,
1263 1265 entries in ``_filecache`` will be used during property operations,
1264 1266 if available. If the underlying file changes, it is up to external callers
1265 1267 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1266 1268 method result as well as possibly calling ``del obj._filecache[attr]`` to
1267 1269 remove the ``filecacheentry``.
1268 1270 """
1269 1271
1270 1272 def __init__(self, *paths):
1271 1273 self.paths = paths
1272 1274
1273 1275 def join(self, obj, fname):
1274 1276 """Used to compute the runtime path of a cached file.
1275 1277
1276 1278 Users should subclass filecache and provide their own version of this
1277 1279 function to call the appropriate join function on 'obj' (an instance
1278 1280 of the class whose member function was decorated).
1279 1281 """
1280 1282 raise NotImplementedError
1281 1283
1282 1284 def __call__(self, func):
1283 1285 self.func = func
1284 1286 self.sname = func.__name__
1285 1287 self.name = pycompat.sysbytes(self.sname)
1286 1288 return self
1287 1289
1288 1290 def __get__(self, obj, type=None):
1289 1291 # if accessed on the class, return the descriptor itself.
1290 1292 if obj is None:
1291 1293 return self
1292 1294 # do we need to check if the file changed?
1293 1295 if self.sname in obj.__dict__:
1294 1296 assert self.name in obj._filecache, self.name
1295 1297 return obj.__dict__[self.sname]
1296 1298
1297 1299 entry = obj._filecache.get(self.name)
1298 1300
1299 1301 if entry:
1300 1302 if entry.changed():
1301 1303 entry.obj = self.func(obj)
1302 1304 else:
1303 1305 paths = [self.join(obj, path) for path in self.paths]
1304 1306
1305 1307 # We stat -before- creating the object so our cache doesn't lie if
1306 1308 # a writer modified the file between the time we read and stat it
1307 1309 entry = filecacheentry(paths, True)
1308 1310 entry.obj = self.func(obj)
1309 1311
1310 1312 obj._filecache[self.name] = entry
1311 1313
1312 1314 obj.__dict__[self.sname] = entry.obj
1313 1315 return entry.obj
1314 1316
1315 1317 def __set__(self, obj, value):
1316 1318 if self.name not in obj._filecache:
1317 1319 # we add an entry for the missing value because X in __dict__
1318 1320 # implies X in _filecache
1319 1321 paths = [self.join(obj, path) for path in self.paths]
1320 1322 ce = filecacheentry(paths, False)
1321 1323 obj._filecache[self.name] = ce
1322 1324 else:
1323 1325 ce = obj._filecache[self.name]
1324 1326
1325 1327 ce.obj = value # update cached copy
1326 1328 obj.__dict__[self.sname] = value # update copy returned by obj.x
1327 1329
1328 1330 def __delete__(self, obj):
1329 1331 try:
1330 1332 del obj.__dict__[self.sname]
1331 1333 except KeyError:
1332 1334 raise AttributeError(self.sname)
1333 1335
1334 1336 def extdatasource(repo, source):
1335 1337 """Gather a map of rev -> value dict from the specified source
1336 1338
1337 1339 A source spec is treated as a URL, with a special case shell: type
1338 1340 for parsing the output from a shell command.
1339 1341
1340 1342 The data is parsed as a series of newline-separated records where
1341 1343 each record is a revision specifier optionally followed by a space
1342 1344 and a freeform string value. If the revision is known locally, it
1343 1345 is converted to a rev, otherwise the record is skipped.
1344 1346
1345 1347 Note that both key and value are treated as UTF-8 and converted to
1346 1348 the local encoding. This allows uniformity between local and
1347 1349 remote data sources.
1348 1350 """
1349 1351
1350 1352 spec = repo.ui.config("extdata", source)
1351 1353 if not spec:
1352 1354 raise error.Abort(_("unknown extdata source '%s'") % source)
1353 1355
1354 1356 data = {}
1355 1357 src = proc = None
1356 1358 try:
1357 1359 if spec.startswith("shell:"):
1358 1360 # external commands should be run relative to the repo root
1359 1361 cmd = spec[6:]
1360 1362 proc = subprocess.Popen(procutil.tonativestr(cmd),
1361 1363 shell=True, bufsize=-1,
1362 1364 close_fds=procutil.closefds,
1363 1365 stdout=subprocess.PIPE,
1364 1366 cwd=procutil.tonativestr(repo.root))
1365 1367 src = proc.stdout
1366 1368 else:
1367 1369 # treat as a URL or file
1368 1370 src = url.open(repo.ui, spec)
1369 1371 for l in src:
1370 1372 if " " in l:
1371 1373 k, v = l.strip().split(" ", 1)
1372 1374 else:
1373 1375 k, v = l.strip(), ""
1374 1376
1375 1377 k = encoding.tolocal(k)
1376 1378 try:
1377 1379 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1378 1380 except (error.LookupError, error.RepoLookupError):
1379 1381 pass # we ignore data for nodes that don't exist locally
1380 1382 finally:
1381 1383 if proc:
1382 1384 proc.communicate()
1383 1385 if src:
1384 1386 src.close()
1385 1387 if proc and proc.returncode != 0:
1386 1388 raise error.Abort(_("extdata command '%s' failed: %s")
1387 1389 % (cmd, procutil.explainexit(proc.returncode)))
1388 1390
1389 1391 return data
1390 1392
1391 1393 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1392 1394 if lock is None:
1393 1395 raise error.LockInheritanceContractViolation(
1394 1396 'lock can only be inherited while held')
1395 1397 if environ is None:
1396 1398 environ = {}
1397 1399 with lock.inherit() as locker:
1398 1400 environ[envvar] = locker
1399 1401 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1400 1402
1401 1403 def wlocksub(repo, cmd, *args, **kwargs):
1402 1404 """run cmd as a subprocess that allows inheriting repo's wlock
1403 1405
1404 1406 This can only be called while the wlock is held. This takes all the
1405 1407 arguments that ui.system does, and returns the exit code of the
1406 1408 subprocess."""
1407 1409 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1408 1410 **kwargs)
1409 1411
1410 1412 class progress(object):
1411 1413 def __init__(self, ui, topic, unit="", total=None):
1412 1414 self.ui = ui
1413 1415 self.pos = 0
1414 1416 self.topic = topic
1415 1417 self.unit = unit
1416 1418 self.total = total
1417 1419
1418 1420 def __enter__(self):
1419 1421 return self
1420 1422
1421 1423 def __exit__(self, exc_type, exc_value, exc_tb):
1422 1424 self.complete()
1423 1425
1424 1426 def update(self, pos, item="", total=None):
1425 1427 assert pos is not None
1426 1428 if total:
1427 1429 self.total = total
1428 1430 self.pos = pos
1429 1431 self._print(item)
1430 1432
1431 1433 def increment(self, step=1, item="", total=None):
1432 1434 self.update(self.pos + step, item, total)
1433 1435
1434 1436 def complete(self):
1435 1437 self.ui.progress(self.topic, None)
1436 1438
1437 1439 def _print(self, item):
1438 1440 self.ui.progress(self.topic, self.pos, item, self.unit,
1439 1441 self.total)
1440 1442
1441 1443 def gdinitconfig(ui):
1442 1444 """helper function to know if a repo should be created as general delta
1443 1445 """
1444 1446 # experimental config: format.generaldelta
1445 1447 return (ui.configbool('format', 'generaldelta')
1446 1448 or ui.configbool('format', 'usegeneraldelta')
1447 1449 or ui.configbool('format', 'sparse-revlog'))
1448 1450
1449 1451 def gddeltaconfig(ui):
1450 1452 """helper function to know if incoming delta should be optimised
1451 1453 """
1452 1454 # experimental config: format.generaldelta
1453 1455 return ui.configbool('format', 'generaldelta')
1454 1456
1455 1457 class simplekeyvaluefile(object):
1456 1458 """A simple file with key=value lines
1457 1459
1458 1460 Keys must be alphanumerics and start with a letter, values must not
1459 1461 contain '\n' characters"""
1460 1462 firstlinekey = '__firstline'
1461 1463
1462 1464 def __init__(self, vfs, path, keys=None):
1463 1465 self.vfs = vfs
1464 1466 self.path = path
1465 1467
1466 1468 def read(self, firstlinenonkeyval=False):
1467 1469 """Read the contents of a simple key-value file
1468 1470
1469 1471 'firstlinenonkeyval' indicates whether the first line of file should
1470 1472 be treated as a key-value pair or reuturned fully under the
1471 1473 __firstline key."""
1472 1474 lines = self.vfs.readlines(self.path)
1473 1475 d = {}
1474 1476 if firstlinenonkeyval:
1475 1477 if not lines:
1476 1478 e = _("empty simplekeyvalue file")
1477 1479 raise error.CorruptedState(e)
1478 1480 # we don't want to include '\n' in the __firstline
1479 1481 d[self.firstlinekey] = lines[0][:-1]
1480 1482 del lines[0]
1481 1483
1482 1484 try:
1483 1485 # the 'if line.strip()' part prevents us from failing on empty
1484 1486 # lines which only contain '\n' and therefore are not skipped
1485 1487 # by 'if line'
1486 1488 updatedict = dict(line[:-1].split('=', 1) for line in lines
1487 1489 if line.strip())
1488 1490 if self.firstlinekey in updatedict:
1489 1491 e = _("%r can't be used as a key")
1490 1492 raise error.CorruptedState(e % self.firstlinekey)
1491 1493 d.update(updatedict)
1492 1494 except ValueError as e:
1493 1495 raise error.CorruptedState(str(e))
1494 1496 return d
1495 1497
1496 1498 def write(self, data, firstline=None):
1497 1499 """Write key=>value mapping to a file
1498 1500 data is a dict. Keys must be alphanumerical and start with a letter.
1499 1501 Values must not contain newline characters.
1500 1502
1501 1503 If 'firstline' is not None, it is written to file before
1502 1504 everything else, as it is, not in a key=value form"""
1503 1505 lines = []
1504 1506 if firstline is not None:
1505 1507 lines.append('%s\n' % firstline)
1506 1508
1507 1509 for k, v in data.items():
1508 1510 if k == self.firstlinekey:
1509 1511 e = "key name '%s' is reserved" % self.firstlinekey
1510 1512 raise error.ProgrammingError(e)
1511 1513 if not k[0:1].isalpha():
1512 1514 e = "keys must start with a letter in a key-value file"
1513 1515 raise error.ProgrammingError(e)
1514 1516 if not k.isalnum():
1515 1517 e = "invalid key name in a simple key-value file"
1516 1518 raise error.ProgrammingError(e)
1517 1519 if '\n' in v:
1518 1520 e = "invalid value in a simple key-value file"
1519 1521 raise error.ProgrammingError(e)
1520 1522 lines.append("%s=%s\n" % (k, v))
1521 1523 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1522 1524 fp.write(''.join(lines))
1523 1525
1524 1526 _reportobsoletedsource = [
1525 1527 'debugobsolete',
1526 1528 'pull',
1527 1529 'push',
1528 1530 'serve',
1529 1531 'unbundle',
1530 1532 ]
1531 1533
1532 1534 _reportnewcssource = [
1533 1535 'pull',
1534 1536 'unbundle',
1535 1537 ]
1536 1538
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

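# An illustrative sketch of registering a prefetch function (the extension
# name and helper below are hypothetical):
#
#   def _prefetch(repo, revs, match):
#       ...  # warm a local store with the files matched in 'revs'
#
#   fileprefetchhooks.add('myext', _prefetch)
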
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                           filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search for new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally, but the term has not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets that
            pre-existed the pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

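# A simplified sketch of how these callbacks come to life (the repository
# layer performs the real wiring when it opens a transaction; this is an
# approximation, not a copy of that code):
#
#   tr = repo.transaction('pull')          # txnname selects which reports run
#   registersummarycallback(repo, tr, 'pull')
#   ...                                    # changesets are pulled here
#   tr.close()                             # post-close callbacks emit summaries
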
def getinstabilitymessage(delta, instability):
    """function to return the message to show as a warning about new
    instabilities

    exists as a separate function so that extensions can wrap it to show
    more information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

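# For example (abbreviated hashes invented), six nodes with the default
# maxnumnodes=4 would summarize to something like:
#
#   '1f0dee64cd8a 2dc09a01bc3c 3e1f2c0093c2 4a5b6c7d8e9f and 2 others'
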
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

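# Hedged note on wiring: this check is designed to run from a transaction
# callback, so a violating transaction can still be aborted as a whole;
# core is expected to enable it via the 'experimental.single-head-per-branch'
# config knob, with the actual hook-up living in the repository layer.
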
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

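# Illustrative use (the hash prefix is invented): with the config
# 'experimental.directaccess' enabled, command code handling user revision
# specs may do
#
#   repo = unhidehashlikerevs(repo, ['ffa21969'], 'warn')
#
# and, if the prefix names a hidden changeset, receive a 'visible-hidden'
# filtered repo in which that changeset is accessible; 'warn' additionally
# prints a warning about touching hidden history.
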
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # integer-looking symbols are candidate revision numbers; they are
        # honored only when directaccess.revnums is enabled, while numbers
        # beyond the tip fall through to hash-prefix resolution below
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # only collect revisions that are hidden in the current view
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
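
# Reading the revset above: take everything reachable from the bookmark,
# then subtract history reachable from heads that do not carry the bookmark
# and history reachable from other bookmarks; what remains are the
# changesets reachable only via this bookmark.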