scmutil: fix a comment that doesn't match the code...
Martin von Zweigbergk
r41846:e21183db default
@@ -1,1860 +1,1860 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 nullrev,
25 25 short,
26 26 wdirid,
27 27 wdirrev,
28 28 )
29 29
30 30 from . import (
31 31 encoding,
32 32 error,
33 33 match as matchmod,
34 34 obsolete,
35 35 obsutil,
36 36 pathutil,
37 37 phases,
38 38 policy,
39 39 pycompat,
40 40 revsetlang,
41 41 similar,
42 42 smartset,
43 43 url,
44 44 util,
45 45 vfs,
46 46 )
47 47
48 48 from .utils import (
49 49 procutil,
50 50 stringutil,
51 51 )
52 52
53 53 if pycompat.iswindows:
54 54 from . import scmwindows as scmplatform
55 55 else:
56 56 from . import scmposix as scmplatform
57 57
58 58 parsers = policy.importmod(r'parsers')
59 59
60 60 termsize = scmplatform.termsize
61 61
62 62 class status(tuple):
63 63 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 64 and 'ignored' properties are only relevant to the working copy.
65 65 '''
66 66
67 67 __slots__ = ()
68 68
69 69 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 70 clean):
71 71 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 72 ignored, clean))
73 73
74 74 @property
75 75 def modified(self):
76 76 '''files that have been modified'''
77 77 return self[0]
78 78
79 79 @property
80 80 def added(self):
81 81 '''files that have been added'''
82 82 return self[1]
83 83
84 84 @property
85 85 def removed(self):
86 86 '''files that have been removed'''
87 87 return self[2]
88 88
89 89 @property
90 90 def deleted(self):
91 91 '''files that are in the dirstate, but have been deleted from the
92 92 working copy (aka "missing")
93 93 '''
94 94 return self[3]
95 95
96 96 @property
97 97 def unknown(self):
98 98 '''files not in the dirstate that are not ignored'''
99 99 return self[4]
100 100
101 101 @property
102 102 def ignored(self):
103 103 '''files not in the dirstate that are ignored (by _dirignore())'''
104 104 return self[5]
105 105
106 106 @property
107 107 def clean(self):
108 108 '''files that have not been modified'''
109 109 return self[6]
110 110
111 111 def __repr__(self, *args, **kwargs):
112 112 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 113 r'unknown=%s, ignored=%s, clean=%s>') %
114 114 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115 115
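A minimal usage sketch of the status class above (not part of scmutil.py): the seven file lists are positional, and each named property simply indexes the underlying tuple.

    st = status([b'modified.c'], [b'added.c'], [], [], [], [], [b'clean.c'])
    assert st.modified == [b'modified.c']
    assert st[1] == st.added  # tuple indexing and the properties agree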
116 116 def itersubrepos(ctx1, ctx2):
117 117 """find subrepos in ctx1 or ctx2"""
118 118 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 119 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 120 # has been modified (in ctx2) but not yet committed (in ctx1).
121 121 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 122 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123 123
124 124 missing = set()
125 125
126 126 for subpath in ctx2.substate:
127 127 if subpath not in ctx1.substate:
128 128 del subpaths[subpath]
129 129 missing.add(subpath)
130 130
131 131 for subpath, ctx in sorted(subpaths.iteritems()):
132 132 yield subpath, ctx.sub(subpath)
133 133
134 134 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 135 # status and diff will have an accurate result when it does
136 136 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 137 # against itself.
138 138 for subpath in missing:
139 139 yield subpath, ctx2.nullsub(subpath, ctx1)
140 140
141 141 def nochangesfound(ui, repo, excluded=None):
142 142 '''Report no changes for push/pull, excluded is None or a list of
143 143 nodes excluded from the push/pull.
144 144 '''
145 145 secretlist = []
146 146 if excluded:
147 147 for n in excluded:
148 148 ctx = repo[n]
149 149 if ctx.phase() >= phases.secret and not ctx.extinct():
150 150 secretlist.append(n)
151 151
152 152 if secretlist:
153 153 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 154 % len(secretlist))
155 155 else:
156 156 ui.status(_("no changes found\n"))
157 157
158 158 def callcatch(ui, func):
159 159 """call func() with global exception handling
160 160
161 161 return func() if no exception happens. otherwise do some error handling
162 162 and return an exit code accordingly. does not handle all exceptions.
163 163 """
164 164 try:
165 165 try:
166 166 return func()
167 167 except: # re-raises
168 168 ui.traceback()
169 169 raise
170 170 # Global exception handling, alphabetically
171 171 # Mercurial-specific first, followed by built-in and library exceptions
172 172 except error.LockHeld as inst:
173 173 if inst.errno == errno.ETIMEDOUT:
174 174 reason = _('timed out waiting for lock held by %r') % (
175 175 pycompat.bytestr(inst.locker))
176 176 else:
177 177 reason = _('lock held by %r') % inst.locker
178 178 ui.error(_("abort: %s: %s\n") % (
179 179 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 180 if not inst.locker:
181 181 ui.error(_("(lock might be very busy)\n"))
182 182 except error.LockUnavailable as inst:
183 183 ui.error(_("abort: could not lock %s: %s\n") %
184 184 (inst.desc or stringutil.forcebytestr(inst.filename),
185 185 encoding.strtolocal(inst.strerror)))
186 186 except error.OutOfBandError as inst:
187 187 if inst.args:
188 188 msg = _("abort: remote error:\n")
189 189 else:
190 190 msg = _("abort: remote error\n")
191 191 ui.error(msg)
192 192 if inst.args:
193 193 ui.error(''.join(inst.args))
194 194 if inst.hint:
195 195 ui.error('(%s)\n' % inst.hint)
196 196 except error.RepoError as inst:
197 197 ui.error(_("abort: %s!\n") % inst)
198 198 if inst.hint:
199 199 ui.error(_("(%s)\n") % inst.hint)
200 200 except error.ResponseError as inst:
201 201 ui.error(_("abort: %s") % inst.args[0])
202 202 msg = inst.args[1]
203 203 if isinstance(msg, type(u'')):
204 204 msg = pycompat.sysbytes(msg)
205 205 if not isinstance(msg, bytes):
206 206 ui.error(" %r\n" % (msg,))
207 207 elif not msg:
208 208 ui.error(_(" empty string\n"))
209 209 else:
210 210 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 211 except error.CensoredNodeError as inst:
212 212 ui.error(_("abort: file censored %s!\n") % inst)
213 213 except error.StorageError as inst:
214 214 ui.error(_("abort: %s!\n") % inst)
215 215 if inst.hint:
216 216 ui.error(_("(%s)\n") % inst.hint)
217 217 except error.InterventionRequired as inst:
218 218 ui.error("%s\n" % inst)
219 219 if inst.hint:
220 220 ui.error(_("(%s)\n") % inst.hint)
221 221 return 1
222 222 except error.WdirUnsupported:
223 223 ui.error(_("abort: working directory revision cannot be specified\n"))
224 224 except error.Abort as inst:
225 225 ui.error(_("abort: %s\n") % inst)
226 226 if inst.hint:
227 227 ui.error(_("(%s)\n") % inst.hint)
228 228 except ImportError as inst:
229 229 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
230 230 m = stringutil.forcebytestr(inst).split()[-1]
231 231 if m in "mpatch bdiff".split():
232 232 ui.error(_("(did you forget to compile extensions?)\n"))
233 233 elif m in "zlib".split():
234 234 ui.error(_("(is your Python install correct?)\n"))
235 235 except (IOError, OSError) as inst:
236 236 if util.safehasattr(inst, "code"): # HTTPError
237 237 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
238 238 elif util.safehasattr(inst, "reason"): # URLError or SSLError
239 239 try: # usually it is in the form (errno, strerror)
240 240 reason = inst.reason.args[1]
241 241 except (AttributeError, IndexError):
242 242 # it might be anything, for example a string
243 243 reason = inst.reason
244 244 if isinstance(reason, pycompat.unicode):
245 245 # SSLError of Python 2.7.9 contains a unicode
246 246 reason = encoding.unitolocal(reason)
247 247 ui.error(_("abort: error: %s\n") % reason)
248 248 elif (util.safehasattr(inst, "args")
249 249 and inst.args and inst.args[0] == errno.EPIPE):
250 250 pass
251 251 elif getattr(inst, "strerror", None): # common IOError or OSError
252 252 if getattr(inst, "filename", None) is not None:
253 253 ui.error(_("abort: %s: '%s'\n") % (
254 254 encoding.strtolocal(inst.strerror),
255 255 stringutil.forcebytestr(inst.filename)))
256 256 else:
257 257 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 258 else: # suspicious IOError
259 259 raise
260 260 except MemoryError:
261 261 ui.error(_("abort: out of memory\n"))
262 262 except SystemExit as inst:
263 263 # Commands shouldn't sys.exit directly, but give a return code.
264 264 # Just in case, catch this and pass the exit code to the caller.
265 265 return inst.code
266 266
267 267 return -1
268 268
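A hedged usage sketch: callcatch() returns func()'s own result on success, and otherwise reports the error on ui and returns an exit code ('ui' is assumed to be an existing ui instance).

    def _operation():
        return 0  # commands return exit codes rather than calling sys.exit
    exitcode = callcatch(ui, _operation)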
269 269 def checknewlabel(repo, lbl, kind):
270 270 # Do not use the "kind" parameter in ui output.
271 271 # It makes strings difficult to translate.
272 272 if lbl in ['tip', '.', 'null']:
273 273 raise error.Abort(_("the name '%s' is reserved") % lbl)
274 274 for c in (':', '\0', '\n', '\r'):
275 275 if c in lbl:
276 276 raise error.Abort(
277 277 _("%r cannot be used in a name") % pycompat.bytestr(c))
278 278 try:
279 279 int(lbl)
280 280 raise error.Abort(_("cannot use an integer as a name"))
281 281 except ValueError:
282 282 pass
283 283 if lbl.strip() != lbl:
284 284 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 285
286 286 def checkfilename(f):
287 287 '''Check that the filename f is an acceptable filename for a tracked file'''
288 288 if '\r' in f or '\n' in f:
289 289 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
290 290 % pycompat.bytestr(f))
291 291
292 292 def checkportable(ui, f):
293 293 '''Check if filename f is portable and warn or abort depending on config'''
294 294 checkfilename(f)
295 295 abort, warn = checkportabilityalert(ui)
296 296 if abort or warn:
297 297 msg = util.checkwinfilename(f)
298 298 if msg:
299 299 msg = "%s: %s" % (msg, procutil.shellquote(f))
300 300 if abort:
301 301 raise error.Abort(msg)
302 302 ui.warn(_("warning: %s\n") % msg)
303 303
304 304 def checkportabilityalert(ui):
305 305 '''check if the user's config requests nothing, a warning, or abort for
306 306 non-portable filenames'''
307 307 val = ui.config('ui', 'portablefilenames')
308 308 lval = val.lower()
309 309 bval = stringutil.parsebool(val)
310 310 abort = pycompat.iswindows or lval == 'abort'
311 311 warn = bval or lval == 'warn'
312 312 if bval is None and not (warn or abort or lval == 'ignore'):
313 313 raise error.ConfigError(
314 314 _("ui.portablefilenames value is invalid ('%s')") % val)
315 315 return abort, warn
316 316
317 317 class casecollisionauditor(object):
318 318 def __init__(self, ui, abort, dirstate):
319 319 self._ui = ui
320 320 self._abort = abort
321 321 allfiles = '\0'.join(dirstate._map)
322 322 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
323 323 self._dirstate = dirstate
324 324 # The purpose of _newfiles is so that we don't complain about
325 325 # case collisions if someone were to call this object with the
326 326 # same filename twice.
327 327 self._newfiles = set()
328 328
329 329 def __call__(self, f):
330 330 if f in self._newfiles:
331 331 return
332 332 fl = encoding.lower(f)
333 333 if fl in self._loweredfiles and f not in self._dirstate:
334 334 msg = _('possible case-folding collision for %s') % f
335 335 if self._abort:
336 336 raise error.Abort(msg)
337 337 self._ui.warn(_("warning: %s\n") % msg)
338 338 self._loweredfiles.add(fl)
339 339 self._newfiles.add(f)
340 340
341 341 def filteredhash(repo, maxrev):
342 342 """build hash of filtered revisions in the current repoview.
343 343
344 344 Multiple caches perform up-to-date validation by checking that the
345 345 tiprev and tipnode stored in the cache file match the current repository.
346 346 However, this is not sufficient for validating repoviews because the set
347 347 of revisions in the view may change without the repository tiprev and
348 348 tipnode changing.
349 349
350 350 This function hashes all the revs filtered from the view and returns
351 351 that SHA-1 digest.
352 352 """
353 353 cl = repo.changelog
354 354 if not cl.filteredrevs:
355 355 return None
356 356 key = None
357 357 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
358 358 if revs:
359 359 s = hashlib.sha1()
360 360 for rev in revs:
361 361 s.update('%d;' % rev)
362 362 key = s.digest()
363 363 return key
364 364
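For clarity, a standalone sketch equivalent to the key derivation above: a SHA-1 digest over the decimal revnums, each terminated by ';', of every filtered rev up to maxrev.

    import hashlib

    def _filteredkey(filteredrevs, maxrev):
        revs = sorted(r for r in filteredrevs if r <= maxrev)
        if not revs:
            return None
        s = hashlib.sha1()
        for rev in revs:
            s.update(b'%d;' % rev)  # same record format as filteredhash()
        return s.digest()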
365 365 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
366 366 '''yield every hg repository under path, always recursively.
367 367 The recurse flag will only control recursion into repo working dirs'''
368 368 def errhandler(err):
369 369 if err.filename == path:
370 370 raise err
371 371 samestat = getattr(os.path, 'samestat', None)
372 372 if followsym and samestat is not None:
373 373 def adddir(dirlst, dirname):
374 374 dirstat = os.stat(dirname)
375 375 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
376 376 if not match:
377 377 dirlst.append(dirstat)
378 378 return not match
379 379 else:
380 380 followsym = False
381 381
382 382 if (seen_dirs is None) and followsym:
383 383 seen_dirs = []
384 384 adddir(seen_dirs, path)
385 385 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 386 dirs.sort()
387 387 if '.hg' in dirs:
388 388 yield root # found a repository
389 389 qroot = os.path.join(root, '.hg', 'patches')
390 390 if os.path.isdir(os.path.join(qroot, '.hg')):
391 391 yield qroot # we have a patch queue repo here
392 392 if recurse:
393 393 # avoid recursing inside the .hg directory
394 394 dirs.remove('.hg')
395 395 else:
396 396 dirs[:] = [] # don't descend further
397 397 elif followsym:
398 398 newdirs = []
399 399 for d in dirs:
400 400 fname = os.path.join(root, d)
401 401 if adddir(seen_dirs, fname):
402 402 if os.path.islink(fname):
403 403 for hgname in walkrepos(fname, True, seen_dirs):
404 404 yield hgname
405 405 else:
406 406 newdirs.append(d)
407 407 dirs[:] = newdirs
408 408
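Usage sketch (the path is hypothetical): collect every repository, including patch-queue repos, under a directory tree while following symlinks.

    repos = list(walkrepos(b'/srv/repos', followsym=True))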
409 409 def binnode(ctx):
410 410 """Return binary node id for a given basectx"""
411 411 node = ctx.node()
412 412 if node is None:
413 413 return wdirid
414 414 return node
415 415
416 416 def intrev(ctx):
417 417 """Return integer for a given basectx that can be used in comparison or
418 418 arithmetic operation"""
419 419 rev = ctx.rev()
420 420 if rev is None:
421 421 return wdirrev
422 422 return rev
423 423
424 424 def formatchangeid(ctx):
425 425 """Format changectx as '{rev}:{node|formatnode}', which is the default
426 426 template provided by logcmdutil.changesettemplater"""
427 427 repo = ctx.repo()
428 428 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
429 429
430 430 def formatrevnode(ui, rev, node):
431 431 """Format given revision and node depending on the current verbosity"""
432 432 if ui.debugflag:
433 433 hexfunc = hex
434 434 else:
435 435 hexfunc = short
436 436 return '%d:%s' % (rev, hexfunc(node))
437 437
438 438 def resolvehexnodeidprefix(repo, prefix):
439 439 if (prefix.startswith('x') and
440 440 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
441 441 prefix = prefix[1:]
442 442 try:
443 443 # Uses unfiltered repo because it's faster when prefix is ambiguous.
444 444 # This matches the shortesthexnodeidprefix() function below.
445 445 node = repo.unfiltered().changelog._partialmatch(prefix)
446 446 except error.AmbiguousPrefixLookupError:
447 447 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
448 448 if revset:
449 449 # Clear config to avoid infinite recursion
450 450 configoverrides = {('experimental',
451 451 'revisions.disambiguatewithin'): None}
452 452 with repo.ui.configoverride(configoverrides):
453 453 revs = repo.anyrevs([revset], user=True)
454 454 matches = []
455 455 for rev in revs:
456 456 node = repo.changelog.node(rev)
457 457 if hex(node).startswith(prefix):
458 458 matches.append(node)
459 459 if len(matches) == 1:
460 460 return matches[0]
461 461 raise
462 462 if node is None:
463 463 return
464 464 repo.changelog.rev(node) # make sure node isn't filtered
465 465 return node
466 466
467 467 def mayberevnum(repo, prefix):
468 468 """Checks if the given prefix may be mistaken for a revision number"""
469 469 try:
470 470 i = int(prefix)
471 471 # if we are a pure int, then starting with zero will not be
472 472 # confused as a rev; or, obviously, if the int is larger
473 473 # than the value of the tip rev. We still need to disambiguate if
474 474 # prefix == '0', since that *is* a valid revnum.
475 475 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
476 476 return False
477 477 return True
478 478 except ValueError:
479 479 return False
480 480
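Behavior sketch of the checks above, assuming a repo with more than 42 revisions: only canonical decimal strings within the repo's rev range look like revnums.

    # mayberevnum(repo, b'42')   -> True   (plain int no larger than tip)
    # mayberevnum(repo, b'0')    -> True   (explicitly still ambiguous)
    # mayberevnum(repo, b'042')  -> False  (leading zero, and not b'0')
    # mayberevnum(repo, b'cafe') -> False  (not an integer)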
481 481 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
482 482 """Find the shortest unambiguous prefix that matches hexnode.
483 483
484 484 If "cache" is not None, it must be a dictionary that can be used for
485 485 caching between calls to this method.
486 486 """
487 487 # _partialmatch() of filtered changelog could take O(len(repo)) time,
488 488 # which would be unacceptably slow. So we look for hash collisions in
489 489 # unfiltered space, which means some hashes may be slightly longer.
490 490
491 491 minlength = max(minlength, 1)
492 492
493 493 def disambiguate(prefix):
494 494 """Disambiguate against revnums."""
495 495 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
496 496 if mayberevnum(repo, prefix):
497 497 return 'x' + prefix
498 498 else:
499 499 return prefix
500 500
501 501 hexnode = hex(node)
502 502 for length in range(len(prefix), len(hexnode) + 1):
503 503 prefix = hexnode[:length]
504 504 if not mayberevnum(repo, prefix):
505 505 return prefix
506 506
507 507 cl = repo.unfiltered().changelog
508 508 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
509 509 if revset:
510 510 revs = None
511 511 if cache is not None:
512 512 revs = cache.get('disambiguationrevset')
513 513 if revs is None:
514 514 revs = repo.anyrevs([revset], user=True)
515 515 if cache is not None:
516 516 cache['disambiguationrevset'] = revs
517 517 if cl.rev(node) in revs:
518 518 hexnode = hex(node)
519 519 nodetree = None
520 520 if cache is not None:
521 521 nodetree = cache.get('disambiguationnodetree')
522 522 if not nodetree:
523 523 try:
524 524 nodetree = parsers.nodetree(cl.index, len(revs))
525 525 except AttributeError:
526 526 # no native nodetree
527 527 pass
528 528 else:
529 529 for r in revs:
530 530 nodetree.insert(r)
531 531 if cache is not None:
532 532 cache['disambiguationnodetree'] = nodetree
533 533 if nodetree is not None:
534 534 length = max(nodetree.shortest(node), minlength)
535 535 prefix = hexnode[:length]
536 536 return disambiguate(prefix)
537 537 for length in range(minlength, len(hexnode) + 1):
538 538 matches = []
539 539 prefix = hexnode[:length]
540 540 for rev in revs:
541 541 otherhexnode = repo[rev].hex()
542 542 if prefix == otherhexnode[:length]:
543 543 matches.append(otherhexnode)
544 544 if len(matches) == 1:
545 545 return disambiguate(prefix)
546 546
547 547 try:
548 548 return disambiguate(cl.shortest(node, minlength))
549 549 except error.LookupError:
550 550 raise error.RepoLookupError()
551 551
552 552 def isrevsymbol(repo, symbol):
553 553 """Checks if a symbol exists in the repo.
554 554
555 555 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
556 556 symbol is an ambiguous nodeid prefix.
557 557 """
558 558 try:
559 559 revsymbol(repo, symbol)
560 560 return True
561 561 except error.RepoLookupError:
562 562 return False
563 563
564 564 def revsymbol(repo, symbol):
565 565 """Returns a context given a single revision symbol (as string).
566 566
567 567 This is similar to revsingle(), but accepts only a single revision symbol,
568 568 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
569 569 not "max(public())".
570 570 """
571 571 if not isinstance(symbol, bytes):
572 572 msg = ("symbol (%s of type %s) was not a string, did you mean "
573 573 "repo[symbol]?" % (symbol, type(symbol)))
574 574 raise error.ProgrammingError(msg)
575 575 try:
576 576 if symbol in ('.', 'tip', 'null'):
577 577 return repo[symbol]
578 578
579 579 try:
580 580 r = int(symbol)
581 581 if '%d' % r != symbol:
582 582 raise ValueError
583 583 l = len(repo.changelog)
584 584 if r < 0:
585 585 r += l
586 586 if r < 0 or r >= l and r != wdirrev:
587 587 raise ValueError
588 588 return repo[r]
589 589 except error.FilteredIndexError:
590 590 raise
591 591 except (ValueError, OverflowError, IndexError):
592 592 pass
593 593
594 594 if len(symbol) == 40:
595 595 try:
596 596 node = bin(symbol)
597 597 rev = repo.changelog.rev(node)
598 598 return repo[rev]
599 599 except error.FilteredLookupError:
600 600 raise
601 601 except (TypeError, LookupError):
602 602 pass
603 603
604 604 # look up bookmarks through the name interface
605 605 try:
606 606 node = repo.names.singlenode(repo, symbol)
607 607 rev = repo.changelog.rev(node)
608 608 return repo[rev]
609 609 except KeyError:
610 610 pass
611 611
612 612 node = resolvehexnodeidprefix(repo, symbol)
613 613 if node is not None:
614 614 rev = repo.changelog.rev(node)
615 615 return repo[rev]
616 616
617 617 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
618 618
619 619 except error.WdirUnsupported:
620 620 return repo[None]
621 621 except (error.FilteredIndexError, error.FilteredLookupError,
622 622 error.FilteredRepoLookupError):
623 623 raise _filterederror(repo, symbol)
624 624
625 625 def _filterederror(repo, changeid):
626 626 """build an exception to be raised about a filtered changeid
627 627
628 628 This is extracted into a function to help extensions (e.g. evolve) to
629 629 experiment with various message variants."""
630 630 if repo.filtername.startswith('visible'):
631 631
632 632 # Check if the changeset is obsolete
633 633 unfilteredrepo = repo.unfiltered()
634 634 ctx = revsymbol(unfilteredrepo, changeid)
635 635
636 636 # If the changeset is obsolete, enrich the message with the reason
637 637 # that made this changeset not visible
638 638 if ctx.obsolete():
639 639 msg = obsutil._getfilteredreason(repo, changeid, ctx)
640 640 else:
641 641 msg = _("hidden revision '%s'") % changeid
642 642
643 643 hint = _('use --hidden to access hidden revisions')
644 644
645 645 return error.FilteredRepoLookupError(msg, hint=hint)
646 646 msg = _("filtered revision '%s' (not in '%s' subset)")
647 647 msg %= (changeid, repo.filtername)
648 648 return error.FilteredRepoLookupError(msg)
649 649
650 650 def revsingle(repo, revspec, default='.', localalias=None):
651 651 if not revspec and revspec != 0:
652 652 return repo[default]
653 653
654 654 l = revrange(repo, [revspec], localalias=localalias)
655 655 if not l:
656 656 raise error.Abort(_('empty revision set'))
657 657 return repo[l.last()]
658 658
659 659 def _pairspec(revspec):
660 660 tree = revsetlang.parse(revspec)
661 661 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
662 662
663 663 def revpair(repo, revs):
664 664 if not revs:
665 665 return repo['.'], repo[None]
666 666
667 667 l = revrange(repo, revs)
668 668
669 669 if not l:
670 670 raise error.Abort(_('empty revision range'))
671 671
672 672 first = l.first()
673 673 second = l.last()
674 674
675 675 if (first == second and len(revs) >= 2
676 676 and not all(revrange(repo, [r]) for r in revs)):
677 677 raise error.Abort(_('empty revision on one side of range'))
678 678
679 679 # if top-level is range expression, the result must always be a pair
680 680 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
681 681 return repo[first], repo[None]
682 682
683 683 return repo[first], repo[second]
684 684
685 685 def revrange(repo, specs, localalias=None):
686 686 """Execute 1 to many revsets and return the union.
687 687
688 688 This is the preferred mechanism for executing revsets using user-specified
689 689 config options, such as revset aliases.
690 690
691 691 The revsets specified by ``specs`` will be executed via a chained ``OR``
692 692 expression. If ``specs`` is empty, an empty result is returned.
693 693
694 694 ``specs`` can contain integers, in which case they are assumed to be
695 695 revision numbers.
696 696
697 697 It is assumed the revsets are already formatted. If you have arguments
698 698 that need to be expanded in the revset, call ``revsetlang.formatspec()``
699 699 and pass the result as an element of ``specs``.
700 700
701 701 Specifying a single revset is allowed.
702 702
703 703 Returns a ``revset.abstractsmartset`` which is a list-like interface over
704 704 integer revisions.
705 705 """
706 706 allspecs = []
707 707 for spec in specs:
708 708 if isinstance(spec, int):
709 709 spec = revsetlang.formatspec('%d', spec)
710 710 allspecs.append(spec)
711 711 return repo.anyrevs(allspecs, user=True, localalias=localalias)
712 712
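A hedged usage sketch: integer specs are formatted as revnums, all specs are OR'ed together, and the returned smartset iterates integer revisions.

    revs = revrange(repo, [b'heads(default)', 42])
    for rev in revs:
        ctx = repo[rev]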
713 713 def meaningfulparents(repo, ctx):
714 714 """Return list of meaningful (or all if debug) parentrevs for rev.
715 715
716 716 For merges (two non-nullrev revisions) both parents are meaningful.
717 717 Otherwise the first parent revision is considered meaningful if it
718 718 is not the preceding revision.
719 719 """
720 720 parents = ctx.parents()
721 721 if len(parents) > 1:
722 722 return parents
723 723 if repo.ui.debugflag:
724 724 return [parents[0], repo[nullrev]]
725 725 if parents[0].rev() >= intrev(ctx) - 1:
726 726 return []
727 727 return parents
728 728
729 729 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
730 730 """Return a function that produces paths for presenting to the user.
731 731
732 732 The returned function takes a repo-relative path and produces a path
733 733 that can be presented in the UI.
734 734
735 735 Depending on the value of ui.relative-paths, either a repo-relative or
736 736 cwd-relative path will be produced.
737 737
738 738 legacyrelativevalue is the value to use if ui.relative-paths=legacy
739 739
740 740 If forcerelativevalue is not None, then that value will be used regardless
741 741 of what ui.relative-paths is set to.
742 742 """
743 743 if forcerelativevalue is not None:
744 744 relative = forcerelativevalue
745 745 else:
746 746 config = repo.ui.config('ui', 'relative-paths')
747 747 if config == 'legacy':
748 748 relative = legacyrelativevalue
749 749 else:
750 750 relative = stringutil.parsebool(config)
751 751 if relative is None:
752 752 raise error.ConfigError(
753 753 _("ui.relative-paths is not a boolean ('%s')") % config)
754 754
755 755 if relative:
756 756 cwd = repo.getcwd()
757 757 pathto = repo.pathto
758 758 return lambda f: pathto(f, cwd)
759 759 elif repo.ui.configbool('ui', 'slash'):
760 760 return lambda f: f
761 761 else:
762 762 return util.localpath
763 763
764 764 def subdiruipathfn(subpath, uipathfn):
765 765 '''Create a new uipathfn that treats the file as relative to subpath.'''
766 766 return lambda f: uipathfn(posixpath.join(subpath, f))
767 767
768 768 def anypats(pats, opts):
769 769 '''Checks if any patterns, including --include and --exclude were given.
770 770
771 771 Some commands (e.g. addremove) use this condition for deciding whether to
772 772 print absolute or relative paths.
773 773 '''
774 774 return bool(pats or opts.get('include') or opts.get('exclude'))
775 775
776 776 def expandpats(pats):
777 777 '''Expand bare globs when running on windows.
778 778 On posix we assume it has already been done by sh.'''
779 779 if not util.expandglobs:
780 780 return list(pats)
781 781 ret = []
782 782 for kindpat in pats:
783 783 kind, pat = matchmod._patsplit(kindpat, None)
784 784 if kind is None:
785 785 try:
786 786 globbed = glob.glob(pat)
787 787 except re.error:
788 788 globbed = [pat]
789 789 if globbed:
790 790 ret.extend(globbed)
791 791 continue
792 792 ret.append(kindpat)
793 793 return ret
794 794
795 795 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
796 796 badfn=None):
797 797 '''Return a matcher and the patterns that were used.
798 798 The matcher will warn about bad matches, unless an alternate badfn callback
799 799 is provided.'''
800 800 if opts is None:
801 801 opts = {}
802 802 if not globbed and default == 'relpath':
803 803 pats = expandpats(pats or [])
804 804
805 805 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
806 806 def bad(f, msg):
807 807 ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
808 808
809 809 if badfn is None:
810 810 badfn = bad
811 811
812 812 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
813 813 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
814 814
815 815 if m.always():
816 816 pats = []
817 817 return m, pats
818 818
819 819 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
820 820 badfn=None):
821 821 '''Return a matcher that will warn about bad matches.'''
822 822 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
823 823
824 824 def matchall(repo):
825 825 '''Return a matcher that will efficiently match everything.'''
826 826 return matchmod.always()
827 827
828 828 def matchfiles(repo, files, badfn=None):
829 829 '''Return a matcher that will efficiently match exactly these files.'''
830 830 return matchmod.exact(files, badfn=badfn)
831 831
832 832 def parsefollowlinespattern(repo, rev, pat, msg):
833 833 """Return a file name from `pat` pattern suitable for usage in followlines
834 834 logic.
835 835 """
836 836 if not matchmod.patkind(pat):
837 837 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
838 838 else:
839 839 ctx = repo[rev]
840 840 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
841 841 files = [f for f in ctx if m(f)]
842 842 if len(files) != 1:
843 843 raise error.ParseError(msg)
844 844 return files[0]
845 845
846 846 def getorigvfs(ui, repo):
847 847 """return a vfs suitable to save 'orig' file
848 848
849 849 return None if no special directory is configured"""
850 850 origbackuppath = ui.config('ui', 'origbackuppath')
851 851 if not origbackuppath:
852 852 return None
853 853 return vfs.vfs(repo.wvfs.join(origbackuppath))
854 854
855 855 def backuppath(ui, repo, filepath):
856 856 '''customize where working copy backup files (.orig files) are created
857 857
858 858 Fetch user defined path from config file: [ui] origbackuppath = <path>
859 859 Fall back to default (filepath with .orig suffix) if not specified
860 860
861 861 filepath is repo-relative
862 862
863 863 Returns an absolute path
864 864 '''
865 865 origvfs = getorigvfs(ui, repo)
866 866 if origvfs is None:
867 867 return repo.wjoin(filepath + ".orig")
868 868
869 869 origbackupdir = origvfs.dirname(filepath)
870 870 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
871 871 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
872 872
873 873 # Remove any files that conflict with the backup file's path
874 874 for f in reversed(list(util.finddirs(filepath))):
875 875 if origvfs.isfileorlink(f):
876 876 ui.note(_('removing conflicting file: %s\n')
877 877 % origvfs.join(f))
878 878 origvfs.unlink(f)
879 879 break
880 880
881 881 origvfs.makedirs(origbackupdir)
882 882
883 883 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
884 884 ui.note(_('removing conflicting directory: %s\n')
885 885 % origvfs.join(filepath))
886 886 origvfs.rmtree(filepath, forcibly=True)
887 887
888 888 return origvfs.join(filepath)
889 889
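Usage sketch: ask where a command should write the .orig backup of a repo-relative file; with ui.origbackuppath unset, this is simply the working-copy path plus a '.orig' suffix.

    orig = backuppath(repo.ui, repo, b'dir/file.txt')
    # default: <repo root>/dir/file.txt.orig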
890 890 class _containsnode(object):
891 891 """proxy __contains__(node) to container.__contains__ which accepts revs"""
892 892
893 893 def __init__(self, repo, revcontainer):
894 894 self._torev = repo.changelog.rev
895 895 self._revcontains = revcontainer.__contains__
896 896
897 897 def __contains__(self, node):
898 898 return self._revcontains(self._torev(node))
899 899
900 900 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
901 901 fixphase=False, targetphase=None, backup=True):
902 902 """do common cleanups when old nodes are replaced by new nodes
903 903
904 904 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
905 905 (we might also want to move working directory parent in the future)
906 906
907 907 By default, bookmark moves are calculated automatically from 'replacements',
908 908 but 'moves' can be used to override that. Also, 'moves' may include
909 909 additional bookmark moves that should not have associated obsmarkers.
910 910
911 911 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
912 912 have replacements. operation is a string, like "rebase".
913 913
914 914 metadata is a dictionary containing metadata to be stored in obsmarkers if
915 915 obsolescence is enabled.
916 916 """
917 917 assert fixphase or targetphase is None
918 918 if not replacements and not moves:
919 919 return
920 920
921 921 # translate mapping's other forms
922 922 if not util.safehasattr(replacements, 'items'):
923 923 replacements = {(n,): () for n in replacements}
924 924 else:
925 925 # upgrading non-tuple "source" keys to tuple ones for BC
926 926 repls = {}
927 927 for key, value in replacements.items():
928 928 if not isinstance(key, tuple):
929 929 key = (key,)
930 930 repls[key] = value
931 931 replacements = repls
932 932
933 933 # Unfiltered repo is needed since nodes in replacements might be hidden.
934 934 unfi = repo.unfiltered()
935 935
936 936 # Calculate bookmark movements
937 937 if moves is None:
938 938 moves = {}
939 939 for oldnodes, newnodes in replacements.items():
940 940 for oldnode in oldnodes:
941 941 if oldnode in moves:
942 942 continue
943 943 if len(newnodes) > 1:
944 944 # usually a split, take the one with biggest rev number
945 945 newnode = next(unfi.set('max(%ln)', newnodes)).node()
946 946 elif len(newnodes) == 0:
947 947 # move bookmark backwards
948 948 allreplaced = []
949 949 for rep in replacements:
950 950 allreplaced.extend(rep)
951 951 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
952 952 allreplaced))
953 953 if roots:
954 954 newnode = roots[0].node()
955 955 else:
956 956 newnode = nullid
957 957 else:
958 958 newnode = newnodes[0]
959 959 moves[oldnode] = newnode
960 960
961 961 allnewnodes = [n for ns in replacements.values() for n in ns]
962 962 toretract = {}
963 963 toadvance = {}
964 964 if fixphase:
965 965 precursors = {}
966 966 for oldnodes, newnodes in replacements.items():
967 967 for oldnode in oldnodes:
968 968 for newnode in newnodes:
969 969 precursors.setdefault(newnode, []).append(oldnode)
970 970
971 971 allnewnodes.sort(key=lambda n: unfi[n].rev())
972 972 newphases = {}
973 973 def phase(ctx):
974 974 return newphases.get(ctx.node(), ctx.phase())
975 975 for newnode in allnewnodes:
976 976 ctx = unfi[newnode]
977 977 parentphase = max(phase(p) for p in ctx.parents())
978 978 if targetphase is None:
979 979 oldphase = max(unfi[oldnode].phase()
980 980 for oldnode in precursors[newnode])
981 981 newphase = max(oldphase, parentphase)
982 982 else:
983 983 newphase = max(targetphase, parentphase)
984 984 newphases[newnode] = newphase
985 985 if newphase > ctx.phase():
986 986 toretract.setdefault(newphase, []).append(newnode)
987 987 elif newphase < ctx.phase():
988 988 toadvance.setdefault(newphase, []).append(newnode)
989 989
990 990 with repo.transaction('cleanup') as tr:
991 991 # Move bookmarks
992 992 bmarks = repo._bookmarks
993 993 bmarkchanges = []
994 994 for oldnode, newnode in moves.items():
995 995 oldbmarks = repo.nodebookmarks(oldnode)
996 996 if not oldbmarks:
997 997 continue
998 998 from . import bookmarks # avoid import cycle
999 999 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
1000 1000 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1001 1001 hex(oldnode), hex(newnode)))
1002 1002 # Delete divergent bookmarks being parents of related newnodes
1003 1003 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
1004 1004 allnewnodes, newnode, oldnode)
1005 1005 deletenodes = _containsnode(repo, deleterevs)
1006 1006 for name in oldbmarks:
1007 1007 bmarkchanges.append((name, newnode))
1008 1008 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1009 1009 bmarkchanges.append((b, None))
1010 1010
1011 1011 if bmarkchanges:
1012 1012 bmarks.applychanges(repo, tr, bmarkchanges)
1013 1013
1014 1014 for phase, nodes in toretract.items():
1015 1015 phases.retractboundary(repo, tr, phase, nodes)
1016 1016 for phase, nodes in toadvance.items():
1017 1017 phases.advanceboundary(repo, tr, phase, nodes)
1018 1018
1019 1019 # Obsolete or strip nodes
1020 1020 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1021 1021 # If a node is already obsoleted, and we want to obsolete it
1022 1022 # without a successor, skip that obsolete request since it's
1023 1023 # unnecessary. That's the "if s or not isobs(n)" check below.
1024 1024 # Also sort the nodes in topological order; that might be useful for
1025 1025 # some obsstore logic.
1026 1026 # NOTE: the sorting might belong to createmarkers.
1027 1027 torev = unfi.changelog.rev
1028 1028 sortfunc = lambda ns: torev(ns[0][0])
1029 1029 rels = []
1030 1030 for ns, s in sorted(replacements.items(), key=sortfunc):
1031 1031 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1032 1032 rels.append(rel)
1033 1033 if rels:
1034 1034 obsolete.createmarkers(repo, rels, operation=operation,
1035 1035 metadata=metadata)
1036 1036 else:
1037 1037 from . import repair # avoid import cycle
1038 1038 tostrip = list(n for ns in replacements for n in ns)
1039 1039 if tostrip:
1040 1040 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1041 1041 backup=backup)
1042 1042
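A hedged usage sketch (oldnode/newnode are hypothetical binary node ids): record a rewrite and let the helper above move bookmarks and either create obsmarkers or strip, depending on repo configuration.

    cleanupnodes(repo, {oldnode: [newnode]}, 'amend')
    cleanupnodes(repo, [prunednode], 'prune')  # iterable form: no successors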
1043 1043 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1044 1044 if opts is None:
1045 1045 opts = {}
1046 1046 m = matcher
1047 1047 dry_run = opts.get('dry_run')
1048 1048 try:
1049 1049 similarity = float(opts.get('similarity') or 0)
1050 1050 except ValueError:
1051 1051 raise error.Abort(_('similarity must be a number'))
1052 1052 if similarity < 0 or similarity > 100:
1053 1053 raise error.Abort(_('similarity must be between 0 and 100'))
1054 1054 similarity /= 100.0
1055 1055
1056 1056 ret = 0
1057 1057
1058 1058 wctx = repo[None]
1059 1059 for subpath in sorted(wctx.substate):
1060 1060 submatch = matchmod.subdirmatcher(subpath, m)
1061 1061 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1062 1062 sub = wctx.sub(subpath)
1063 1063 subprefix = repo.wvfs.reljoin(prefix, subpath)
1064 1064 subuipathfn = subdiruipathfn(subpath, uipathfn)
1065 1065 try:
1066 1066 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1067 1067 ret = 1
1068 1068 except error.LookupError:
1069 1069 repo.ui.status(_("skipping missing subrepository: %s\n")
1070 1070 % uipathfn(subpath))
1071 1071
1072 1072 rejected = []
1073 1073 def badfn(f, msg):
1074 1074 if f in m.files():
1075 1075 m.bad(f, msg)
1076 1076 rejected.append(f)
1077 1077
1078 1078 badmatch = matchmod.badmatch(m, badfn)
1079 1079 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1080 1080 badmatch)
1081 1081
1082 1082 unknownset = set(unknown + forgotten)
1083 1083 toprint = unknownset.copy()
1084 1084 toprint.update(deleted)
1085 1085 for abs in sorted(toprint):
1086 1086 if repo.ui.verbose or not m.exact(abs):
1087 1087 if abs in unknownset:
1088 1088 status = _('adding %s\n') % uipathfn(abs)
1089 1089 label = 'ui.addremove.added'
1090 1090 else:
1091 1091 status = _('removing %s\n') % uipathfn(abs)
1092 1092 label = 'ui.addremove.removed'
1093 1093 repo.ui.status(status, label=label)
1094 1094
1095 1095 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1096 1096 similarity, uipathfn)
1097 1097
1098 1098 if not dry_run:
1099 1099 _markchanges(repo, unknown + forgotten, deleted, renames)
1100 1100
1101 1101 for f in rejected:
1102 1102 if f in m.files():
1103 1103 return 1
1104 1104 return ret
1105 1105
1106 1106 def marktouched(repo, files, similarity=0.0):
1107 1107 '''Assert that files have somehow been operated upon. files are relative to
1108 1108 the repo root.'''
1109 1109 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1110 1110 rejected = []
1111 1111
1112 1112 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1113 1113
1114 1114 if repo.ui.verbose:
1115 1115 unknownset = set(unknown + forgotten)
1116 1116 toprint = unknownset.copy()
1117 1117 toprint.update(deleted)
1118 1118 for abs in sorted(toprint):
1119 1119 if abs in unknownset:
1120 1120 status = _('adding %s\n') % abs
1121 1121 else:
1122 1122 status = _('removing %s\n') % abs
1123 1123 repo.ui.status(status)
1124 1124
1125 1125 # TODO: We should probably have the caller pass in uipathfn and apply it to
1126 # the messages above too. forcerelativevalue=True is consistent with how
1126 # the messages above too. legacyrelativevalue=True is consistent with how
1127 1127 # it used to work.
1128 1128 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1129 1129 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1130 1130 similarity, uipathfn)
1131 1131
1132 1132 _markchanges(repo, unknown + forgotten, deleted, renames)
1133 1133
1134 1134 for f in rejected:
1135 1135 if f in m.files():
1136 1136 return 1
1137 1137 return 0
1138 1138
1139 1139 def _interestingfiles(repo, matcher):
1140 1140 '''Walk dirstate with matcher, looking for files that addremove would care
1141 1141 about.
1142 1142
1143 1143 This is different from dirstate.status because it doesn't care about
1144 1144 whether files are modified or clean.'''
1145 1145 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1146 1146 audit_path = pathutil.pathauditor(repo.root, cached=True)
1147 1147
1148 1148 ctx = repo[None]
1149 1149 dirstate = repo.dirstate
1150 1150 matcher = repo.narrowmatch(matcher, includeexact=True)
1151 1151 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1152 1152 unknown=True, ignored=False, full=False)
1153 1153 for abs, st in walkresults.iteritems():
1154 1154 dstate = dirstate[abs]
1155 1155 if dstate == '?' and audit_path.check(abs):
1156 1156 unknown.append(abs)
1157 1157 elif dstate != 'r' and not st:
1158 1158 deleted.append(abs)
1159 1159 elif dstate == 'r' and st:
1160 1160 forgotten.append(abs)
1161 1161 # for finding renames
1162 1162 elif dstate == 'r' and not st:
1163 1163 removed.append(abs)
1164 1164 elif dstate == 'a':
1165 1165 added.append(abs)
1166 1166
1167 1167 return added, unknown, deleted, removed, forgotten
1168 1168
1169 1169 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1170 1170 '''Find renames from removed files to added ones.'''
1171 1171 renames = {}
1172 1172 if similarity > 0:
1173 1173 for old, new, score in similar.findrenames(repo, added, removed,
1174 1174 similarity):
1175 1175 if (repo.ui.verbose or not matcher.exact(old)
1176 1176 or not matcher.exact(new)):
1177 1177 repo.ui.status(_('recording removal of %s as rename to %s '
1178 1178 '(%d%% similar)\n') %
1179 1179 (uipathfn(old), uipathfn(new),
1180 1180 score * 100))
1181 1181 renames[new] = old
1182 1182 return renames
1183 1183
1184 1184 def _markchanges(repo, unknown, deleted, renames):
1185 1185 '''Marks the files in unknown as added, the files in deleted as removed,
1186 1186 and the files in renames as copied.'''
1187 1187 wctx = repo[None]
1188 1188 with repo.wlock():
1189 1189 wctx.forget(deleted)
1190 1190 wctx.add(unknown)
1191 1191 for new, old in renames.iteritems():
1192 1192 wctx.copy(old, new)
1193 1193
1194 1194 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1195 1195 """Update the dirstate to reflect the intent of copying src to dst. For
1196 1196 different reasons it might not end with dst being marked as copied from src.
1197 1197 """
1198 1198 origsrc = repo.dirstate.copied(src) or src
1199 1199 if dst == origsrc: # copying back a copy?
1200 1200 if repo.dirstate[dst] not in 'mn' and not dryrun:
1201 1201 repo.dirstate.normallookup(dst)
1202 1202 else:
1203 1203 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1204 1204 if not ui.quiet:
1205 1205 ui.warn(_("%s has not been committed yet, so no copy "
1206 1206 "data will be stored for %s.\n")
1207 1207 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1208 1208 if repo.dirstate[dst] in '?r' and not dryrun:
1209 1209 wctx.add([dst])
1210 1210 elif not dryrun:
1211 1211 wctx.copy(origsrc, dst)
1212 1212
1213 1213 def writerequires(opener, requirements):
1214 1214 with opener('requires', 'w', atomictemp=True) as fp:
1215 1215 for r in sorted(requirements):
1216 1216 fp.write("%s\n" % r)
1217 1217
1218 1218 class filecachesubentry(object):
1219 1219 def __init__(self, path, stat):
1220 1220 self.path = path
1221 1221 self.cachestat = None
1222 1222 self._cacheable = None
1223 1223
1224 1224 if stat:
1225 1225 self.cachestat = filecachesubentry.stat(self.path)
1226 1226
1227 1227 if self.cachestat:
1228 1228 self._cacheable = self.cachestat.cacheable()
1229 1229 else:
1230 1230 # None means we don't know yet
1231 1231 self._cacheable = None
1232 1232
1233 1233 def refresh(self):
1234 1234 if self.cacheable():
1235 1235 self.cachestat = filecachesubentry.stat(self.path)
1236 1236
1237 1237 def cacheable(self):
1238 1238 if self._cacheable is not None:
1239 1239 return self._cacheable
1240 1240
1241 1241 # we don't know yet, assume it is for now
1242 1242 return True
1243 1243
1244 1244 def changed(self):
1245 1245 # no point in going further if we can't cache it
1246 1246 if not self.cacheable():
1247 1247 return True
1248 1248
1249 1249 newstat = filecachesubentry.stat(self.path)
1250 1250
1251 1251 # we may not know if it's cacheable yet, check again now
1252 1252 if newstat and self._cacheable is None:
1253 1253 self._cacheable = newstat.cacheable()
1254 1254
1255 1255 # check again
1256 1256 if not self._cacheable:
1257 1257 return True
1258 1258
1259 1259 if self.cachestat != newstat:
1260 1260 self.cachestat = newstat
1261 1261 return True
1262 1262 else:
1263 1263 return False
1264 1264
1265 1265 @staticmethod
1266 1266 def stat(path):
1267 1267 try:
1268 1268 return util.cachestat(path)
1269 1269 except OSError as e:
1270 1270 if e.errno != errno.ENOENT:
1271 1271 raise
1272 1272
1273 1273 class filecacheentry(object):
1274 1274 def __init__(self, paths, stat=True):
1275 1275 self._entries = []
1276 1276 for path in paths:
1277 1277 self._entries.append(filecachesubentry(path, stat))
1278 1278
1279 1279 def changed(self):
1280 1280 '''true if any entry has changed'''
1281 1281 for entry in self._entries:
1282 1282 if entry.changed():
1283 1283 return True
1284 1284 return False
1285 1285
1286 1286 def refresh(self):
1287 1287 for entry in self._entries:
1288 1288 entry.refresh()
1289 1289
1290 1290 class filecache(object):
1291 1291 """A property like decorator that tracks files under .hg/ for updates.
1292 1292
1293 1293 On first access, the files defined as arguments are stat()ed and the
1294 1294 results cached. The decorated function is called. The results are stashed
1295 1295 away in a ``_filecache`` dict on the object whose method is decorated.
1296 1296
1297 1297 On subsequent access, the cached result is used as it is set to the
1298 1298 instance dictionary.
1299 1299
1300 1300 On external property set/delete operations, the caller must update the
1301 1301 corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1302 1302 instead of directly setting <attr>.
1303 1303
1304 1304 When using the property API, the cached data is always used if available.
1305 1305 No stat() is performed to check if the file has changed.
1306 1306
1307 1307 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1308 1308 can populate an entry before the property's getter is called. In this case,
1309 1309 entries in ``_filecache`` will be used during property operations,
1310 1310 if available. If the underlying file changes, it is up to external callers
1311 1311 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1312 1312 method result as well as possibly calling ``del obj._filecache[attr]`` to
1313 1313 remove the ``filecacheentry``.
1314 1314 """
1315 1315
1316 1316 def __init__(self, *paths):
1317 1317 self.paths = paths
1318 1318
1319 1319 def join(self, obj, fname):
1320 1320 """Used to compute the runtime path of a cached file.
1321 1321
1322 1322 Users should subclass filecache and provide their own version of this
1323 1323 function to call the appropriate join function on 'obj' (an instance
1324 1324 of the class that its member function was decorated).
1325 1325 """
1326 1326 raise NotImplementedError
1327 1327
1328 1328 def __call__(self, func):
1329 1329 self.func = func
1330 1330 self.sname = func.__name__
1331 1331 self.name = pycompat.sysbytes(self.sname)
1332 1332 return self
1333 1333
1334 1334 def __get__(self, obj, type=None):
1335 1335 # if accessed on the class, return the descriptor itself.
1336 1336 if obj is None:
1337 1337 return self
1338 1338
1339 1339 assert self.sname not in obj.__dict__
1340 1340
1341 1341 entry = obj._filecache.get(self.name)
1342 1342
1343 1343 if entry:
1344 1344 if entry.changed():
1345 1345 entry.obj = self.func(obj)
1346 1346 else:
1347 1347 paths = [self.join(obj, path) for path in self.paths]
1348 1348
1349 1349 # We stat -before- creating the object so our cache doesn't lie if
1350 1350 # a writer modified the file between the time we read and stat it
1351 1351 entry = filecacheentry(paths, True)
1352 1352 entry.obj = self.func(obj)
1353 1353
1354 1354 obj._filecache[self.name] = entry
1355 1355
1356 1356 obj.__dict__[self.sname] = entry.obj
1357 1357 return entry.obj
1358 1358
1359 1359 # don't implement __set__(), which would make __dict__ lookup as slow as
1360 1360 # function call.
1361 1361
1362 1362 def set(self, obj, value):
1363 1363 if self.name not in obj._filecache:
1364 1364 # we add an entry for the missing value because X in __dict__
1365 1365 # implies X in _filecache
1366 1366 paths = [self.join(obj, path) for path in self.paths]
1367 1367 ce = filecacheentry(paths, False)
1368 1368 obj._filecache[self.name] = ce
1369 1369 else:
1370 1370 ce = obj._filecache[self.name]
1371 1371
1372 1372 ce.obj = value # update cached copy
1373 1373 obj.__dict__[self.sname] = value # update copy returned by obj.x
1374 1374
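A minimal subclass sketch matching the docstring's contract: provide join() so the descriptor can resolve runtime paths (the vfs attribute and cached file name here are hypothetical).

    class repofilecache(filecache):
        def join(self, obj, fname):
            return obj.vfs.join(fname)

    class fakerepo(object):
        def __init__(self, vfs):
            self.vfs = vfs
            self._filecache = {}  # required by the descriptor above

        @repofilecache(b'bookmarks')
        def bookmarks(self):
            return self.vfs.read(b'bookmarks')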
1375 1375 def extdatasource(repo, source):
1376 1376 """Gather a map of rev -> value dict from the specified source
1377 1377
1378 1378 A source spec is treated as a URL, with a special case shell: type
1379 1379 for parsing the output from a shell command.
1380 1380
1381 1381 The data is parsed as a series of newline-separated records where
1382 1382 each record is a revision specifier optionally followed by a space
1383 1383 and a freeform string value. If the revision is known locally, it
1384 1384 is converted to a rev, otherwise the record is skipped.
1385 1385
1386 1386 Note that both key and value are treated as UTF-8 and converted to
1387 1387 the local encoding. This allows uniformity between local and
1388 1388 remote data sources.
1389 1389 """
1390 1390
1391 1391 spec = repo.ui.config("extdata", source)
1392 1392 if not spec:
1393 1393 raise error.Abort(_("unknown extdata source '%s'") % source)
1394 1394
1395 1395 data = {}
1396 1396 src = proc = None
1397 1397 try:
1398 1398 if spec.startswith("shell:"):
1399 1399 # external commands should be run relative to the repo root
1400 1400 cmd = spec[6:]
1401 1401 proc = subprocess.Popen(procutil.tonativestr(cmd),
1402 1402 shell=True, bufsize=-1,
1403 1403 close_fds=procutil.closefds,
1404 1404 stdout=subprocess.PIPE,
1405 1405 cwd=procutil.tonativestr(repo.root))
1406 1406 src = proc.stdout
1407 1407 else:
1408 1408 # treat as a URL or file
1409 1409 src = url.open(repo.ui, spec)
1410 1410 for l in src:
1411 1411 if " " in l:
1412 1412 k, v = l.strip().split(" ", 1)
1413 1413 else:
1414 1414 k, v = l.strip(), ""
1415 1415
1416 1416 k = encoding.tolocal(k)
1417 1417 try:
1418 1418 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1419 1419 except (error.LookupError, error.RepoLookupError):
1420 1420 pass # we ignore data for nodes that don't exist locally
1421 1421 finally:
1422 1422 if proc:
1423 1423 proc.communicate()
1424 1424 if src:
1425 1425 src.close()
1426 1426 if proc and proc.returncode != 0:
1427 1427 raise error.Abort(_("extdata command '%s' failed: %s")
1428 1428 % (cmd, procutil.explainexit(proc.returncode)))
1429 1429
1430 1430 return data
1431 1431
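Usage sketch: given an hgrc entry such as

    [extdata]
    bugzilla = shell:cat .hg/bugzilla-ids

the revision-to-value map can be fetched with:

    data = extdatasource(repo, b'bugzilla')  # {rev: b'value', ...}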
1432 1432 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1433 1433 if lock is None:
1434 1434 raise error.LockInheritanceContractViolation(
1435 1435 'lock can only be inherited while held')
1436 1436 if environ is None:
1437 1437 environ = {}
1438 1438 with lock.inherit() as locker:
1439 1439 environ[envvar] = locker
1440 1440 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1441 1441
1442 1442 def wlocksub(repo, cmd, *args, **kwargs):
1443 1443 """run cmd as a subprocess that allows inheriting repo's wlock
1444 1444
1445 1445 This can only be called while the wlock is held. This takes all the
1446 1446 arguments that ui.system does, and returns the exit code of the
1447 1447 subprocess."""
1448 1448 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1449 1449 **kwargs)
1450 1450
1451 1451 class progress(object):
1452 1452 def __init__(self, ui, updatebar, topic, unit="", total=None):
1453 1453 self.ui = ui
1454 1454 self.pos = 0
1455 1455 self.topic = topic
1456 1456 self.unit = unit
1457 1457 self.total = total
1458 1458 self.debug = ui.configbool('progress', 'debug')
1459 1459 self._updatebar = updatebar
1460 1460
1461 1461 def __enter__(self):
1462 1462 return self
1463 1463
1464 1464 def __exit__(self, exc_type, exc_value, exc_tb):
1465 1465 self.complete()
1466 1466
1467 1467 def update(self, pos, item="", total=None):
1468 1468 assert pos is not None
1469 1469 if total:
1470 1470 self.total = total
1471 1471 self.pos = pos
1472 1472 self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1473 1473 if self.debug:
1474 1474 self._printdebug(item)
1475 1475
1476 1476 def increment(self, step=1, item="", total=None):
1477 1477 self.update(self.pos + step, item, total)
1478 1478
1479 1479 def complete(self):
1480 1480 self.pos = None
1481 1481 self.unit = ""
1482 1482 self.total = None
1483 1483 self._updatebar(self.topic, self.pos, "", self.unit, self.total)
1484 1484
1485 1485 def _printdebug(self, item):
1486 1486 if self.unit:
1487 1487 unit = ' ' + self.unit
1488 1488 if item:
1489 1489 item = ' ' + item
1490 1490
1491 1491 if self.total:
1492 1492 pct = 100.0 * self.pos / self.total
1493 1493 self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1494 1494 % (self.topic, item, self.pos, self.total, unit, pct))
1495 1495 else:
1496 1496 self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1497 1497
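A hedged usage sketch of the context-manager protocol above, constructing the class directly with a hypothetical updatebar callback (ui.makeprogress is the usual factory):

    with progress(ui, updatebar, b'files', unit=b'files', total=3) as p:
        for name in [b'a', b'b', b'c']:
            p.increment(item=name)  # advances pos and redraws the bar
    # __exit__ calls complete(), which clears the bar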
1498 1498 def gdinitconfig(ui):
1499 1499 """helper function to know if a repo should be created as general delta
1500 1500 """
1501 1501 # experimental config: format.generaldelta
1502 1502 return (ui.configbool('format', 'generaldelta')
1503 1503 or ui.configbool('format', 'usegeneraldelta'))
1504 1504
1505 1505 def gddeltaconfig(ui):
1506 1506 """helper function to know if incoming delta should be optimised
1507 1507 """
1508 1508 # experimental config: format.generaldelta
1509 1509 return ui.configbool('format', 'generaldelta')
1510 1510
1511 1511 class simplekeyvaluefile(object):
1512 1512 """A simple file with key=value lines
1513 1513
1514 1514 Keys must be alphanumeric and start with a letter; values must not
1515 1515 contain '\n' characters"""
1516 1516 firstlinekey = '__firstline'
1517 1517
1518 1518 def __init__(self, vfs, path, keys=None):
1519 1519 self.vfs = vfs
1520 1520 self.path = path
1521 1521
1522 1522 def read(self, firstlinenonkeyval=False):
1523 1523 """Read the contents of a simple key-value file
1524 1524
1525 1525 'firstlinenonkeyval' indicates whether the first line of file should
1526 1526 be treated as a key-value pair or returned fully under the
1527 1527 __firstline key."""
1528 1528 lines = self.vfs.readlines(self.path)
1529 1529 d = {}
1530 1530 if firstlinenonkeyval:
1531 1531 if not lines:
1532 1532 e = _("empty simplekeyvalue file")
1533 1533 raise error.CorruptedState(e)
1534 1534 # we don't want to include '\n' in the __firstline
1535 1535 d[self.firstlinekey] = lines[0][:-1]
1536 1536 del lines[0]
1537 1537
1538 1538 try:
1539 1539 # the 'if line.strip()' part prevents us from failing on empty
1540 1540 # lines which only contain '\n' therefore are not skipped
1541 1541 # by 'if line'
1542 1542 updatedict = dict(line[:-1].split('=', 1) for line in lines
1543 1543 if line.strip())
1544 1544 if self.firstlinekey in updatedict:
1545 1545 e = _("%r can't be used as a key")
1546 1546 raise error.CorruptedState(e % self.firstlinekey)
1547 1547 d.update(updatedict)
1548 1548 except ValueError as e:
1549 1549 raise error.CorruptedState(str(e))
1550 1550 return d
1551 1551
1552 1552 def write(self, data, firstline=None):
1553 1553 """Write key=>value mapping to a file
1554 1554 data is a dict. Keys must be alphanumerical and start with a letter.
1555 1555 Values must not contain newline characters.
1556 1556
1557 1557 If 'firstline' is not None, it is written to file before
1558 1558 everything else, as it is, not in a key=value form"""
1559 1559 lines = []
1560 1560 if firstline is not None:
1561 1561 lines.append('%s\n' % firstline)
1562 1562
1563 1563 for k, v in data.items():
1564 1564 if k == self.firstlinekey:
1565 1565 e = "key name '%s' is reserved" % self.firstlinekey
1566 1566 raise error.ProgrammingError(e)
1567 1567 if not k[0:1].isalpha():
1568 1568 e = "keys must start with a letter in a key-value file"
1569 1569 raise error.ProgrammingError(e)
1570 1570 if not k.isalnum():
1571 1571 e = "invalid key name in a simple key-value file"
1572 1572 raise error.ProgrammingError(e)
1573 1573 if '\n' in v:
1574 1574 e = "invalid value in a simple key-value file"
1575 1575 raise error.ProgrammingError(e)
1576 1576 lines.append("%s=%s\n" % (k, v))
1577 1577 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1578 1578 fp.write(''.join(lines))
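
# Round-trip sketch (the '/tmp/demo' vfs root and the keys are illustrative):
#
#     from mercurial import vfs as vfsmod
#     kv = simplekeyvaluefile(vfsmod.vfs('/tmp/demo'), 'state')
#     kv.write({'version': '1', 'status': 'pending'}, firstline='format-v1')
#     kv.read(firstlinenonkeyval=True)
#     # -> {'__firstline': 'format-v1', 'version': '1', 'status': 'pending'}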

_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()
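
# Registration sketch for an extension (the '_fetchbatch' helper is
# hypothetical; a util.hooks instance calls every registered function in
# turn when invoked):
#
#     def prefetch(repo, revs, match):
#         for rev in revs:
#             _fetchbatch(repo, [f for f in repo[rev].walk(match)])
#
#     scmutil.fileprefetchhooks.add('myextension', prefetch)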

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally but the term has not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

    @reportsummary
    def reportphasechanges(repo, tr):
        """Report statistics of phase changes for changesets pre-existing
        pull/unbundle.
        """
        origrepolen = tr.changes.get('origrepolen', len(repo))
        phasetracking = tr.changes.get('phases', {})
        if not phasetracking:
            return
        published = [
            rev for rev, (old, new) in phasetracking.iteritems()
            if new == phases.public and rev < origrepolen
        ]
        if not published:
            return
        repo.ui.status(_('%d local changesets published\n')
                       % len(published))
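
# Wiring sketch (this mirrors how transaction-opening code typically drives
# it; the transaction name is illustrative):
#
#     tr = repo.transaction('unbundle')
#     registersummarycallback(repo, tr, txnname='unbundle')
#     # ... apply changes; the summaries print when tr closes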

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to the affected
    # branches only
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
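
# Wiring sketch (a transaction validator is one plausible hook point; the
# validator category name 'singlehead' is illustrative):
#
#     if repo.ui.configbool('experimental', 'single-head-per-branch'):
#         tr.addvalidator('singlehead',
#                         lambda tr2: enforcesinglehead(repo, tr2, desc))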

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags cache until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
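
# Usage sketch (command code typically rebinds its repo like this before
# resolving user-supplied revisions):
#
#     repo = unhidehashlikerevs(repo, specs, 'warn')
#     revs = revrange(repo, specs)  # hidden hashes in specs now resolve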

def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
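
# Revset sketch: for a bookmark named 'feature' this selects the ancestors of
# 'feature' that are reachable neither from a non-bookmarked head nor from any
# other bookmark, i.e. the changesets "owned" by that bookmark. The equivalent
# command-line query (bookmark name illustrative):
#
#     hg log -r "ancestors(bookmark(feature))
#                - ancestors(head() and not bookmark(feature))
#                - ancestors(bookmark() and not bookmark(feature))"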