##// END OF EJS Templates
scmutil: move construction of instability count message to separate fn...
Pulkit Goyal -
r38474:1cac2e8c default
parent child Browse files
Show More
@@ -1,1680 +1,1688 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
# Pick the platform-specific implementation module at import time:
# Windows gets scmwindows, every other platform gets scmposix.
if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

# Re-export the platform's terminal-size helper at module level.
termsize = scmplatform.termsize
56 56
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    # Underlying tuple layout:
    # (modified, added, removed, deleted, unknown, ignored, clean)
    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # r'' literals keep the template a native str on both Python 2 and 3.
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
110 110
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring ctx1. Entries from ctx2
    # matter when .hgsub has been modified (in ctx2) but not yet committed
    # (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subrepos present only in ctx2 are handled separately below.
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise the ctx2 subrepo would be
    # compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
135 135
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    # Count excluded nodes that are secret and still alive; extinct ones
    # are not worth reporting.
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
152 152
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Record the traceback before the typed handlers below consume
            # the exception.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # Not prefixed with "abort:": the user is expected to resume the
        # operation, not treat it as a failure.
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): an object with .code is presumably an HTTPError,
        # one with .reason a URLError/SSLError — confirm against urllib2.
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped into 'head') is not an error.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
269 269
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not an acceptable new label (bookmark/branch/...).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286 286
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt dirstate/manifest storage.
    if any(badchar in f for badchar in ('\r', '\n')):
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
292 292
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
304 304
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # Windows always aborts: non-portable names cannot even be created there.
    abort = pycompat.iswindows or lval == 'abort'
    # NOTE: 'warn' may be True/False/None; callers only rely on truthiness.
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
317 317
class casecollisionauditor(object):
    # Flags filenames that would collide with an already-tracked file on a
    # case-insensitive filesystem; warns or aborts depending on 'abort'.
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lowercase every tracked name in a single encoding.lower() call by
        # joining/splitting on NUL (NUL cannot appear in tracked names).
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
341 341
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    sha = hashlib.sha1()
    for rev in revs:
        sha.update('%d;' % rev)
    return sha.digest()
365 365
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; deeper walk
        # failures are ignored by os.walk via this handler.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Remember dirname's stat; return True iff it was not seen
            # before (guards against symlink cycles).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat() we cannot detect walk cycles, so symlink
        # following is disabled entirely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk symlinked dirs via a recursive call so the
                        # shared seen_dirs list keeps preventing cycles.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
409 409
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no node; use the sentinel wdirid.
    node = ctx.node()
    return wdirid if node is None else node
416 416
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has no revision; use the sentinel wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
424 424
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
430 430
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug output shows the full hash, normal output the short form.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
438 438
def resolvehexnodeidprefix(repo, prefix):
    '''Resolve a hex nodeid prefix to a full node, or None if unknown.'''
    # Uses unfiltered repo because it's faster when prefix is ambiguous.
    # This matches the shortesthexnodeidprefix() function below.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is not None:
        repo.changelog.rev(node)  # make sure node isn't filtered
        return node
    return None
447 447
def shortesthexnodeidprefix(repo, node, minlength=1):
    """Find the shortest unambiguous prefix that matches hexnode."""
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = repo.unfiltered().changelog

    def isrev(prefix):
        # True when 'prefix' could be mistaken for a revision number.
        try:
            i = int(prefix)
            # if we are a pure int, then starting with zero will not be
            # confused as a rev; or, obviously, if the int is larger
            # than the value of the tip rev
            if prefix[0] == '0' or i > len(cl):
                return False
            return True
        except ValueError:
            return False

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        # Grow the prefix one hex digit at a time until it can no longer be
        # parsed as a revision number.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not isrev(prefix):
                return prefix

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
479 479
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.LookupError if the symbol is an
    ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
491 491
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # 1) try the symbol as a revision number
        try:
            r = int(symbol)
            # reject forms that don't round-trip, e.g. "010" or "1 "
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # 2) try the symbol as a full 40-hex-digit nodeid
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # 3) look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # 4) finally, try the symbol as an unambiguous nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
552 552
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # Check whether the changeset is obsolete.
    ctx = revsymbol(repo.unfiltered(), changeid)

    # If it is, enrich the message with the reason that made this
    # changeset not visible.
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
577 577
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revspec to a context, falling back to 'default'.'''
    # An empty spec (but not the integer 0) means "use the default".
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
586 586
def _pairspec(revspec):
    # A revspec whose top-level operator is a range form denotes a pair of
    # revisions even when both endpoints resolve to the same revision.
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
590 590
def revpair(repo, revs):
    # Resolve 'revs' to a (firstctx, secondctx) pair; with no revs the pair
    # is (working copy parent, working copy).
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Pick the endpoints cheaply when the smartset knows its ordering;
    # otherwise fall back to first()/last().
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
620 620
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are shorthand for 'rev(N)'.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
648 648
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        # In debug mode always show both parents, padded with 'null'.
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # Parent immediately precedes this revision: nothing worth showing.
        return []
    return parents
664 664
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns ('glob:', 're:', ...) pass through.
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # No match: keep the original pattern untouched.
            expanded.append(kindpat)
    return expanded
683 683
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is the matcher bound below; the closure reads it lazily, after
        # ctx.match() has assigned it.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # A match-everything matcher means no effective patterns.
        pats = []
    return m, pats
708 708
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
713 713
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # Thin wrapper around matchmod.always, anchored at the repo root.
    return matchmod.always(repo.root, repo.getcwd())
717 717
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # Exact matcher: matches only the listed files, no pattern expansion.
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
721 721
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    # A plain path needs no matching, just canonicalization.
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    # followlines requires exactly one target file.
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
735 735
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        # A directory occupies the backup path; clear it out.
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
771 771
772 772 class _containsnode(object):
773 773 """proxy __contains__(node) to container.__contains__ which accepts revs"""
774 774
775 775 def __init__(self, repo, revcontainer):
776 776 self._torev = repo.changelog.rev
777 777 self._revcontains = revcontainer.__contains__
778 778
779 779 def __contains__(self, node):
780 780 return self._revcontains(self._torev(node))
781 781
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when phases are being fixed up.
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied move takes precedence
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # reverse map: new node -> list of old nodes it replaces
        precursors = {}
        for oldnode, newnodes in replacements.items():
            for newnode in newnodes:
                precursors.setdefault(newnode, []).append(oldnode)

        # Process new nodes in revision order so parents get their phase
        # assigned before their children are examined.
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            # Pending phase if one was computed, otherwise current phase.
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
909 909
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and forget missing ones, as matched by ``matcher``.

    Recurses into subrepositories when requested, prints what is being
    added/removed, and records rename candidates via similarity detection.
    Returns 1 if any explicitly requested file (or a subrepo) failed, else 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')

    # validate and normalize the similarity percentage to [0.0, 1.0]
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    def join(f):
        return os.path.join(prefix, f)

    # handle subrepositories first; a failure there flips the return code
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher rejects so we can report failure below
    rejected = []

    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly named file that was rejected means overall failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
969 969
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: 'rejected' must exist before the matcher's badfn closure runs;
    # the original defined it *after* building the matcher and relied on
    # late binding, which is fragile. Define it up front instead.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report adds (unknown/forgotten) and removals (deleted)
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # any file the matcher explicitly requested but rejected means failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
998 998
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        state = dirstate[abs]
        # branch order matters: an untracked path failing the audit check
        # deliberately falls through to the "deleted" test below
        if state == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif state != 'r' and not st:
            deleted.append(abs)
        elif state == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(abs)
        elif state == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1027 1027
1028 1028 def _findrenames(repo, matcher, added, removed, similarity):
1029 1029 '''Find renames from removed files to added ones.'''
1030 1030 renames = {}
1031 1031 if similarity > 0:
1032 1032 for old, new, score in similar.findrenames(repo, added, removed,
1033 1033 similarity):
1034 1034 if (repo.ui.verbose or not matcher.exact(old)
1035 1035 or not matcher.exact(new)):
1036 1036 repo.ui.status(_('recording removal of %s as rename to %s '
1037 1037 '(%d%% similar)\n') %
1038 1038 (matcher.rel(old), matcher.rel(new),
1039 1039 score * 100))
1040 1040 renames[new] = old
1041 1041 return renames
1042 1042
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for newname, oldname in renames.iteritems():
            workingctx.copy(oldname, newname)
1052 1052
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return

    # copying an uncommitted addition: warn that no copy data is recorded
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
    if repo.dirstate[dst] in '?r' and not dryrun:
        wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1071 1071
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings; raises RequirementError when
    the file is corrupt or lists a feature this Mercurial does not know.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for entry in requirements:
        if entry in supported:
            continue
        # empty or non-alphanumeric-leading entries indicate corruption
        if not entry or not entry[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(entry)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1090 1090
def writerequires(opener, requirements):
    """Write the requirement strings, sorted one per line, to .hg/requires."""
    with opener('requires', 'w') as fp:
        fp.write(''.join("%s\n" % r for r in sorted(requirements)))
1095 1095
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    ``cachestat`` holds the last observed util.cachestat (or None);
    ``_cacheable`` is a tri-state: True/False once known, None while the
    file has never been successfully stat'ed.
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means we don't know yet whether stat data is reliable here
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the file, but only if stat data is usable for caching."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable
        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True when the file changed (or cannot be cached reliably)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        """util.cachestat(path), or None when the file does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1150 1150
class filecacheentry(object):
    """A group of filecachesubentry objects treated as a single unit."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # note: short-circuits, but each changed() call may refresh the
        # sub-entry's recorded stat as a side effect
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1167 1167
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator entry point: remember the wrapped function and both the
        # native-str and bytes forms of its name
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # fast path: a cached value lives directly in the instance __dict__
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed on disk: recompute
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)
1247 1247
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for line in src:
            if " " in line:
                key, value = line.strip().split(" ", 1)
            else:
                key, value = line.strip(), ""

            key = encoding.tolocal(key)
            try:
                data[revsingle(repo, key).rev()] = encoding.tolocal(value)
            except (error.LookupError, error.RepoLookupError):
                # we ignore data for nodes that don't exist locally
                pass
    finally:
        # always reap the child / close the stream, even on parse errors
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1302 1302
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run ``cmd`` with ``envvar`` set so the child can inherit ``lock``.

    ``lock`` must currently be held; raises LockInheritanceContractViolation
    otherwise. Note: a caller-supplied ``environ`` dict is mutated in place.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1312 1312
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1321 1321
class progress(object):
    """Stateful wrapper around ui.progress(), usable as a context manager.

    Tracks the current position and total so callers can use relative
    increment() calls; leaving the context (or calling complete()) clears
    the progress display.
    """

    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        # FIX: originally returned None, so ``with progress(...) as p:``
        # bound None instead of the progress object.
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Set the absolute position (and optionally a new total)."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        # a position of None tells ui.progress to clear the bar
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)
1352 1352
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1359 1359
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1365 1365
1366 1366 class simplekeyvaluefile(object):
1367 1367 """A simple file with key=value lines
1368 1368
1369 1369 Keys must be alphanumerics and start with a letter, values must not
1370 1370 contain '\n' characters"""
1371 1371 firstlinekey = '__firstline'
1372 1372
1373 1373 def __init__(self, vfs, path, keys=None):
1374 1374 self.vfs = vfs
1375 1375 self.path = path
1376 1376
1377 1377 def read(self, firstlinenonkeyval=False):
1378 1378 """Read the contents of a simple key-value file
1379 1379
1380 1380 'firstlinenonkeyval' indicates whether the first line of file should
1381 1381 be treated as a key-value pair or reuturned fully under the
1382 1382 __firstline key."""
1383 1383 lines = self.vfs.readlines(self.path)
1384 1384 d = {}
1385 1385 if firstlinenonkeyval:
1386 1386 if not lines:
1387 1387 e = _("empty simplekeyvalue file")
1388 1388 raise error.CorruptedState(e)
1389 1389 # we don't want to include '\n' in the __firstline
1390 1390 d[self.firstlinekey] = lines[0][:-1]
1391 1391 del lines[0]
1392 1392
1393 1393 try:
1394 1394 # the 'if line.strip()' part prevents us from failing on empty
1395 1395 # lines which only contain '\n' therefore are not skipped
1396 1396 # by 'if line'
1397 1397 updatedict = dict(line[:-1].split('=', 1) for line in lines
1398 1398 if line.strip())
1399 1399 if self.firstlinekey in updatedict:
1400 1400 e = _("%r can't be used as a key")
1401 1401 raise error.CorruptedState(e % self.firstlinekey)
1402 1402 d.update(updatedict)
1403 1403 except ValueError as e:
1404 1404 raise error.CorruptedState(str(e))
1405 1405 return d
1406 1406
1407 1407 def write(self, data, firstline=None):
1408 1408 """Write key=>value mapping to a file
1409 1409 data is a dict. Keys must be alphanumerical and start with a letter.
1410 1410 Values must not contain newline characters.
1411 1411
1412 1412 If 'firstline' is not None, it is written to file before
1413 1413 everything else, as it is, not in a key=value form"""
1414 1414 lines = []
1415 1415 if firstline is not None:
1416 1416 lines.append('%s\n' % firstline)
1417 1417
1418 1418 for k, v in data.items():
1419 1419 if k == self.firstlinekey:
1420 1420 e = "key name '%s' is reserved" % self.firstlinekey
1421 1421 raise error.ProgrammingError(e)
1422 1422 if not k[0:1].isalpha():
1423 1423 e = "keys must start with a letter in a key-value file"
1424 1424 raise error.ProgrammingError(e)
1425 1425 if not k.isalnum():
1426 1426 e = "invalid key name in a simple key-value file"
1427 1427 raise error.ProgrammingError(e)
1428 1428 if '\n' in v:
1429 1429 e = "invalid value in a simple key-value file"
1430 1430 raise error.ProgrammingError(e)
1431 1431 lines.append("%s=%s\n" % (k, v))
1432 1432 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1433 1433 fp.write(''.join(lines))
1434 1434
1435 1435 _reportobsoletedsource = [
1436 1436 'debugobsolete',
1437 1437 'pull',
1438 1438 'push',
1439 1439 'serve',
1440 1440 'unbundle',
1441 1441 ]
1442 1442
1443 1443 _reportnewcssource = [
1444 1444 'pull',
1445 1445 'unbundle',
1446 1446 ]
1447 1447
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1460 1460
# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1466 1466
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    # NOTE: this span contained unresolved diff residue (both the removed
    # inline-warn lines and the added getinstabilitymessage() call); it is
    # resolved here to the post-change version, which delegates message
    # construction to getinstabilitymessage() so extensions can wrap it.
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            newrevs = tr.changes.get('revs', xrange(0, 0))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev not in newrevs
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1568 1568
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        # no new instabilities of this kind: nothing to warn about
        return None
    return _('%i new %s changesets\n') % (delta, instability)
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes, eliding the tail when there are
    more than maxnumnodes of them (unless the ui is verbose)."""
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1574 1582
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"') % name
            hint = _('%d heads: %s') % (len(heads),
                                        nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1589 1597
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # identity by default; extensions monkeypatch this hook
    return sink
1595 1603
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # gather every hash-like symbol appearing in the parsed revsets
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1638 1646
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for sym in symbols:
        # first interpret the symbol as a revision number; a number larger
        # than tip deliberately falls through to hash-prefix resolution
        try:
            n = int(sym)
            if n <= tiprev:
                if allowrevnums and n not in cl:
                    revs.add(n)
                continue
        except ValueError:
            pass

        # then try it as a hex node id prefix
        try:
            node = resolvehexnodeidprefix(unfi, sym)
        except (error.LookupError, error.WdirUnsupported):
            node = None

        if node is not None:
            rev = unficl.rev(node)
            # only interesting if the revision is hidden in the filtered view
            if rev not in cl:
                revs.add(rev)

    return revs
1672 1680
def bookmarkrevs(repo, mark):
    """Select revisions reachable by a given bookmark."""
    # ancestors of the bookmark, minus anything reachable from other heads
    # or other bookmarks
    spec = ("ancestors(bookmark(%s)) - "
            "ancestors(head() and not bookmark(%s)) - "
            "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(spec, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now