sparse-revlog: also use sparse-revlog config as a general delta trigger...
Boris Feld
r38782:f8cbff21 stable
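The hunk below extends gdinitconfig() so that enabling the format.sparse-revlog option also triggers general delta when a repository is created; sparse revlogs build on general delta, so either flag now implies it. A minimal sketch of the resulting helper (names taken directly from the diff):

    def gdinitconfig(ui):
        # any of the three format knobs now enables general delta
        return (ui.configbool('format', 'generaldelta')
                or ui.configbool('format', 'usegeneraldelta')
                or ui.configbool('format', 'sparse-revlog'))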
@@ -1,1699 +1,1700 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
50 50 if pycompat.iswindows:
51 51 from . import scmwindows as scmplatform
52 52 else:
53 53 from . import scmposix as scmplatform
54 54
55 55 termsize = scmplatform.termsize
56 56
57 57 class status(tuple):
58 58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
59 59 and 'ignored' properties are only relevant to the working copy.
60 60 '''
61 61
62 62 __slots__ = ()
63 63
64 64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
65 65 clean):
66 66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
67 67 ignored, clean))
68 68
69 69 @property
70 70 def modified(self):
71 71 '''files that have been modified'''
72 72 return self[0]
73 73
74 74 @property
75 75 def added(self):
76 76 '''files that have been added'''
77 77 return self[1]
78 78
79 79 @property
80 80 def removed(self):
81 81 '''files that have been removed'''
82 82 return self[2]
83 83
84 84 @property
85 85 def deleted(self):
86 86 '''files that are in the dirstate, but have been deleted from the
87 87 working copy (aka "missing")
88 88 '''
89 89 return self[3]
90 90
91 91 @property
92 92 def unknown(self):
93 93 '''files not in the dirstate that are not ignored'''
94 94 return self[4]
95 95
96 96 @property
97 97 def ignored(self):
98 98 '''files not in the dirstate that are ignored (by _dirignore())'''
99 99 return self[5]
100 100
101 101 @property
102 102 def clean(self):
103 103 '''files that have not been modified'''
104 104 return self[6]
105 105
106 106 def __repr__(self, *args, **kwargs):
107 107 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
108 108 r'unknown=%s, ignored=%s, clean=%s>') %
109 109 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
110 110
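# Illustrative sketch (hypothetical helper, not part of the original module):
# status behaves like a plain 7-tuple but also exposes named accessors, so
# both positional unpacking and attribute access work.
def _statusexample():
    st = status(['a.txt'], [], [], [], [], [], ['b.txt'])
    modified, added, removed, deleted, unknown, ignored, clean = st
    assert st.modified == modified == ['a.txt']
    return st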
111 111 def itersubrepos(ctx1, ctx2):
112 112 """find subrepos in ctx1 or ctx2"""
113 113 # Create a (subpath, ctx) mapping where we prefer subpaths from
114 114 # ctx1. The subpaths from ctx2 are important when the .hgsub file
115 115 # has been modified (in ctx2) but not yet committed (in ctx1).
116 116 subpaths = dict.fromkeys(ctx2.substate, ctx2)
117 117 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
118 118
119 119 missing = set()
120 120
121 121 for subpath in ctx2.substate:
122 122 if subpath not in ctx1.substate:
123 123 del subpaths[subpath]
124 124 missing.add(subpath)
125 125
126 126 for subpath, ctx in sorted(subpaths.iteritems()):
127 127 yield subpath, ctx.sub(subpath)
128 128
129 129 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
130 130 # status and diff will have an accurate result when it does
131 131 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
132 132 # against itself.
133 133 for subpath in missing:
134 134 yield subpath, ctx2.nullsub(subpath, ctx1)
135 135
136 136 def nochangesfound(ui, repo, excluded=None):
137 137 '''Report no changes for push/pull; excluded is None or a list of
138 138 nodes excluded from the push/pull.
139 139 '''
140 140 secretlist = []
141 141 if excluded:
142 142 for n in excluded:
143 143 ctx = repo[n]
144 144 if ctx.phase() >= phases.secret and not ctx.extinct():
145 145 secretlist.append(n)
146 146
147 147 if secretlist:
148 148 ui.status(_("no changes found (ignored %d secret changesets)\n")
149 149 % len(secretlist))
150 150 else:
151 151 ui.status(_("no changes found\n"))
152 152
153 153 def callcatch(ui, func):
154 154 """call func() with global exception handling
155 155
156 156 return func() if no exception happens. otherwise do some error handling
157 157 and return an exit code accordingly. does not handle all exceptions.
158 158 """
159 159 try:
160 160 try:
161 161 return func()
162 162 except: # re-raises
163 163 ui.traceback()
164 164 raise
165 165 # Global exception handling, alphabetically
166 166 # Mercurial-specific first, followed by built-in and library exceptions
167 167 except error.LockHeld as inst:
168 168 if inst.errno == errno.ETIMEDOUT:
169 169 reason = _('timed out waiting for lock held by %r') % inst.locker
170 170 else:
171 171 reason = _('lock held by %r') % inst.locker
172 172 ui.warn(_("abort: %s: %s\n")
173 173 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
174 174 if not inst.locker:
175 175 ui.warn(_("(lock might be very busy)\n"))
176 176 except error.LockUnavailable as inst:
177 177 ui.warn(_("abort: could not lock %s: %s\n") %
178 178 (inst.desc or stringutil.forcebytestr(inst.filename),
179 179 encoding.strtolocal(inst.strerror)))
180 180 except error.OutOfBandError as inst:
181 181 if inst.args:
182 182 msg = _("abort: remote error:\n")
183 183 else:
184 184 msg = _("abort: remote error\n")
185 185 ui.warn(msg)
186 186 if inst.args:
187 187 ui.warn(''.join(inst.args))
188 188 if inst.hint:
189 189 ui.warn('(%s)\n' % inst.hint)
190 190 except error.RepoError as inst:
191 191 ui.warn(_("abort: %s!\n") % inst)
192 192 if inst.hint:
193 193 ui.warn(_("(%s)\n") % inst.hint)
194 194 except error.ResponseError as inst:
195 195 ui.warn(_("abort: %s") % inst.args[0])
196 196 msg = inst.args[1]
197 197 if isinstance(msg, type(u'')):
198 198 msg = pycompat.sysbytes(msg)
199 199 if not isinstance(msg, bytes):
200 200 ui.warn(" %r\n" % (msg,))
201 201 elif not msg:
202 202 ui.warn(_(" empty string\n"))
203 203 else:
204 204 ui.warn("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
205 205 except error.CensoredNodeError as inst:
206 206 ui.warn(_("abort: file censored %s!\n") % inst)
207 207 except error.RevlogError as inst:
208 208 ui.warn(_("abort: %s!\n") % inst)
209 209 except error.InterventionRequired as inst:
210 210 ui.warn("%s\n" % inst)
211 211 if inst.hint:
212 212 ui.warn(_("(%s)\n") % inst.hint)
213 213 return 1
214 214 except error.WdirUnsupported:
215 215 ui.warn(_("abort: working directory revision cannot be specified\n"))
216 216 except error.Abort as inst:
217 217 ui.warn(_("abort: %s\n") % inst)
218 218 if inst.hint:
219 219 ui.warn(_("(%s)\n") % inst.hint)
220 220 except ImportError as inst:
221 221 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
222 222 m = stringutil.forcebytestr(inst).split()[-1]
223 223 if m in "mpatch bdiff".split():
224 224 ui.warn(_("(did you forget to compile extensions?)\n"))
225 225 elif m in "zlib".split():
226 226 ui.warn(_("(is your Python install correct?)\n"))
227 227 except IOError as inst:
228 228 if util.safehasattr(inst, "code"):
229 229 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
230 230 elif util.safehasattr(inst, "reason"):
231 231 try: # usually it is in the form (errno, strerror)
232 232 reason = inst.reason.args[1]
233 233 except (AttributeError, IndexError):
234 234 # it might be anything, for example a string
235 235 reason = inst.reason
236 236 if isinstance(reason, pycompat.unicode):
237 237 # SSLError of Python 2.7.9 contains a unicode
238 238 reason = encoding.unitolocal(reason)
239 239 ui.warn(_("abort: error: %s\n") % reason)
240 240 elif (util.safehasattr(inst, "args")
241 241 and inst.args and inst.args[0] == errno.EPIPE):
242 242 pass
243 243 elif getattr(inst, "strerror", None):
244 244 if getattr(inst, "filename", None):
245 245 ui.warn(_("abort: %s: %s\n") % (
246 246 encoding.strtolocal(inst.strerror),
247 247 stringutil.forcebytestr(inst.filename)))
248 248 else:
249 249 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
250 250 else:
251 251 raise
252 252 except OSError as inst:
253 253 if getattr(inst, "filename", None) is not None:
254 254 ui.warn(_("abort: %s: '%s'\n") % (
255 255 encoding.strtolocal(inst.strerror),
256 256 stringutil.forcebytestr(inst.filename)))
257 257 else:
258 258 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
259 259 except MemoryError:
260 260 ui.warn(_("abort: out of memory\n"))
261 261 except SystemExit as inst:
262 262 # Commands shouldn't sys.exit directly, but give a return code.
263 263 # Just in case catch this and pass exit code to caller.
264 264 return inst.code
265 265 except socket.error as inst:
266 266 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
267 267
268 268 return -1
269 269
270 270 def checknewlabel(repo, lbl, kind):
271 271 # Do not use the "kind" parameter in ui output.
272 272 # It makes strings difficult to translate.
273 273 if lbl in ['tip', '.', 'null']:
274 274 raise error.Abort(_("the name '%s' is reserved") % lbl)
275 275 for c in (':', '\0', '\n', '\r'):
276 276 if c in lbl:
277 277 raise error.Abort(
278 278 _("%r cannot be used in a name") % pycompat.bytestr(c))
279 279 try:
280 280 int(lbl)
281 281 raise error.Abort(_("cannot use an integer as a name"))
282 282 except ValueError:
283 283 pass
284 284 if lbl.strip() != lbl:
285 285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286 286
287 287 def checkfilename(f):
288 288 '''Check that the filename f is an acceptable filename for a tracked file'''
289 289 if '\r' in f or '\n' in f:
290 290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
291 291 % pycompat.bytestr(f))
292 292
293 293 def checkportable(ui, f):
294 294 '''Check if filename f is portable and warn or abort depending on config'''
295 295 checkfilename(f)
296 296 abort, warn = checkportabilityalert(ui)
297 297 if abort or warn:
298 298 msg = util.checkwinfilename(f)
299 299 if msg:
300 300 msg = "%s: %s" % (msg, procutil.shellquote(f))
301 301 if abort:
302 302 raise error.Abort(msg)
303 303 ui.warn(_("warning: %s\n") % msg)
304 304
305 305 def checkportabilityalert(ui):
306 306 '''check if the user's config requests nothing, a warning, or abort for
307 307 non-portable filenames'''
308 308 val = ui.config('ui', 'portablefilenames')
309 309 lval = val.lower()
310 310 bval = stringutil.parsebool(val)
311 311 abort = pycompat.iswindows or lval == 'abort'
312 312 warn = bval or lval == 'warn'
313 313 if bval is None and not (warn or abort or lval == 'ignore'):
314 314 raise error.ConfigError(
315 315 _("ui.portablefilenames value is invalid ('%s')") % val)
316 316 return abort, warn
317 317
318 318 class casecollisionauditor(object):
319 319 def __init__(self, ui, abort, dirstate):
320 320 self._ui = ui
321 321 self._abort = abort
322 322 allfiles = '\0'.join(dirstate._map)
323 323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
324 324 self._dirstate = dirstate
325 325 # The purpose of _newfiles is so that we don't complain about
326 326 # case collisions if someone were to call this object with the
327 327 # same filename twice.
328 328 self._newfiles = set()
329 329
330 330 def __call__(self, f):
331 331 if f in self._newfiles:
332 332 return
333 333 fl = encoding.lower(f)
334 334 if fl in self._loweredfiles and f not in self._dirstate:
335 335 msg = _('possible case-folding collision for %s') % f
336 336 if self._abort:
337 337 raise error.Abort(msg)
338 338 self._ui.warn(_("warning: %s\n") % msg)
339 339 self._loweredfiles.add(fl)
340 340 self._newfiles.add(f)
341 341
342 342 def filteredhash(repo, maxrev):
343 343 """build hash of filtered revisions in the current repoview.
344 344
345 345 Multiple caches perform up-to-date validation by checking that the
346 346 tiprev and tipnode stored in the cache file match the current repository.
347 347 However, this is not sufficient for validating repoviews because the set
348 348 of revisions in the view may change without the repository tiprev and
349 349 tipnode changing.
350 350
351 351 This function hashes all the revs filtered from the view and returns
352 352 that SHA-1 digest.
353 353 """
354 354 cl = repo.changelog
355 355 if not cl.filteredrevs:
356 356 return None
357 357 key = None
358 358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
359 359 if revs:
360 360 s = hashlib.sha1()
361 361 for rev in revs:
362 362 s.update('%d;' % rev)
363 363 key = s.digest()
364 364 return key
365 365
366 366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
367 367 '''yield every hg repository under path, always recursively.
368 368 The recurse flag will only control recursion into repo working dirs'''
369 369 def errhandler(err):
370 370 if err.filename == path:
371 371 raise err
372 372 samestat = getattr(os.path, 'samestat', None)
373 373 if followsym and samestat is not None:
374 374 def adddir(dirlst, dirname):
375 375 dirstat = os.stat(dirname)
376 376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
377 377 if not match:
378 378 dirlst.append(dirstat)
379 379 return not match
380 380 else:
381 381 followsym = False
382 382
383 383 if (seen_dirs is None) and followsym:
384 384 seen_dirs = []
385 385 adddir(seen_dirs, path)
386 386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
387 387 dirs.sort()
388 388 if '.hg' in dirs:
389 389 yield root # found a repository
390 390 qroot = os.path.join(root, '.hg', 'patches')
391 391 if os.path.isdir(os.path.join(qroot, '.hg')):
392 392 yield qroot # we have a patch queue repo here
393 393 if recurse:
394 394 # avoid recursing inside the .hg directory
395 395 dirs.remove('.hg')
396 396 else:
397 397 dirs[:] = [] # don't descend further
398 398 elif followsym:
399 399 newdirs = []
400 400 for d in dirs:
401 401 fname = os.path.join(root, d)
402 402 if adddir(seen_dirs, fname):
403 403 if os.path.islink(fname):
404 404 for hgname in walkrepos(fname, True, seen_dirs):
405 405 yield hgname
406 406 else:
407 407 newdirs.append(d)
408 408 dirs[:] = newdirs
409 409
410 410 def binnode(ctx):
411 411 """Return binary node id for a given basectx"""
412 412 node = ctx.node()
413 413 if node is None:
414 414 return wdirid
415 415 return node
416 416
417 417 def intrev(ctx):
418 418 """Return integer for a given basectx that can be used in comparison or
419 419 arithmetic operation"""
420 420 rev = ctx.rev()
421 421 if rev is None:
422 422 return wdirrev
423 423 return rev
424 424
425 425 def formatchangeid(ctx):
426 426 """Format changectx as '{rev}:{node|formatnode}', which is the default
427 427 template provided by logcmdutil.changesettemplater"""
428 428 repo = ctx.repo()
429 429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
430 430
431 431 def formatrevnode(ui, rev, node):
432 432 """Format given revision and node depending on the current verbosity"""
433 433 if ui.debugflag:
434 434 hexfunc = hex
435 435 else:
436 436 hexfunc = short
437 437 return '%d:%s' % (rev, hexfunc(node))
438 438
439 439 def resolvehexnodeidprefix(repo, prefix):
440 440 # Uses unfiltered repo because it's faster when prefix is ambiguous.
441 441 # This matches the shortesthexnodeidprefix() function below.
442 442 node = repo.unfiltered().changelog._partialmatch(prefix)
443 443 if node is None:
444 444 return
445 445 repo.changelog.rev(node) # make sure node isn't filtered
446 446 return node
447 447
448 448 def shortesthexnodeidprefix(repo, node, minlength=1):
449 449 """Find the shortest unambiguous prefix that matches hexnode."""
450 450 # _partialmatch() of filtered changelog could take O(len(repo)) time,
451 451 # which would be unacceptably slow. so we look for hash collision in
452 452 # which would be unacceptably slow. So we look for hash collisions in
453 453 cl = repo.unfiltered().changelog
454 454
455 455 def isrev(prefix):
456 456 try:
457 457 i = int(prefix)
458 458 # if we are a pure int, then starting with zero will not be
459 459 # confused as a rev; or, obviously, if the int is larger
460 460 # than the value of the tip rev
461 461 if prefix[0:1] == b'0' or i > len(cl):
462 462 return False
463 463 return True
464 464 except ValueError:
465 465 return False
466 466
467 467 def disambiguate(prefix):
468 468 """Disambiguate against revnums."""
469 469 hexnode = hex(node)
470 470 for length in range(len(prefix), len(hexnode) + 1):
471 471 prefix = hexnode[:length]
472 472 if not isrev(prefix):
473 473 return prefix
474 474
475 475 try:
476 476 return disambiguate(cl.shortest(node, minlength))
477 477 except error.LookupError:
478 478 raise error.RepoLookupError()
479 479
480 480 def isrevsymbol(repo, symbol):
481 481 """Checks if a symbol exists in the repo.
482 482
483 483 See revsymbol() for details. Raises error.LookupError if the symbol is an
484 484 ambiguous nodeid prefix.
485 485 """
486 486 try:
487 487 revsymbol(repo, symbol)
488 488 return True
489 489 except error.RepoLookupError:
490 490 return False
491 491
492 492 def revsymbol(repo, symbol):
493 493 """Returns a context given a single revision symbol (as string).
494 494
495 495 This is similar to revsingle(), but accepts only a single revision symbol,
496 496 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
497 497 not "max(public())".
498 498 """
499 499 if not isinstance(symbol, bytes):
500 500 msg = ("symbol (%s of type %s) was not a string, did you mean "
501 501 "repo[symbol]?" % (symbol, type(symbol)))
502 502 raise error.ProgrammingError(msg)
503 503 try:
504 504 if symbol in ('.', 'tip', 'null'):
505 505 return repo[symbol]
506 506
507 507 try:
508 508 r = int(symbol)
509 509 if '%d' % r != symbol:
510 510 raise ValueError
511 511 l = len(repo.changelog)
512 512 if r < 0:
513 513 r += l
514 514 if r < 0 or r >= l and r != wdirrev:
515 515 raise ValueError
516 516 return repo[r]
517 517 except error.FilteredIndexError:
518 518 raise
519 519 except (ValueError, OverflowError, IndexError):
520 520 pass
521 521
522 522 if len(symbol) == 40:
523 523 try:
524 524 node = bin(symbol)
525 525 rev = repo.changelog.rev(node)
526 526 return repo[rev]
527 527 except error.FilteredLookupError:
528 528 raise
529 529 except (TypeError, LookupError):
530 530 pass
531 531
532 532 # look up bookmarks through the name interface
533 533 try:
534 534 node = repo.names.singlenode(repo, symbol)
535 535 rev = repo.changelog.rev(node)
536 536 return repo[rev]
537 537 except KeyError:
538 538 pass
539 539
540 540 node = resolvehexnodeidprefix(repo, symbol)
541 541 if node is not None:
542 542 rev = repo.changelog.rev(node)
543 543 return repo[rev]
544 544
545 545 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
546 546
547 547 except error.WdirUnsupported:
548 548 return repo[None]
549 549 except (error.FilteredIndexError, error.FilteredLookupError,
550 550 error.FilteredRepoLookupError):
551 551 raise _filterederror(repo, symbol)
552 552
553 553 def _filterederror(repo, changeid):
554 554 """build an exception to be raised about a filtered changeid
555 555
556 556 This is extracted in a function to help extensions (eg: evolve) to
557 557 experiment with various message variants."""
558 558 if repo.filtername.startswith('visible'):
559 559
560 560 # Check if the changeset is obsolete
561 561 unfilteredrepo = repo.unfiltered()
562 562 ctx = revsymbol(unfilteredrepo, changeid)
563 563
564 564 # If the changeset is obsolete, enrich the message with the reason
565 565 # that made this changeset not visible
566 566 if ctx.obsolete():
567 567 msg = obsutil._getfilteredreason(repo, changeid, ctx)
568 568 else:
569 569 msg = _("hidden revision '%s'") % changeid
570 570
571 571 hint = _('use --hidden to access hidden revisions')
572 572
573 573 return error.FilteredRepoLookupError(msg, hint=hint)
574 574 msg = _("filtered revision '%s' (not in '%s' subset)")
575 575 msg %= (changeid, repo.filtername)
576 576 return error.FilteredRepoLookupError(msg)
577 577
578 578 def revsingle(repo, revspec, default='.', localalias=None):
579 579 if not revspec and revspec != 0:
580 580 return repo[default]
581 581
582 582 l = revrange(repo, [revspec], localalias=localalias)
583 583 if not l:
584 584 raise error.Abort(_('empty revision set'))
585 585 return repo[l.last()]
586 586
587 587 def _pairspec(revspec):
588 588 tree = revsetlang.parse(revspec)
589 589 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
590 590
591 591 def revpair(repo, revs):
592 592 if not revs:
593 593 return repo['.'], repo[None]
594 594
595 595 l = revrange(repo, revs)
596 596
597 597 if not l:
598 598 first = second = None
599 599 elif l.isascending():
600 600 first = l.min()
601 601 second = l.max()
602 602 elif l.isdescending():
603 603 first = l.max()
604 604 second = l.min()
605 605 else:
606 606 first = l.first()
607 607 second = l.last()
608 608
609 609 if first is None:
610 610 raise error.Abort(_('empty revision range'))
611 611 if (first == second and len(revs) >= 2
612 612 and not all(revrange(repo, [r]) for r in revs)):
613 613 raise error.Abort(_('empty revision on one side of range'))
614 614
615 615 # if top-level is range expression, the result must always be a pair
616 616 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
617 617 return repo[first], repo[None]
618 618
619 619 return repo[first], repo[second]
620 620
621 621 def revrange(repo, specs, localalias=None):
622 622 """Execute 1 to many revsets and return the union.
623 623
624 624 This is the preferred mechanism for executing revsets using user-specified
625 625 config options, such as revset aliases.
626 626
627 627 The revsets specified by ``specs`` will be executed via a chained ``OR``
628 628 expression. If ``specs`` is empty, an empty result is returned.
629 629
630 630 ``specs`` can contain integers, in which case they are assumed to be
631 631 revision numbers.
632 632
633 633 It is assumed the revsets are already formatted. If you have arguments
634 634 that need to be expanded in the revset, call ``revsetlang.formatspec()``
635 635 and pass the result as an element of ``specs``.
636 636
637 637 Specifying a single revset is allowed.
638 638
639 639 Returns a ``revset.abstractsmartset`` which is a list-like interface over
640 640 integer revisions.
641 641 """
642 642 allspecs = []
643 643 for spec in specs:
644 644 if isinstance(spec, int):
645 645 spec = revsetlang.formatspec('rev(%d)', spec)
646 646 allspecs.append(spec)
647 647 return repo.anyrevs(allspecs, user=True, localalias=localalias)
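# Illustrative sketch (hypothetical helper, not part of the original module):
# revrange() ORs the given specs together and accepts bare integers as
# revision numbers, so the call below evaluates 'draft() or rev(42)'.
def _revrangeexample(repo):
    return revrange(repo, ['draft()', 42])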
648 648
649 649 def meaningfulparents(repo, ctx):
650 650 """Return list of meaningful (or all if debug) parentrevs for rev.
651 651
652 652 For merges (two non-nullrev revisions) both parents are meaningful.
653 653 Otherwise the first parent revision is considered meaningful if it
654 654 is not the preceding revision.
655 655 """
656 656 parents = ctx.parents()
657 657 if len(parents) > 1:
658 658 return parents
659 659 if repo.ui.debugflag:
660 660 return [parents[0], repo['null']]
661 661 if parents[0].rev() >= intrev(ctx) - 1:
662 662 return []
663 663 return parents
664 664
665 665 def expandpats(pats):
666 666 '''Expand bare globs when running on windows.
667 667 On posix we assume it has already been done by sh.'''
668 668 if not util.expandglobs:
669 669 return list(pats)
670 670 ret = []
671 671 for kindpat in pats:
672 672 kind, pat = matchmod._patsplit(kindpat, None)
673 673 if kind is None:
674 674 try:
675 675 globbed = glob.glob(pat)
676 676 except re.error:
677 677 globbed = [pat]
678 678 if globbed:
679 679 ret.extend(globbed)
680 680 continue
681 681 ret.append(kindpat)
682 682 return ret
683 683
684 684 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
685 685 badfn=None):
686 686 '''Return a matcher and the patterns that were used.
687 687 The matcher will warn about bad matches, unless an alternate badfn callback
688 688 is provided.'''
689 689 if pats == ("",):
690 690 pats = []
691 691 if opts is None:
692 692 opts = {}
693 693 if not globbed and default == 'relpath':
694 694 pats = expandpats(pats or [])
695 695
696 696 def bad(f, msg):
697 697 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
698 698
699 699 if badfn is None:
700 700 badfn = bad
701 701
702 702 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
703 703 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
704 704
705 705 if m.always():
706 706 pats = []
707 707 return m, pats
708 708
709 709 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
710 710 badfn=None):
711 711 '''Return a matcher that will warn about bad matches.'''
712 712 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
713 713
714 714 def matchall(repo):
715 715 '''Return a matcher that will efficiently match everything.'''
716 716 return matchmod.always(repo.root, repo.getcwd())
717 717
718 718 def matchfiles(repo, files, badfn=None):
719 719 '''Return a matcher that will efficiently match exactly these files.'''
720 720 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
721 721
722 722 def parsefollowlinespattern(repo, rev, pat, msg):
723 723 """Return a file name from `pat` pattern suitable for usage in followlines
724 724 logic.
725 725 """
726 726 if not matchmod.patkind(pat):
727 727 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
728 728 else:
729 729 ctx = repo[rev]
730 730 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
731 731 files = [f for f in ctx if m(f)]
732 732 if len(files) != 1:
733 733 raise error.ParseError(msg)
734 734 return files[0]
735 735
736 736 def origpath(ui, repo, filepath):
737 737 '''customize where .orig files are created
738 738
739 739 Fetch user defined path from config file: [ui] origbackuppath = <path>
740 740 Fall back to default (filepath with .orig suffix) if not specified
741 741 '''
742 742 origbackuppath = ui.config('ui', 'origbackuppath')
743 743 if not origbackuppath:
744 744 return filepath + ".orig"
745 745
746 746 # Convert filepath from an absolute path into a path inside the repo.
747 747 filepathfromroot = util.normpath(os.path.relpath(filepath,
748 748 start=repo.root))
749 749
750 750 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
751 751 origbackupdir = origvfs.dirname(filepathfromroot)
752 752 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
753 753 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
754 754
755 755 # Remove any files that conflict with the backup file's path
756 756 for f in reversed(list(util.finddirs(filepathfromroot))):
757 757 if origvfs.isfileorlink(f):
758 758 ui.note(_('removing conflicting file: %s\n')
759 759 % origvfs.join(f))
760 760 origvfs.unlink(f)
761 761 break
762 762
763 763 origvfs.makedirs(origbackupdir)
764 764
765 765 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
766 766 ui.note(_('removing conflicting directory: %s\n')
767 767 % origvfs.join(filepathfromroot))
768 768 origvfs.rmtree(filepathfromroot, forcibly=True)
769 769
770 770 return origvfs.join(filepathfromroot)
771 771
772 772 class _containsnode(object):
773 773 """proxy __contains__(node) to container.__contains__ which accepts revs"""
774 774
775 775 def __init__(self, repo, revcontainer):
776 776 self._torev = repo.changelog.rev
777 777 self._revcontains = revcontainer.__contains__
778 778
779 779 def __contains__(self, node):
780 780 return self._revcontains(self._torev(node))
781 781
782 782 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
783 783 fixphase=False, targetphase=None):
784 784 """do common cleanups when old nodes are replaced by new nodes
785 785
786 786 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
787 787 (we might also want to move working directory parent in the future)
788 788
789 789 By default, bookmark moves are calculated automatically from 'replacements',
790 790 but 'moves' can be used to override that. Also, 'moves' may include
791 791 additional bookmark moves that should not have associated obsmarkers.
792 792
793 793 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
794 794 have replacements. operation is a string, like "rebase".
795 795
796 796 metadata is a dictionary containing metadata to be stored in obsmarker if
797 797 obsolescence is enabled.
798 798 """
799 799 assert fixphase or targetphase is None
800 800 if not replacements and not moves:
801 801 return
802 802
803 803 # translate mapping's other forms
804 804 if not util.safehasattr(replacements, 'items'):
805 805 replacements = {n: () for n in replacements}
806 806
807 807 # Calculate bookmark movements
808 808 if moves is None:
809 809 moves = {}
810 810 # Unfiltered repo is needed since nodes in replacements might be hidden.
811 811 unfi = repo.unfiltered()
812 812 for oldnode, newnodes in replacements.items():
813 813 if oldnode in moves:
814 814 continue
815 815 if len(newnodes) > 1:
816 816 # usually a split, take the one with biggest rev number
817 817 newnode = next(unfi.set('max(%ln)', newnodes)).node()
818 818 elif len(newnodes) == 0:
819 819 # move bookmark backwards
820 820 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
821 821 list(replacements)))
822 822 if roots:
823 823 newnode = roots[0].node()
824 824 else:
825 825 newnode = nullid
826 826 else:
827 827 newnode = newnodes[0]
828 828 moves[oldnode] = newnode
829 829
830 830 allnewnodes = [n for ns in replacements.values() for n in ns]
831 831 toretract = {}
832 832 toadvance = {}
833 833 if fixphase:
834 834 precursors = {}
835 835 for oldnode, newnodes in replacements.items():
836 836 for newnode in newnodes:
837 837 precursors.setdefault(newnode, []).append(oldnode)
838 838
839 839 allnewnodes.sort(key=lambda n: unfi[n].rev())
840 840 newphases = {}
841 841 def phase(ctx):
842 842 return newphases.get(ctx.node(), ctx.phase())
843 843 for newnode in allnewnodes:
844 844 ctx = unfi[newnode]
845 845 parentphase = max(phase(p) for p in ctx.parents())
846 846 if targetphase is None:
847 847 oldphase = max(unfi[oldnode].phase()
848 848 for oldnode in precursors[newnode])
849 849 newphase = max(oldphase, parentphase)
850 850 else:
851 851 newphase = max(targetphase, parentphase)
852 852 newphases[newnode] = newphase
853 853 if newphase > ctx.phase():
854 854 toretract.setdefault(newphase, []).append(newnode)
855 855 elif newphase < ctx.phase():
856 856 toadvance.setdefault(newphase, []).append(newnode)
857 857
858 858 with repo.transaction('cleanup') as tr:
859 859 # Move bookmarks
860 860 bmarks = repo._bookmarks
861 861 bmarkchanges = []
862 862 for oldnode, newnode in moves.items():
863 863 oldbmarks = repo.nodebookmarks(oldnode)
864 864 if not oldbmarks:
865 865 continue
866 866 from . import bookmarks # avoid import cycle
867 867 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
868 868 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
869 869 hex(oldnode), hex(newnode)))
870 870 # Delete divergent bookmarks being parents of related newnodes
871 871 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
872 872 allnewnodes, newnode, oldnode)
873 873 deletenodes = _containsnode(repo, deleterevs)
874 874 for name in oldbmarks:
875 875 bmarkchanges.append((name, newnode))
876 876 for b in bookmarks.divergent2delete(repo, deletenodes, name):
877 877 bmarkchanges.append((b, None))
878 878
879 879 if bmarkchanges:
880 880 bmarks.applychanges(repo, tr, bmarkchanges)
881 881
882 882 for phase, nodes in toretract.items():
883 883 phases.retractboundary(repo, tr, phase, nodes)
884 884 for phase, nodes in toadvance.items():
885 885 phases.advanceboundary(repo, tr, phase, nodes)
886 886
887 887 # Obsolete or strip nodes
888 888 if obsolete.isenabled(repo, obsolete.createmarkersopt):
889 889 # If a node is already obsoleted, and we want to obsolete it
890 890 # without a successor, skip that obssolete request since it's
891 891 # without a successor, skip that obsolete request since it's
892 892 # Also sort the node in topology order, that might be useful for
893 893 # some obsstore logic.
894 894 # NOTE: the filtering and sorting might belong to createmarkers.
895 895 isobs = unfi.obsstore.successors.__contains__
896 896 torev = unfi.changelog.rev
897 897 sortfunc = lambda ns: torev(ns[0])
898 898 rels = [(unfi[n], tuple(unfi[m] for m in s))
899 899 for n, s in sorted(replacements.items(), key=sortfunc)
900 900 if s or not isobs(n)]
901 901 if rels:
902 902 obsolete.createmarkers(repo, rels, operation=operation,
903 903 metadata=metadata)
904 904 else:
905 905 from . import repair # avoid import cycle
906 906 tostrip = list(replacements)
907 907 if tostrip:
908 908 repair.delayedstrip(repo.ui, repo, tostrip, operation)
909 909
910 910 def addremove(repo, matcher, prefix, opts=None):
911 911 if opts is None:
912 912 opts = {}
913 913 m = matcher
914 914 dry_run = opts.get('dry_run')
915 915 try:
916 916 similarity = float(opts.get('similarity') or 0)
917 917 except ValueError:
918 918 raise error.Abort(_('similarity must be a number'))
919 919 if similarity < 0 or similarity > 100:
920 920 raise error.Abort(_('similarity must be between 0 and 100'))
921 921 similarity /= 100.0
922 922
923 923 ret = 0
924 924 join = lambda f: os.path.join(prefix, f)
925 925
926 926 wctx = repo[None]
927 927 for subpath in sorted(wctx.substate):
928 928 submatch = matchmod.subdirmatcher(subpath, m)
929 929 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
930 930 sub = wctx.sub(subpath)
931 931 try:
932 932 if sub.addremove(submatch, prefix, opts):
933 933 ret = 1
934 934 except error.LookupError:
935 935 repo.ui.status(_("skipping missing subrepository: %s\n")
936 936 % join(subpath))
937 937
938 938 rejected = []
939 939 def badfn(f, msg):
940 940 if f in m.files():
941 941 m.bad(f, msg)
942 942 rejected.append(f)
943 943
944 944 badmatch = matchmod.badmatch(m, badfn)
945 945 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
946 946 badmatch)
947 947
948 948 unknownset = set(unknown + forgotten)
949 949 toprint = unknownset.copy()
950 950 toprint.update(deleted)
951 951 for abs in sorted(toprint):
952 952 if repo.ui.verbose or not m.exact(abs):
953 953 if abs in unknownset:
954 954 status = _('adding %s\n') % m.uipath(abs)
955 955 else:
956 956 status = _('removing %s\n') % m.uipath(abs)
957 957 repo.ui.status(status)
958 958
959 959 renames = _findrenames(repo, m, added + unknown, removed + deleted,
960 960 similarity)
961 961
962 962 if not dry_run:
963 963 _markchanges(repo, unknown + forgotten, deleted, renames)
964 964
965 965 for f in rejected:
966 966 if f in m.files():
967 967 return 1
968 968 return ret
969 969
970 970 def marktouched(repo, files, similarity=0.0):
971 971 '''Assert that files have somehow been operated upon. files are relative to
972 972 the repo root.'''
973 973 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
974 974 rejected = []
975 975
976 976 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
977 977
978 978 if repo.ui.verbose:
979 979 unknownset = set(unknown + forgotten)
980 980 toprint = unknownset.copy()
981 981 toprint.update(deleted)
982 982 for abs in sorted(toprint):
983 983 if abs in unknownset:
984 984 status = _('adding %s\n') % abs
985 985 else:
986 986 status = _('removing %s\n') % abs
987 987 repo.ui.status(status)
988 988
989 989 renames = _findrenames(repo, m, added + unknown, removed + deleted,
990 990 similarity)
991 991
992 992 _markchanges(repo, unknown + forgotten, deleted, renames)
993 993
994 994 for f in rejected:
995 995 if f in m.files():
996 996 return 1
997 997 return 0
998 998
999 999 def _interestingfiles(repo, matcher):
1000 1000 '''Walk dirstate with matcher, looking for files that addremove would care
1001 1001 about.
1002 1002
1003 1003 This is different from dirstate.status because it doesn't care about
1004 1004 whether files are modified or clean.'''
1005 1005 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1006 1006 audit_path = pathutil.pathauditor(repo.root, cached=True)
1007 1007
1008 1008 ctx = repo[None]
1009 1009 dirstate = repo.dirstate
1010 1010 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1011 1011 unknown=True, ignored=False, full=False)
1012 1012 for abs, st in walkresults.iteritems():
1013 1013 dstate = dirstate[abs]
1014 1014 if dstate == '?' and audit_path.check(abs):
1015 1015 unknown.append(abs)
1016 1016 elif dstate != 'r' and not st:
1017 1017 deleted.append(abs)
1018 1018 elif dstate == 'r' and st:
1019 1019 forgotten.append(abs)
1020 1020 # for finding renames
1021 1021 elif dstate == 'r' and not st:
1022 1022 removed.append(abs)
1023 1023 elif dstate == 'a':
1024 1024 added.append(abs)
1025 1025
1026 1026 return added, unknown, deleted, removed, forgotten
1027 1027
1028 1028 def _findrenames(repo, matcher, added, removed, similarity):
1029 1029 '''Find renames from removed files to added ones.'''
1030 1030 renames = {}
1031 1031 if similarity > 0:
1032 1032 for old, new, score in similar.findrenames(repo, added, removed,
1033 1033 similarity):
1034 1034 if (repo.ui.verbose or not matcher.exact(old)
1035 1035 or not matcher.exact(new)):
1036 1036 repo.ui.status(_('recording removal of %s as rename to %s '
1037 1037 '(%d%% similar)\n') %
1038 1038 (matcher.rel(old), matcher.rel(new),
1039 1039 score * 100))
1040 1040 renames[new] = old
1041 1041 return renames
1042 1042
1043 1043 def _markchanges(repo, unknown, deleted, renames):
1044 1044 '''Marks the files in unknown as added, the files in deleted as removed,
1045 1045 and the files in renames as copied.'''
1046 1046 wctx = repo[None]
1047 1047 with repo.wlock():
1048 1048 wctx.forget(deleted)
1049 1049 wctx.add(unknown)
1050 1050 for new, old in renames.iteritems():
1051 1051 wctx.copy(old, new)
1052 1052
1053 1053 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1054 1054 """Update the dirstate to reflect the intent of copying src to dst. For
1055 1055 different reasons it might not end with dst being marked as copied from src.
1056 1056 """
1057 1057 origsrc = repo.dirstate.copied(src) or src
1058 1058 if dst == origsrc: # copying back a copy?
1059 1059 if repo.dirstate[dst] not in 'mn' and not dryrun:
1060 1060 repo.dirstate.normallookup(dst)
1061 1061 else:
1062 1062 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1063 1063 if not ui.quiet:
1064 1064 ui.warn(_("%s has not been committed yet, so no copy "
1065 1065 "data will be stored for %s.\n")
1066 1066 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1067 1067 if repo.dirstate[dst] in '?r' and not dryrun:
1068 1068 wctx.add([dst])
1069 1069 elif not dryrun:
1070 1070 wctx.copy(origsrc, dst)
1071 1071
1072 1072 def readrequires(opener, supported):
1073 1073 '''Reads and parses .hg/requires and checks if all entries found
1074 1074 are in the list of supported features.'''
1075 1075 requirements = set(opener.read("requires").splitlines())
1076 1076 missings = []
1077 1077 for r in requirements:
1078 1078 if r not in supported:
1079 1079 if not r or not r[0:1].isalnum():
1080 1080 raise error.RequirementError(_(".hg/requires file is corrupt"))
1081 1081 missings.append(r)
1082 1082 missings.sort()
1083 1083 if missings:
1084 1084 raise error.RequirementError(
1085 1085 _("repository requires features unknown to this Mercurial: %s")
1086 1086 % " ".join(missings),
1087 1087 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1088 1088 " for more information"))
1089 1089 return requirements
1090 1090
1091 1091 def writerequires(opener, requirements):
1092 1092 with opener('requires', 'w') as fp:
1093 1093 for r in sorted(requirements):
1094 1094 fp.write("%s\n" % r)
1095 1095
1096 1096 class filecachesubentry(object):
1097 1097 def __init__(self, path, stat):
1098 1098 self.path = path
1099 1099 self.cachestat = None
1100 1100 self._cacheable = None
1101 1101
1102 1102 if stat:
1103 1103 self.cachestat = filecachesubentry.stat(self.path)
1104 1104
1105 1105 if self.cachestat:
1106 1106 self._cacheable = self.cachestat.cacheable()
1107 1107 else:
1108 1108 # None means we don't know yet
1109 1109 self._cacheable = None
1110 1110
1111 1111 def refresh(self):
1112 1112 if self.cacheable():
1113 1113 self.cachestat = filecachesubentry.stat(self.path)
1114 1114
1115 1115 def cacheable(self):
1116 1116 if self._cacheable is not None:
1117 1117 return self._cacheable
1118 1118
1119 1119 # we don't know yet, assume it is for now
1120 1120 return True
1121 1121
1122 1122 def changed(self):
1123 1123 # no point in going further if we can't cache it
1124 1124 if not self.cacheable():
1125 1125 return True
1126 1126
1127 1127 newstat = filecachesubentry.stat(self.path)
1128 1128
1129 1129 # we may not know if it's cacheable yet, check again now
1130 1130 if newstat and self._cacheable is None:
1131 1131 self._cacheable = newstat.cacheable()
1132 1132
1133 1133 # check again
1134 1134 if not self._cacheable:
1135 1135 return True
1136 1136
1137 1137 if self.cachestat != newstat:
1138 1138 self.cachestat = newstat
1139 1139 return True
1140 1140 else:
1141 1141 return False
1142 1142
1143 1143 @staticmethod
1144 1144 def stat(path):
1145 1145 try:
1146 1146 return util.cachestat(path)
1147 1147 except OSError as e:
1148 1148 if e.errno != errno.ENOENT:
1149 1149 raise
1150 1150
1151 1151 class filecacheentry(object):
1152 1152 def __init__(self, paths, stat=True):
1153 1153 self._entries = []
1154 1154 for path in paths:
1155 1155 self._entries.append(filecachesubentry(path, stat))
1156 1156
1157 1157 def changed(self):
1158 1158 '''true if any entry has changed'''
1159 1159 for entry in self._entries:
1160 1160 if entry.changed():
1161 1161 return True
1162 1162 return False
1163 1163
1164 1164 def refresh(self):
1165 1165 for entry in self._entries:
1166 1166 entry.refresh()
1167 1167
1168 1168 class filecache(object):
1169 1169 """A property like decorator that tracks files under .hg/ for updates.
1170 1170
1171 1171 On first access, the files defined as arguments are stat()ed and the
1172 1172 results cached. The decorated function is called. The results are stashed
1173 1173 away in a ``_filecache`` dict on the object whose method is decorated.
1174 1174
1175 1175 On subsequent access, the cached result is returned.
1176 1176
1177 1177 On external property set operations, stat() calls are performed and the new
1178 1178 value is cached.
1179 1179
1180 1180 On property delete operations, cached data is removed.
1181 1181
1182 1182 When using the property API, cached data is always returned, if available:
1183 1183 no stat() is performed to check if the file has changed and if the function
1184 1184 needs to be called to reflect file changes.
1185 1185
1186 1186 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1187 1187 can populate an entry before the property's getter is called. In this case,
1188 1188 entries in ``_filecache`` will be used during property operations,
1189 1189 if available. If the underlying file changes, it is up to external callers
1190 1190 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1191 1191 method result as well as possibly calling ``del obj._filecache[attr]`` to
1192 1192 remove the ``filecacheentry``.
1193 1193 """
1194 1194
1195 1195 def __init__(self, *paths):
1196 1196 self.paths = paths
1197 1197
1198 1198 def join(self, obj, fname):
1199 1199 """Used to compute the runtime path of a cached file.
1200 1200
1201 1201 Users should subclass filecache and provide their own version of this
1202 1202 function to call the appropriate join function on 'obj' (an instance
1203 1203 of the class whose member function was decorated).
1204 1204 """
1205 1205 raise NotImplementedError
1206 1206
1207 1207 def __call__(self, func):
1208 1208 self.func = func
1209 1209 self.sname = func.__name__
1210 1210 self.name = pycompat.sysbytes(self.sname)
1211 1211 return self
1212 1212
1213 1213 def __get__(self, obj, type=None):
1214 1214 # if accessed on the class, return the descriptor itself.
1215 1215 if obj is None:
1216 1216 return self
1217 1217 # do we need to check if the file changed?
1218 1218 if self.sname in obj.__dict__:
1219 1219 assert self.name in obj._filecache, self.name
1220 1220 return obj.__dict__[self.sname]
1221 1221
1222 1222 entry = obj._filecache.get(self.name)
1223 1223
1224 1224 if entry:
1225 1225 if entry.changed():
1226 1226 entry.obj = self.func(obj)
1227 1227 else:
1228 1228 paths = [self.join(obj, path) for path in self.paths]
1229 1229
1230 1230 # We stat -before- creating the object so our cache doesn't lie if
1231 1231 # a writer modified the file between the time we read and stat
1232 1232 entry = filecacheentry(paths, True)
1233 1233 entry.obj = self.func(obj)
1234 1234
1235 1235 obj._filecache[self.name] = entry
1236 1236
1237 1237 obj.__dict__[self.sname] = entry.obj
1238 1238 return entry.obj
1239 1239
1240 1240 def __set__(self, obj, value):
1241 1241 if self.name not in obj._filecache:
1242 1242 # we add an entry for the missing value because X in __dict__
1243 1243 # implies X in _filecache
1244 1244 paths = [self.join(obj, path) for path in self.paths]
1245 1245 ce = filecacheentry(paths, False)
1246 1246 obj._filecache[self.name] = ce
1247 1247 else:
1248 1248 ce = obj._filecache[self.name]
1249 1249
1250 1250 ce.obj = value # update cached copy
1251 1251 obj.__dict__[self.sname] = value # update copy returned by obj.x
1252 1252
1253 1253 def __delete__(self, obj):
1254 1254 try:
1255 1255 del obj.__dict__[self.sname]
1256 1256 except KeyError:
1257 1257 raise AttributeError(self.sname)
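# Illustrative sketch (hypothetical subclass, not part of the original
# module): concrete users of filecache override join() to resolve paths
# against the decorated object, then decorate a method computing the value.
class _vfsfilecache(filecache):
    def join(self, obj, fname):
        # assumes the decorated object exposes a vfs rooted at .hg/
        return obj.vfs.join(fname)
#
# usage, on an object that also carries a '_filecache' dict:
#
#     @_vfsfilecache('bookmarks')
#     def _bookmarks(self):
#         return readbookmarksfile(self)   # hypothetical reader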
1258 1258
1259 1259 def extdatasource(repo, source):
1260 1260 """Gather a map of rev -> value dict from the specified source
1261 1261
1262 1262 A source spec is treated as a URL, with a special case shell: type
1263 1263 for parsing the output from a shell command.
1264 1264
1265 1265 The data is parsed as a series of newline-separated records where
1266 1266 each record is a revision specifier optionally followed by a space
1267 1267 and a freeform string value. If the revision is known locally, it
1268 1268 is converted to a rev, otherwise the record is skipped.
1269 1269
1270 1270 Note that both key and value are treated as UTF-8 and converted to
1271 1271 the local encoding. This allows uniformity between local and
1272 1272 remote data sources.
1273 1273 """
1274 1274
1275 1275 spec = repo.ui.config("extdata", source)
1276 1276 if not spec:
1277 1277 raise error.Abort(_("unknown extdata source '%s'") % source)
1278 1278
1279 1279 data = {}
1280 1280 src = proc = None
1281 1281 try:
1282 1282 if spec.startswith("shell:"):
1283 1283 # external commands should be run relative to the repo root
1284 1284 cmd = spec[6:]
1285 1285 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1286 1286 close_fds=procutil.closefds,
1287 1287 stdout=subprocess.PIPE, cwd=repo.root)
1288 1288 src = proc.stdout
1289 1289 else:
1290 1290 # treat as a URL or file
1291 1291 src = url.open(repo.ui, spec)
1292 1292 for l in src:
1293 1293 if " " in l:
1294 1294 k, v = l.strip().split(" ", 1)
1295 1295 else:
1296 1296 k, v = l.strip(), ""
1297 1297
1298 1298 k = encoding.tolocal(k)
1299 1299 try:
1300 1300 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1301 1301 except (error.LookupError, error.RepoLookupError):
1302 1302 pass # we ignore data for nodes that don't exist locally
1303 1303 finally:
1304 1304 if proc:
1305 1305 proc.communicate()
1306 1306 if src:
1307 1307 src.close()
1308 1308 if proc and proc.returncode != 0:
1309 1309 raise error.Abort(_("extdata command '%s' failed: %s")
1310 1310 % (cmd, procutil.explainexit(proc.returncode)))
1311 1311
1312 1312 return data
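# Illustrative sketch (hypothetical configuration, not part of the original
# module): an [extdata] entry names the source queried above, e.g. in hgrc:
#
#     [extdata]
#     bugzilla = shell:cat .hg/bugmap
#
# where each output line is '<revspec> <value>'; records whose revision is
# unknown locally are skipped, so extdatasource(repo, 'bugzilla') yields a
# {rev: value} mapping.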
1313 1313
1314 1314 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1315 1315 if lock is None:
1316 1316 raise error.LockInheritanceContractViolation(
1317 1317 'lock can only be inherited while held')
1318 1318 if environ is None:
1319 1319 environ = {}
1320 1320 with lock.inherit() as locker:
1321 1321 environ[envvar] = locker
1322 1322 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1323 1323
1324 1324 def wlocksub(repo, cmd, *args, **kwargs):
1325 1325 """run cmd as a subprocess that allows inheriting repo's wlock
1326 1326
1327 1327 This can only be called while the wlock is held. This takes all the
1328 1328 arguments that ui.system does, and returns the exit code of the
1329 1329 subprocess."""
1330 1330 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1331 1331 **kwargs)
1332 1332
1333 1333 class progress(object):
1334 1334 def __init__(self, ui, topic, unit="", total=None):
1335 1335 self.ui = ui
1336 1336 self.pos = 0
1337 1337 self.topic = topic
1338 1338 self.unit = unit
1339 1339 self.total = total
1340 1340
1341 1341 def __enter__(self):
1342 1342 return self
1343 1343
1344 1344 def __exit__(self, exc_type, exc_value, exc_tb):
1345 1345 self.complete()
1346 1346
1347 1347 def update(self, pos, item="", total=None):
1348 1348 assert pos is not None
1349 1349 if total:
1350 1350 self.total = total
1351 1351 self.pos = pos
1352 1352 self._print(item)
1353 1353
1354 1354 def increment(self, step=1, item="", total=None):
1355 1355 self.update(self.pos + step, item, total)
1356 1356
1357 1357 def complete(self):
1358 1358 self.ui.progress(self.topic, None)
1359 1359
1360 1360 def _print(self, item):
1361 1361 self.ui.progress(self.topic, self.pos, item, self.unit,
1362 1362 self.total)
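# Illustrative sketch (hypothetical helper, not part of the original module):
# progress is a context manager, so completion is reported even on errors.
def _progressexample(ui, items):
    with progress(ui, 'processing', unit='items', total=len(items)) as p:
        for item in items:
            p.increment(item=item)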
1363 1363
1364 1364 def gdinitconfig(ui):
1365 1365 """helper function to know if a repo should be created as general delta
1366 1366 """
1367 1367 # experimental config: format.generaldelta
1368 1368 return (ui.configbool('format', 'generaldelta')
1369 or ui.configbool('format', 'usegeneraldelta'))
1369 or ui.configbool('format', 'usegeneraldelta')
1370 or ui.configbool('format', 'sparse-revlog'))
1370 1371
1371 1372 def gddeltaconfig(ui):
1372 1373 """helper function to know if incoming delta should be optimised
1373 1374 """
1374 1375 # experimental config: format.generaldelta
1375 1376 return ui.configbool('format', 'generaldelta')
1376 1377
1377 1378 class simplekeyvaluefile(object):
1378 1379 """A simple file with key=value lines
1379 1380
1380 1381 Keys must be alphanumerics and start with a letter, values must not
1381 1382 contain '\n' characters"""
1382 1383 firstlinekey = '__firstline'
1383 1384
1384 1385 def __init__(self, vfs, path, keys=None):
1385 1386 self.vfs = vfs
1386 1387 self.path = path
1387 1388
1388 1389 def read(self, firstlinenonkeyval=False):
1389 1390 """Read the contents of a simple key-value file
1390 1391
1391 1392 'firstlinenonkeyval' indicates whether the first line of file should
1392 1393 'firstlinenonkeyval' indicates whether the first line of the file
1393 1394 should be treated as a key-value pair or returned fully under the
1394 1395 lines = self.vfs.readlines(self.path)
1395 1396 d = {}
1396 1397 if firstlinenonkeyval:
1397 1398 if not lines:
1398 1399 e = _("empty simplekeyvalue file")
1399 1400 raise error.CorruptedState(e)
1400 1401 # we don't want to include '\n' in the __firstline
1401 1402 d[self.firstlinekey] = lines[0][:-1]
1402 1403 del lines[0]
1403 1404
1404 1405 try:
1405 1406 # the 'if line.strip()' part prevents us from failing on empty
1406 1407 # lines which only contain '\n' and are therefore not skipped
1407 1408 # by 'if line'
1408 1409 updatedict = dict(line[:-1].split('=', 1) for line in lines
1409 1410 if line.strip())
1410 1411 if self.firstlinekey in updatedict:
1411 1412 e = _("%r can't be used as a key")
1412 1413 raise error.CorruptedState(e % self.firstlinekey)
1413 1414 d.update(updatedict)
1414 1415 except ValueError as e:
1415 1416 raise error.CorruptedState(str(e))
1416 1417 return d
1417 1418
1418 1419 def write(self, data, firstline=None):
1419 1420 """Write key=>value mapping to a file
1420 1421 data is a dict. Keys must be alphanumerical and start with a letter.
1421 1422 Values must not contain newline characters.
1422 1423
1423 1424 If 'firstline' is not None, it is written to file before
1424 1425 everything else, as it is, not in a key=value form"""
1425 1426 lines = []
1426 1427 if firstline is not None:
1427 1428 lines.append('%s\n' % firstline)
1428 1429
1429 1430 for k, v in data.items():
1430 1431 if k == self.firstlinekey:
1431 1432 e = "key name '%s' is reserved" % self.firstlinekey
1432 1433 raise error.ProgrammingError(e)
1433 1434 if not k[0:1].isalpha():
1434 1435 e = "keys must start with a letter in a key-value file"
1435 1436 raise error.ProgrammingError(e)
1436 1437 if not k.isalnum():
1437 1438 e = "invalid key name in a simple key-value file"
1438 1439 raise error.ProgrammingError(e)
1439 1440 if '\n' in v:
1440 1441 e = "invalid value in a simple key-value file"
1441 1442 raise error.ProgrammingError(e)
1442 1443 lines.append("%s=%s\n" % (k, v))
1443 1444 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1444 1445 fp.write(''.join(lines))
1445 1446
1446 1447 _reportobsoletedsource = [
1447 1448 'debugobsolete',
1448 1449 'pull',
1449 1450 'push',
1450 1451 'serve',
1451 1452 'unbundle',
1452 1453 ]
1453 1454
1454 1455 _reportnewcssource = [
1455 1456 'pull',
1456 1457 'unbundle',
1457 1458 ]
1458 1459
1459 1460 def prefetchfiles(repo, revs, match):
1460 1461 """Invokes the registered file prefetch functions, allowing extensions to
1461 1462 ensure the corresponding files are available locally, before the command
1462 1463 uses them."""
1463 1464 if match:
1464 1465 # The command itself will complain about files that don't exist, so
1465 1466 # don't duplicate the message.
1466 1467 match = matchmod.badmatch(match, lambda fn, msg: None)
1467 1468 else:
1468 1469 match = matchall(repo)
1469 1470
1470 1471 fileprefetchhooks(repo, revs, match)
1471 1472
1472 1473 # a list of (repo, revs, match) prefetch functions
1473 1474 fileprefetchhooks = util.hooks()
1474 1475
1475 1476 # A marker that tells the evolve extension to suppress its own reporting
1476 1477 _reportstroubledchangesets = True
1477 1478
1478 1479 def registersummarycallback(repo, otr, txnname=''):
1479 1480 """register a callback to issue a summary after the transaction is closed
1480 1481 """
1481 1482 def txmatch(sources):
1482 1483 return any(txnname.startswith(source) for source in sources)
1483 1484
1484 1485 categories = []
1485 1486
1486 1487 def reportsummary(func):
1487 1488 """decorator for report callbacks."""
1488 1489 # The repoview life cycle is shorter than the one of the actual
1489 1490 # underlying repository. So the filtered object can die before the
1490 1491 # weakref is used leading to troubles. We keep a reference to the
1491 1492 # unfiltered object and restore the filtering when retrieving the
1492 1493 # repository through the weakref.
1493 1494 filtername = repo.filtername
1494 1495 reporef = weakref.ref(repo.unfiltered())
1495 1496 def wrapped(tr):
1496 1497 repo = reporef()
1497 1498 if filtername:
1498 1499 repo = repo.filtered(filtername)
1499 1500 func(repo, tr)
1500 1501 newcat = '%02i-txnreport' % len(categories)
1501 1502 otr.addpostclose(newcat, wrapped)
1502 1503 categories.append(newcat)
1503 1504 return wrapped
1504 1505
1505 1506 if txmatch(_reportobsoletedsource):
1506 1507 @reportsummary
1507 1508 def reportobsoleted(repo, tr):
1508 1509 obsoleted = obsutil.getobsoleted(repo, tr)
1509 1510 if obsoleted:
1510 1511 repo.ui.status(_('obsoleted %i changesets\n')
1511 1512 % len(obsoleted))
1512 1513
1513 1514 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1514 1515 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1515 1516 instabilitytypes = [
1516 1517 ('orphan', 'orphan'),
1517 1518 ('phase-divergent', 'phasedivergent'),
1518 1519 ('content-divergent', 'contentdivergent'),
1519 1520 ]
1520 1521
1521 1522 def getinstabilitycounts(repo):
1522 1523 filtered = repo.changelog.filteredrevs
1523 1524 counts = {}
1524 1525 for instability, revset in instabilitytypes:
1525 1526 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1526 1527 filtered)
1527 1528 return counts
1528 1529
1529 1530 oldinstabilitycounts = getinstabilitycounts(repo)
1530 1531 @reportsummary
1531 1532 def reportnewinstabilities(repo, tr):
1532 1533 newinstabilitycounts = getinstabilitycounts(repo)
1533 1534 for instability, revset in instabilitytypes:
1534 1535 delta = (newinstabilitycounts[instability] -
1535 1536 oldinstabilitycounts[instability])
1536 1537 msg = getinstabilitymessage(delta, instability)
1537 1538 if msg:
1538 1539 repo.ui.warn(msg)
1539 1540
1540 1541 if txmatch(_reportnewcssource):
1541 1542 @reportsummary
1542 1543 def reportnewcs(repo, tr):
1543 1544 """Report the range of new revisions pulled/unbundled."""
1544 1545 newrevs = tr.changes.get('revs', xrange(0, 0))
1545 1546 if not newrevs:
1546 1547 return
1547 1548
1548 1549 # Compute the bounds of new revisions' range, excluding obsoletes.
1549 1550 unfi = repo.unfiltered()
1550 1551 revs = unfi.revs('%ld and not obsolete()', newrevs)
1551 1552 if not revs:
1552 1553 # Got only obsoletes.
1553 1554 return
1554 1555 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1555 1556
1556 1557 if minrev == maxrev:
1557 1558 revrange = minrev
1558 1559 else:
1559 1560 revrange = '%s:%s' % (minrev, maxrev)
1560 1561 repo.ui.status(_('new changesets %s\n') % revrange)
1561 1562
1562 1563 @reportsummary
1563 1564 def reportphasechanges(repo, tr):
1564 1565 """Report statistics of phase changes for changesets pre-existing
1565 1566 pull/unbundle.
1566 1567 """
1567 1568 newrevs = tr.changes.get('revs', xrange(0, 0))
1568 1569 phasetracking = tr.changes.get('phases', {})
1569 1570 if not phasetracking:
1570 1571 return
1571 1572 published = [
1572 1573 rev for rev, (old, new) in phasetracking.iteritems()
1573 1574 if new == phases.public and rev not in newrevs
1574 1575 ]
1575 1576 if not published:
1576 1577 return
1577 1578 repo.ui.status(_('%d local changesets published\n')
1578 1579 % len(published))
1579 1580
1580 1581 def getinstabilitymessage(delta, instability):
1581 1582 """function to return the message to show warning about new instabilities
1582 1583
1583 1584 exists as a separate function so that extension can wrap to show more
1584 1585 information like how to fix instabilities"""
1585 1586 if delta > 0:
1586 1587 return _('%i new %s changesets\n') % (delta, instability)
1587 1588
1588 1589 def nodesummaries(repo, nodes, maxnumnodes=4):
1589 1590 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1590 1591 return ' '.join(short(h) for h in nodes)
1591 1592 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1592 1593 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1593 1594
1594 1595 def enforcesinglehead(repo, tr, desc):
1595 1596 """check that no named branch has multiple heads"""
1596 1597 if desc in ('strip', 'repair'):
1597 1598 # skip the logic during strip
1598 1599 return
1599 1600 visible = repo.filtered('visible')
1600 1601 # possible improvement: we could restrict the check to affected branch
1601 1602 for name, heads in visible.branchmap().iteritems():
1602 1603 if len(heads) > 1:
1603 1604 msg = _('rejecting multiple heads on branch "%s"')
1604 1605 msg %= name
1605 1606 hint = _('%d heads: %s')
1606 1607 hint %= (len(heads), nodesummaries(repo, heads))
1607 1608 raise error.Abort(msg, hint=hint)
1608 1609
1609 1610 def wrapconvertsink(sink):
1610 1611 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1611 1612 before it is used, whether or not the convert extension was formally loaded.
1612 1613 """
1613 1614 return sink
1614 1615
1615 1616 def unhidehashlikerevs(repo, specs, hiddentype):
1616 1617 """parse the user specs and unhide changesets whose hash or revision number
1617 1618 is passed.
1618 1619
1619 1620 hiddentype can be: 1) 'warn': warn while unhiding changesets
1620 1621 2) 'nowarn': don't warn while unhiding changesets
1621 1622
1622 1623 returns a repo object with the required changesets unhidden
1623 1624 """
1624 1625 if not repo.filtername or not repo.ui.configbool('experimental',
1625 1626 'directaccess'):
1626 1627 return repo
1627 1628
1628 1629 if repo.filtername not in ('visible', 'visible-hidden'):
1629 1630 return repo
1630 1631
1631 1632 symbols = set()
1632 1633 for spec in specs:
1633 1634 try:
1634 1635 tree = revsetlang.parse(spec)
1635 1636 except error.ParseError: # will be reported by scmutil.revrange()
1636 1637 continue
1637 1638
1638 1639 symbols.update(revsetlang.gethashlikesymbols(tree))
1639 1640
1640 1641 if not symbols:
1641 1642 return repo
1642 1643
1643 1644 revs = _getrevsfromsymbols(repo, symbols)
1644 1645
1645 1646 if not revs:
1646 1647 return repo
1647 1648
1648 1649 if hiddentype == 'warn':
1649 1650 unfi = repo.unfiltered()
1650 1651 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1651 1652 repo.ui.warn(_("warning: accessing hidden changesets for write "
1652 1653 "operation: %s\n") % revstr)
1653 1654
1654 1655 # we have to use a new filtername to separate branch/tags caches until we
1655 1656 # can disable these caches when revisions are dynamically pinned.
1656 1657 return repo.filtered('visible-hidden', revs)
1657 1658
1658 1659 def _getrevsfromsymbols(repo, symbols):
1659 1660 """parse the list of symbols and return a set of revision numbers of hidden
1660 1661 changesets present in symbols"""
1661 1662 revs = set()
1662 1663 unfi = repo.unfiltered()
1663 1664 unficl = unfi.changelog
1664 1665 cl = repo.changelog
1665 1666 tiprev = len(unficl)
1666 1667 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1667 1668 for s in symbols:
1668 1669 try:
1669 1670 n = int(s)
1670 1671 if n <= tiprev:
1671 1672 if not allowrevnums:
1672 1673 continue
1673 1674 else:
1674 1675 if n not in cl:
1675 1676 revs.add(n)
1676 1677 continue
1677 1678 except ValueError:
1678 1679 pass
1679 1680
1680 1681 try:
1681 1682 s = resolvehexnodeidprefix(unfi, s)
1682 1683 except (error.LookupError, error.WdirUnsupported):
1683 1684 s = None
1684 1685
1685 1686 if s is not None:
1686 1687 rev = unficl.rev(s)
1687 1688 if rev not in cl:
1688 1689 revs.add(rev)
1689 1690
1690 1691 return revs
1691 1692
1692 1693 def bookmarkrevs(repo, mark):
1693 1694 """
1694 1695 Select revisions reachable by a given bookmark
1695 1696 """
1696 1697 return repo.revs("ancestors(bookmark(%s)) - "
1697 1698 "ancestors(head() and not bookmark(%s)) - "
1698 1699 "ancestors(bookmark() and not bookmark(%s))",
1699 1700 mark, mark, mark)