##// END OF EJS Templates
scmutil: use resolvehexnodeidprefix() from revsymbol()...
Martin von Zweigbergk -
r37697:ab828755 default
parent child Browse files
Show More
@@ -1,1548 +1,1548 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
50 50 if pycompat.iswindows:
51 51 from . import scmwindows as scmplatform
52 52 else:
53 53 from . import scmposix as scmplatform
54 54
55 55 termsize = scmplatform.termsize
56 56
class status(tuple):
    '''Immutable named tuple holding one file list per dirstate status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant to
    the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
109 109
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subrepo path to the context it should be read from,
    # preferring ctx1. Paths from ctx2 matter when the .hgsub file has
    # been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Paths present only in ctx2 are handled separately below.
    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for s in missing:
        del subpaths[s]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2 so
    # that 'sub.{status|diff}(rev2)' has an accurate result; otherwise the
    # ctx2 subrepo would be compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
134 134
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret (and not extinct) so the user
    # learns why "nothing" was exchanged.
    secretlist = []
    for n in (excluded or []):
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
151 151
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print a traceback before the outer handlers turn the
            # exception into a message and an exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # Normalize a unicode payload to bytes before deciding how to show it.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the only case that returns 1 instead of -1.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # Dispatch on duck-typed attributes, not concrete classes
        # (presumably covering urllib HTTP/URL errors — verify at callers).
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe: stay quiet, the reader went away.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
268 268
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not acceptable as a new label (bookmark/branch/tag).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 285
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
290 290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302 302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    value = ui.config('ui', 'portablefilenames')
    lowered = value.lower()
    asbool = stringutil.parsebool(value)
    abort = pycompat.iswindows or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    # Any value that is a recognized boolean, 'abort', 'warn' or 'ignore'
    # is valid; everything else is a config error.
    valid = abort or warn or lowered == 'ignore' or asbool is not None
    if not valid:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % value)
    return abort, warn
315 315
class casecollisionauditor(object):
    '''Warn or abort when a new file collides case-insensitively.

    Tracks the lowercased form of every file already in the dirstate plus
    every file seen so far, and reports a possible case-folding collision
    when a new file differs from a known one only in case.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # _newfiles keeps us from complaining twice when this object is
        # called with the same filename more than once.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
339 339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    filtered = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not filtered:
        return None
    digest = hashlib.sha1()
    for rev in filtered:
        digest.update('%d;' % rev)
    return digest.digest()
363 363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself abort the walk; errors while
        # descending are ignored.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst and return True if it was new
            # (i.e. not a directory we have already visited via a symlink).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without os.path.samestat we cannot detect symlink cycles, so
        # refuse to follow symlinks at all.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # os.walk does not follow symlinks; walk through the
                        # link manually, sharing seen_dirs to avoid cycles.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Prune already-seen directories from the walk in place.
            dirs[:] = newdirs
407 407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no node; use the sentinel wdirid.
    n = ctx.node()
    return wdirid if n is None else n
414 414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has no revision; use the sentinel wdirrev.
    r = ctx.rev()
    return wdirrev if r is None else r
422 422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
428 428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-char hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436 436
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix to a full binary node.

    Returns None when no node matches the prefix. The rev() lookup below
    raises for nodes that exist but are filtered in the current view.
    """
    # Uses unfiltered repo because it's faster when prefix is ambiguous.
    # This matches the "shortest" template function.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
445 445
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.LookupError if the symbol is an
    ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
457 457
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Raises RepoLookupError when the symbol cannot be resolved.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            # Try the symbol as a revision number.
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            # Try the symbol as a full hex nodeid.
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Finally, try the symbol as a shortened hex nodeid prefix. The
        # previous duplicate call to changelog._partialmatch() was stale
        # diff residue: its result was immediately overwritten, making
        # the lookup run twice. resolvehexnodeidprefix() alone suffices
        # (and also rejects filtered matches).
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
518 518
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # Check if the changeset is obsolete
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)

    # If the changeset is obsolete, enrich the message with the reason
    # that made this changeset not visible
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
543 543
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revspec to a context, falling back to default.'''
    # Note: revspec == 0 is a real revision, not "no spec".
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
552 552
def _pairspec(revspec):
    # True when the top-level revset operator is one of the range forms.
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
556 556
def revpairnodes(repo, revs):
    '''Deprecated wrapper around revpair() returning nodes, not contexts.'''
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    first, second = revpair(repo, revs)
    return first.node(), second.node()
561 561
def revpair(repo, revs):
    '''Resolve a list of revision specs to a (first, second) context pair.

    The second context is the working directory context when the specs
    denote only a single revision (or nothing at all).
    '''
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Pick the endpoints of the resolved smartset, preferring the cheap
    # min()/max() accessors when the set knows its ordering.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # Reject e.g. "x..y" where one side resolves to an empty set but the
    # union still collapses to a single revision.
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
591 591
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    def _tospec(spec):
        # Bare integers are shorthand for 'rev(N)'.
        if isinstance(spec, int):
            return revsetlang.formatspec('rev(%d)', spec)
        return spec
    allspecs = [_tospec(spec) for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
619 619
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() < intrev(ctx) - 1:
        return parents
    return []
635 635
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicit pattern kinds (glob:, re:, ...) pass through untouched.
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # No match: keep the original pattern verbatim.
            ret.append(kindpat)
    return ret
654 654
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: 'm' is assigned only below; this closure relies on Python's
        # late binding and is never called before 'm' exists.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # An always-matcher means the patterns added no restriction; report none.
    if m.always():
        pats = []
    return m, pats
679 679
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
684 684
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
688 688
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
692 692
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    # A plain path (no pattern kind) can be canonicalized directly.
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # Otherwise the pattern must match exactly one file in the revision.
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
706 706
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a file occupying the name of one of its parent directories).
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # Conversely, a directory occupying the backup file's own name must go.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
742 742
743 743 class _containsnode(object):
744 744 """proxy __contains__(node) to container.__contains__ which accepts revs"""
745 745
746 746 def __init__(self, repo, revcontainer):
747 747 self._torev = repo.changelog.rev
748 748 self._revcontains = revcontainer.__contains__
749 749
750 750 def __contains__(self, node):
751 751 return self._revcontains(self._torev(node))
752 752
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # Caller-supplied moves take precedence over computed ones.
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
846 846
def addremove(repo, matcher, prefix, opts=None):
    '''Add new files and remove missing files matched by matcher.

    Recurses into subrepos when requested, prints what is being added or
    removed, and records rename candidates detected by similarity. Returns
    1 when any explicitly-requested file was rejected, else 0 or the
    subrepo result.
    '''
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # Internally similarity is a 0.0-1.0 ratio.
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # Only complain about files the user named explicitly.
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
906 906
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # Define 'rejected' before the badfn closure that appends to it. The
    # previous ordering (matcher built first) only worked because Python
    # closures bind names late; defining the list first makes the data
    # flow explicit and matches addremove() above.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # rejection of an explicitly-named file is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
935 935
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    auditor = pathutil.pathauditor(repo.root, cached=True)

    dirstate = repo.dirstate
    subrepos = sorted(repo[None].substate)
    walkresults = dirstate.walk(matcher, subrepos=subrepos,
                                unknown=True, ignored=False, full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        # NB: the order of these tests matters; an untracked path that
        # fails the audit falls through to the 'deleted' test below.
        if state == '?' and auditor.check(fname):
            unknown.append(fname)
        elif state != 'r' and not st:
            deleted.append(fname)
        elif state == 'r' and st:
            # marked removed but still present on disk: forgotten,
            # kept separately so rename detection can use it
            forgotten.append(fname)
        elif state == 'r' and not st:
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
964 964
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping; empty when similarity is not positive.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        exactpair = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not exactpair:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
979 979
980 980 def _markchanges(repo, unknown, deleted, renames):
981 981 '''Marks the files in unknown as added, the files in deleted as removed,
982 982 and the files in renames as copied.'''
983 983 wctx = repo[None]
984 984 with repo.wlock():
985 985 wctx.forget(deleted)
986 986 wctx.add(unknown)
987 987 for new, old in renames.iteritems():
988 988 wctx.copy(old, new)
989 989
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy: just make sure dst is tracked normally
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # the source was only added, never committed: there is no
        # revision to record copy metadata against
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1008 1008
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings; raises RequirementError when
    the file is corrupt or lists unsupported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file is bogus
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1027 1027
def writerequires(opener, requirements):
    """Write the requirement strings, sorted one per line, via opener."""
    with opener('requires', 'w') as fp:
        fp.write(''.join('%s\n' % r for r in sorted(requirements)))
1032 1032
class filecachesubentry(object):
    """Stat-based change detection for a single file path.

    _cacheable is True/False once known, or None while undetermined
    (e.g. the file did not exist when first statted).
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # record fresh stat data, but only when stat data is meaningful
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # when undetermined, assume cacheable for now
        return self._cacheable if self._cacheable is not None else True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # the file may have appeared since __init__; settle cacheability now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        # remember the new stat so the next call compares against it
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1087 1087
class filecacheentry(object):
    """Aggregate change detection over several paths.

    Holds one filecachesubentry per path; 'changed' is true as soon as
    any of them reports a change.
    """

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1104 1104
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    Invariant maintained by __get__/__set__/__delete__: self.name present in
    obj.__dict__ implies self.name present in obj._filecache.
    '''
    def __init__(self, *paths):
        # relative paths; resolved against the decorated object via join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: capture the wrapped function and its name,
        # then return self so the class attribute becomes this descriptor
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # fast path: obj.__dict__ holds the already-validated value
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        # explicit assignment bypasses computation; stat=False because the
        # caller's value is not derived from the files' current contents
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the per-instance cache; the _filecache entry is kept so
        # a later __get__ can still compare stat data
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1183 1183
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        # each line is "<revspec>[ <value>]"; a missing value means ""
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess / close the stream, even when a record
        # raises an unexpected error above
        if proc:
            proc.communicate()
        if src:
            src.close()
    # 'cmd' is only bound in the shell: branch, but 'proc' is too, so this
    # reference is safe
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1238 1238
1239 1239 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1240 1240 if lock is None:
1241 1241 raise error.LockInheritanceContractViolation(
1242 1242 'lock can only be inherited while held')
1243 1243 if environ is None:
1244 1244 environ = {}
1245 1245 with lock.inherit() as locker:
1246 1246 environ[envvar] = locker
1247 1247 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1248 1248
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1257 1257
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    for knob in ('generaldelta', 'usegeneraldelta'):
        if ui.configbool('format', knob):
            return True
    return False
1264 1264
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1270 1270
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but unused here
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # record the first line verbatim, minus its trailing '\n'
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # whitespace-only lines are skipped; anything else must parse
            # as key=value or dict() raises ValueError
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            result.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = [] if firstline is None else ['%s\n' % firstline]
        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1339 1339
# Transaction-name prefixes (matched with startswith by txmatch in
# registersummarycallback) after which obsoleted changesets are reported.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction-name prefixes after which the range of new changesets is
# reported.
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1360 1360
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Which summaries are registered depends on txnname (see the
    _report*source lists above) and on repo configuration.
    """
    def txmatch(sources):
        # transaction names are matched by prefix
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            # re-resolve the repo through the weakref and re-apply the
            # original filter level before calling the report function
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # NOTE(review): category names are numbered, presumably so the
        # transaction runs post-close callbacks in registration order --
        # confirm against the transaction implementation.
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # count changesets obsoleted by this transaction
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) pairs
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions per type, excluding filtered revs
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken now; compared against post-transaction counts below
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1444 1444
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes, abbreviated unless verbose."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1450 1450
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1465 1465
def wrapconvertsink(sink):
    """Hook point for extensions to wrap the sink from convcmd.convertsink().

    Called whether or not the convert extension was formally loaded; this
    default implementation returns the sink unchanged.
    """
    return sink
1471 1471
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be enabled and the repo must be on one of the
    # two filter levels we know how to widen
    if (not repo.filtername
        or not repo.ui.configbool('experimental', 'directaccess')
        or repo.filtername not in ('visible', 'visible-hidden')):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:
            # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1514 1514
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            # integer symbols are treated as revision numbers first;
            # an out-of-range number falls through to the hash lookup below
            n = int(s)
            # NOTE(review): valid revnums are 0..tiprev-1, so 'n <= tiprev'
            # also accepts n == tiprev -- confirm whether 'n < tiprev' was
            # intended.
            if n <= tiprev:
                if not allowrevnums:
                    # revnum-based direct access is disabled; a plain number
                    # can still be a hash prefix, but we don't treat it as one
                    continue
                else:
                    # hidden means "in the unfiltered changelog but not in
                    # the filtered one"
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            # not an integer: try it as a (partial) hash below
            pass

        try:
            # resolve the symbol as a node-id prefix
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # only collect revisions hidden by the current filter
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now