##// END OF EJS Templates
scmutil: handle full hex nodeids in revsymbol()...
Martin von Zweigbergk -
r37546:d2b484ee default
parent child Browse files
Show More
@@ -1,1517 +1,1530 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 bin,
21 22 hex,
22 23 nullid,
23 24 short,
24 25 wdirid,
25 26 wdirrev,
26 27 )
27 28
28 29 from . import (
29 30 encoding,
30 31 error,
31 32 match as matchmod,
32 33 obsolete,
33 34 obsutil,
34 35 pathutil,
35 36 phases,
36 37 pycompat,
37 38 revsetlang,
38 39 similar,
39 40 url,
40 41 util,
41 42 vfs,
42 43 )
43 44
44 45 from .utils import (
45 46 procutil,
46 47 stringutil,
47 48 )
48 49
49 50 if pycompat.iswindows:
50 51 from . import scmwindows as scmplatform
51 52 else:
52 53 from . import scmposix as scmplatform
53 54
54 55 termsize = scmplatform.termsize
55 56
class status(tuple):
    """Tuple subclass exposing one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    """

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        """files that have been modified"""
        return self[0]

    @property
    def added(self):
        """files that have been added"""
        return self[1]

    @property
    def removed(self):
        """files that have been removed"""
        return self[2]

    @property
    def deleted(self):
        """files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        """
        return self[3]

    @property
    def unknown(self):
        """files not in the dirstate that are not ignored"""
        return self[4]

    @property
    def ignored(self):
        """files not in the dirstate that are ignored (by _dirignore())"""
        return self[5]

    @property
    def clean(self):
        """files that have not been modified"""
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
108 109
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, giving
    # ctx1 priority. Subpaths from ctx2 matter when the .hgsub file has
    # been modified (in ctx2) but is not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2, so
    # that 'sub.{status|diff}(rev2)' gets an accurate result instead of
    # comparing the ctx2 subrepo against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
133 134
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret and still alive; they explain
    # why nothing was exchanged.
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
150 151
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Emit the traceback (when enabled) before the handlers below
            # turn the exception into a plain message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode, bytes, or something else entirely;
        # render each case differently.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is not a failure: exit code 1, not -1.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): a 'code' attribute looks like an urllib HTTPError,
        # and 'reason' like a URLError/SSLError — confirm against callers.
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to 'head') is not an error.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
267 268
def checknewlabel(repo, lbl, kind):
    # The "kind" parameter is deliberately kept out of ui output;
    # embedding it would make the strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284 285
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289 290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
301 302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    value = ui.config('ui', 'portablefilenames')
    lowered = value.lower()
    boolval = stringutil.parsebool(value)
    # Windows always aborts on non-portable names; elsewhere only on request.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = boolval or lowered == 'warn'
    if boolval is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % value)
    return abort, warn
314 315
class casecollisionauditor(object):
    """Detect case-folding collisions against names already in the dirstate.

    Instances are callable: auditor(f) warns or aborts when f collides
    case-insensitively with a tracked file.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case all tracked names in one encoding.lower() call by
        # joining them on NUL and splitting again afterwards.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # _newfiles remembers names already checked so that auditing the
        # same filename twice does not raise a spurious collision.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
338 339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None if the view filters no revisions, or none of the filtered
    revisions is <= maxrev.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # Explicit bytes literal: hashlib requires bytes. The bare
            # '%d;' literal only worked through py2's str-is-bytes.
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
362 363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the starting path are fatal; unreadable
        # subdirectories are silently skipped by os.walk otherwise.
        if err.filename == path:
            raise err
    # samestat is used to detect directories already visited through
    # symlinks; where it is unavailable, symlinks are simply not followed.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst; return True only if it was
            # not seen before (i.e. this is a new directory).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the link target ourselves, sharing seen_dirs
                        # so symlink cycles terminate.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Mutate dirs in place so os.walk (topdown) honors the pruning.
            dirs[:] = newdirs
406 407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory has no real node; stand in with wdirid.
    node = ctx.node()
    return wdirid if node is None else node
413 414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory has no integer revision; use wdirrev instead.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
421 422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
427 428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-digit hash in debug mode, short form otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
435 436
def resolvepartialhexnodeid(repo, prefix):
    # Uses unfiltered repo because it's faster when the prefix is ambiguous.
    # This matches the "shortest" template function.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return
    # Raises if the node is filtered out of this repoview.
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
444 445
def isrevsymbol(repo, symbol):
    """Report whether revsymbol() would resolve 'symbol' in 'repo'."""
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
451 452
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            # Try the symbol as a revision number, allowing negative
            # numbers counted from the end of the changelog.
            r = int(symbol)
            if '%d' % r != symbol:
                # Non-canonical spellings like "010" are not revision
                # numbers; fall through to label lookup.
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # A filtered revision must surface as such, not as "unknown".
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            # Looks like a full hex nodeid; try resolving it directly.
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                # Not valid hex, or no such node; treat the symbol as a
                # label (tag, bookmark, branch, ...) below.
                pass

        return repo[symbol]

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        # Rewrap filtered errors with a message explaining why the
        # revision is not visible in this repoview.
        raise _filterederror(repo, symbol)
487 500
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    # Any other repoview: the changeset exists but is excluded from this
    # view's subset, so --hidden would not help.
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)
512 525
def revsingle(repo, revspec, default='.', localalias=None):
    # Note: revision 0 is falsy but valid, hence the explicit != 0 test.
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
521 534
def _pairspec(revspec):
    # True when the revspec parses to a top-level range expression.
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in rangeops
525 538
def revpairnodes(repo, revs):
    # Deprecated shim: resolve the pair of contexts, return their nodes.
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    first, second = revpair(repo, revs)
    return first.node(), second.node()
530 543
def revpair(repo, revs):
    # No revisions given: compare working directory against its parent.
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Take the two ends of the smartset, using min/max when the set
    # reports an ordering and first/last otherwise.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # first == second with several specs can mean one side was empty;
    # re-evaluate each spec individually to detect that case.
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
560 573
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped in rev(N) so they survive revset parsing.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
588 601
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # Merges always show both parents.
        return parents
    if repo.ui.debugflag:
        # Debug output shows the implicit null parent too.
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # Sole parent is the immediately preceding revision: omit it.
        return []
    return parents
604 617
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns are never glob-expanded.
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # Keep non-matching patterns verbatim so the matcher can
            # report them as bad.
            expanded.append(kindpat)
    return expanded
623 636
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        # Default: report bad matches on the ui. 'm' is bound below,
        # before the matcher can ever invoke this callback.
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
648 661
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _unused = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
653 666
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
657 670
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
661 674
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # A plain path: just normalize it relative to the repo root.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # A real pattern: it must match exactly one file in the revision.
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
675 688
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # A directory (but not a symlink) at the target path would shadow the
    # backup file; remove it.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
711 724
712 725 class _containsnode(object):
713 726 """proxy __contains__(node) to container.__contains__ which accepts revs"""
714 727
715 728 def __init__(self, repo, revcontainer):
716 729 self._torev = repo.changelog.rev
717 730 self._revcontains = revcontainer.__contains__
718 731
719 732 def __contains__(self, node):
720 733 return self._revcontains(self._torev(node))
721 734
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms: a plain iterable means the nodes
    # have no successors.
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # Caller-provided moves take precedence.
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
815 828
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and forget missing files matched by 'matcher'.

    Returns 1 if a subrepo addremove failed or an explicitly-matched file
    was rejected, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        # Similarity is given as a 0-100 percentage; store it as 0.0-1.0.
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # Recurse into subrepos first (when enabled or explicitly matched).
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # Only explicitly listed files surface their error to the user.
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    # Report what will be added (unknown/forgotten) and removed (deleted).
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
875 888
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Adds unknown/forgotten files, forgets deleted ones and, when
    similarity > 0, records renames.  Returns 1 if any file was rejected
    by the matcher, 0 otherwise.
    '''
    # Bind 'rejected' before the badfn closure below captures it.  The
    # previous ordering referenced a name assigned only on the next line,
    # which worked solely because the callback fires after the assignment.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
904 917
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists of file names:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dirstate states: '?' untracked, 'r' removed, 'a' added;
        # st is the walk's per-file value, falsy when the file is
        # missing from disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed in the dirstate but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
933 946
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new name -> old name for every pair whose
    similarity score meets the threshold.  A no-op when similarity <= 0.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if repo.ui.verbose or not (matcher.exact(old)
                                   and matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
948 961
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.

    All dirstate updates happen under a single wlock.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
958 971
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src was itself a copy, follow the chain back to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # undo the earlier copy: dst becomes a plain tracked file again
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added, not committed, so there is no
            # revision to attach copy metadata to
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
977 990
def readrequires(opener, supported):
    '''Read .hg/requires and verify every entry is a supported feature.

    Raises RequirementError when the file is corrupt or mentions features
    unknown to this Mercurial; otherwise returns the set of requirements.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
996 1009
def writerequires(opener, requirements):
    """Persist the given requirements, sorted, one entry per line."""
    with opener('requires', 'w') as fp:
        for requirement in sorted(requirements):
            fp.write("%s\n" % requirement)
1001 1014
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    Records stat info for ``path`` at creation time (when ``stat`` is
    true) and later reports via changed() whether the file was replaced
    on disk.  If the filesystem cannot reliably detect replacement
    (cacheable() is False), changed() always reports True so callers
    rebuild their cached state.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat so subsequent changed() calls compare against now
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; any other OS error
        # is propagated to the caller
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1056 1069
class filecacheentry(object):
    """Aggregate of one filecachesubentry per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        # bring every sub-entry's stat info up to date
        for e in self._entries:
            e.refresh()
1073 1086
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        # (used as the key into obj._filecache and obj.__dict__)
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # underlying file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        # explicit assignment replaces the computed value while keeping
        # _filecache bookkeeping consistent with __dict__
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1152 1165
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # each record is "<revspec>[ <value>]"
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child process and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1207 1220
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd through ui.system with the lock's inheritance token exposed.

    The lock must currently be held.  Note: a caller-supplied environ
    mapping is mutated in place to carry the token under envvar.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    environ = {} if environ is None else environ
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1217 1230
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1226 1239
def gdinitconfig(ui):
    """Return whether a new repo should be created with general delta."""
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1233 1246
def gddeltaconfig(ui):
    """Return whether incoming deltas should be optimised."""
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1239 1252
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # sentinel key under which read() returns the raw first line when
    # firstlinenonkeyval is set
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE(review): the 'keys' parameter is accepted but never used
        # here — confirm whether any caller relies on it
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp ensures readers never observe a partially written file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1308 1321
# transaction names (prefix-matched in registersummarycallback) after which
# obsoleted changesets are reported to the user
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (prefix-matched) after which newly added changesets
# are reported to the user
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1329 1342
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # a transaction matches when its name starts with any listed source
        return any(txnname.startswith(source) for source in sources)

    # category names registered so far; also used to number new categories
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # zero-padded index keeps post-close callbacks in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) pairs
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken before the transaction runs; compared after close
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1413 1426
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes, elided beyond maxnumnodes."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1419 1432
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1434 1447
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # intentionally a no-op here; extensions wrap this function
    return sink
1440 1453
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-like symbol from the parseable specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1483 1496
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try interpreting the symbol as a plain revision number
        try:
            n = int(s)
            # NOTE(review): valid unfiltered revs are 0..tiprev-1, so the
            # '<=' appears to let n == tiprev through to revs — confirm
            # whether '<' was intended
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        # hidden in the filtered view but known unfiltered
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise treat the symbol as a (possibly partial) hex nodeid
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now