##// END OF EJS Templates
scmutil: fix __repr__ of status tuple...
Augie Fackler -
r37940:a8a7ccec default
parent child Browse files
Show More
@@ -1,1569 +1,1571 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
50 50 if pycompat.iswindows:
51 51 from . import scmwindows as scmplatform
52 52 else:
53 53 from . import scmposix as scmplatform
54 54
55 55 termsize = scmplatform.termsize
56 56
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # Use stringutil.pprint() rather than %r so the output is stable
        # across Python 2 and 3 (plain %r of bytestrings renders as 'foo'
        # on py2 but b'foo' on py3).  The stale %r-based return statement
        # that preceded this one (leftover from the old implementation,
        # unreachable dead code) has been removed.
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(
                    v, bprefix=False)) for v in self))
109 111
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs.  Subpaths present in both contexts
    prefer ctx1; subpaths only in ctx2 are yielded last as empty (null)
    subrepos based on ctx1.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # Subpaths that exist only in ctx2 are handled separately below.
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
134 136
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret (and not extinct): they explain
    # why the remote saw nothing even though local changesets exist.
    secretlist = []
    if excluded:
        for node in excluded:
            c = repo[node]
            if c.phase() >= phases.secret and not c.extinct():
                secretlist.append(node)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
151 153
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print the traceback (if --traceback) before the outer
            # handlers turn the exception into a user-facing message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # On py3 the payload may arrive as a str; normalize to bytes first.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the one case with a distinct exit code.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError duck-types IOError with a .code attribute
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            # URLError wraps the underlying error in .reason
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): silent
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
268 270
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` cannot be used as a new label name.

    Rejects reserved names, forbidden characters, pure integers, and
    names with leading/trailing whitespace.
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 287
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
290 292
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302 304
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    bval = stringutil.parsebool(val)
    lval = val.lower()
    # Windows always aborts; elsewhere only an explicit 'abort' does.
    abort = lval == 'abort' or pycompat.iswindows
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
315 317
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds to the same string as
    an already-seen filename (a collision on case-insensitive
    filesystems)."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked filename once up front; joining with
        # NUL lets us lower the whole set in a single encoding.lower call.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if f not in self._dirstate and fl in self._loweredfiles:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
339 341
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.  Returns None when nothing at or below maxrev is
    filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    hiddenrevs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not hiddenrevs:
        return None
    sha = hashlib.sha1()
    for r in hiddenrevs:
        sha.update('%d;' % r)
    return sha.digest()
363 365
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; errors deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat if unseen; returns True when the
            # directory had not been visited before (loop prevention
            # while following symlinks).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect symlink cycles safely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the link target manually; os.walk itself
                        # does not follow it with topdown pruning.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Prune in place so os.walk only descends into unseen dirs.
            dirs[:] = newdirs
407 409
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; substitute wdirid.
    n = ctx.node()
    return wdirid if n is None else n
414 416
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory has rev None; substitute the wdir sentinel rev.
    r = ctx.rev()
    return wdirrev if r is None else r
422 424
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
428 430
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full 40-char hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436 438
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix to a full binary node.

    Returns None if nothing matches; _partialmatch may raise for an
    ambiguous prefix, and the rev() call below raises if the matched
    node is filtered in the current view.
    """
    # Uses unfiltered repo because it's faster when prefix is ambiguous.
    # This matches the shortesthexnodeidprefix() function below.
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
445 447
def shortesthexnodeidprefix(repo, node, minlength=1):
    """Find the shortest unambiguous prefix that matches hexnode."""
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = repo.unfiltered().changelog
    try:
        return cl.shortest(node, minlength)
    except error.LookupError:
        raise error.RepoLookupError()
455 457
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.LookupError if the symbol is an
    ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
467 469
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Resolution order: special names, integer revision, full 40-char hex
    node, registered names (bookmarks/tags/branches), then hex nodeid
    prefix.  Filtered-revision errors are converted to a friendlier
    message at the end.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            # Only treat as a rev number if the round-trip is exact
            # (rejects things like '010' or '1_0').
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                # Full hex nodeid.
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Last resort: abbreviated hex nodeid prefix.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
528 530
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # Visibility filter: check whether the changeset is hidden because it
    # is obsolete, and if so enrich the message with the reason.
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
553 555
def revsingle(repo, revspec, default='.', localalias=None):
    """Return the last context of the revset `revspec`, or repo[default]
    when the spec is empty (the integer 0 is a valid spec)."""
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
562 564
def _pairspec(revspec):
    # True when the top-level revset operator is a range expression,
    # meaning the caller must produce a pair even if both ends coincide.
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
566 568
def revpairnodes(repo, revs):
    """Deprecated: node-returning variant of revpair()."""
    repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
    c1, c2 = revpair(repo, revs)
    return c1.node(), c2.node()
571 573
def revpair(repo, revs):
    """Return a (first, second) pair of contexts for the revset(s) `revs`.

    With no specs, returns (wc parent, working copy).  A single
    non-range spec resolving to one rev pairs it with the working copy.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    # Pick the endpoints cheaply when the smartset is ordered.
    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
601 603
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Bare integers are wrapped as rev(N) expressions before evaluation.
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
629 631
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) <= 1:
        if repo.ui.debugflag:
            # In debug mode always show both slots, padding with null.
            return [parents[0], repo['null']]
        if parents[0].rev() >= intrev(ctx) - 1:
            # The parent immediately precedes this rev: implied, omit it.
            return []
    return parents
645 647
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # An explicit pattern kind ('re:', 'glob:', ...) is kept as-is.
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # No match on disk: keep the literal pattern.
            ret.append(kindpat)
    return ret
664 666
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        # Default bad-match handler: warn via the repo ui.  Closes over m,
        # which is bound just below, before the matcher can call back.
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
689 691
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
694 696
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
698 700
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
702 704
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError(msg) unless the pattern resolves to exactly
    one file in the context of `rev`.
    """
    if not matchmod.patkind(pat):
        # Plain path: canonicalize relative to the repo root.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
716 718
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # A directory (but not a symlink) already occupying the backup path
    # must be cleared before the .orig file can be written there.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
752 754
753 755 class _containsnode(object):
754 756 """proxy __contains__(node) to container.__contains__ which accepts revs"""
755 757
756 758 def __init__(self, repo, revcontainer):
757 759 self._torev = repo.changelog.rev
758 760 self._revcontains = revcontainer.__contains__
759 761
760 762 def __contains__(self, node):
761 763 return self._revcontains(self._torev(node))
762 764
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # Caller-provided moves take precedence.
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
856 858
def addremove(repo, matcher, prefix, opts=None):
    """Add new (unknown) files and remove missing (deleted) files matched by
    `matcher`, recursing into subrepos when requested, and record renames
    found by similarity detection.  Returns 1 on any rejected file or
    subrepo failure, else 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        # --similarity is given as a 0..100 percentage on the command line.
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # Only complain for files the user named explicitly.
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # An explicitly-named file that could not be matched is a failure.
    for f in rejected:
        if f in m.files():
            return 1
    return ret
916 918
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # collect files the matcher rejects; the badfn lambda closes over this
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    # any rejected file that was explicitly requested means failure
    exact = m.files()
    if any(f in exact for f in rejected):
        return 1
    return 0
945 947
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists of file names:
    (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # st is the per-file walk result; a falsy st presumably means the file
    # is no longer present on disk -- TODO confirm against dirstate.walk
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked ('?') and in an auditable location
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed ('r') but still present on disk
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            # scheduled for addition ('a')
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
974 976
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
989 991
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # take the wlock so dirstate updates are atomic
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for newname, oldname in renames.iteritems():
            wctx.copy(oldname, newname)
999 1001
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain: if src was itself copied, credit the
    # original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # NOTE(review): 'mn' appears to be the merged/normal dirstate
        # states; other states are re-marked for lookup -- confirm
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added in the working copy, so there is no
            # committed revision to record copy data against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1018 1020
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file itself
        # is damaged rather than merely listing an unknown feature
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1037 1039
def writerequires(opener, requirements):
    """Write the requirements, sorted, one per line, via the opener."""
    lines = ['%s\n' % r for r in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
1042 1044
class filecachesubentry(object):
    """Stat-based change tracking for a single file path."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once determined, None while unknown
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-record the file's stat data, unless it cannot be cached."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """True if stat data can detect changes to this file.

        Optimistically returns True while the answer is still unknown."""
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        """True if the file appears modified since the last refresh."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1097 1099
class filecacheentry(object):
    """Aggregate stat-based change tracking over several file paths."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1114 1116
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # record the name twice: obj.__dict__ is keyed by the native-string
        # name while obj._filecache is keyed by the bytes name
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # stale: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop the cached copy; the _filecache entry is kept so a later
        # __get__ can still compare stat data
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)
1194 1196
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.

    Raises Abort when the source is not configured or the shell command
    exits non-zero.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    # only check the exit code after the process has been reaped above
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1249 1251
1250 1252 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1251 1253 if lock is None:
1252 1254 raise error.LockInheritanceContractViolation(
1253 1255 'lock can only be inherited while held')
1254 1256 if environ is None:
1255 1257 environ = {}
1256 1258 with lock.inherit() as locker:
1257 1259 environ[envvar] = locker
1258 1260 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1259 1261
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1268 1270
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1275 1277
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1281 1283
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # strip the trailing '\n' from the recorded first line
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def _validate(self, k, v):
        """Raise ProgrammingError when (k, v) may not be stored."""
        if k == self.firstlinekey:
            e = "key name '%s' is reserved" % self.firstlinekey
            raise error.ProgrammingError(e)
        if not k[0:1].isalpha():
            e = "keys must start with a letter in a key-value file"
            raise error.ProgrammingError(e)
        if not k.isalnum():
            e = "invalid key name in a simple key-value file"
            raise error.ProgrammingError(e)
        if '\n' in v:
            e = "invalid value in a simple key-value file"
            raise error.ProgrammingError(e)

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)
        for k, v in data.items():
            self._validate(k, v)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1350 1352
# transaction names (matched by prefix in registersummarycallback) for
# which a summary of obsoleted changesets is reported on transaction close
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix in registersummarycallback) for
# which the range of incoming changesets is reported on transaction close
_reportnewcssource = [
    'pull',
    'unbundle',
]
1363 1365
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1376 1378
# a list of (repo, revs, match) prefetch functions, invoked by
# prefetchfiles() above
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1382 1384
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Which summaries are registered depends on 'txnname' (matched by prefix
    against the _report*source lists) and on the repository's obsolescence
    configuration. 'otr' is the transaction to attach the callbacks to.
    """
    def txmatch(sources):
        # a transaction name matches when it starts with any listed source
        return any(txnname.startswith(source) for source in sources)

    # names already used for otr.addpostclose, so each report gets a
    # unique, ordered category
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report the delta
        # introduced by this transaction
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1466 1468
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a short string naming nodes, elided beyond maxnumnodes."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1472 1474
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1487 1489
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions override this
    return sink
1493 1495
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be explicitly enabled
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only operate on the standard visibility filters
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1536 1538
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    # NOTE(review): tiprev is len(changelog), i.e. one greater than the tip
    # revision, so 'n <= tiprev' below also admits n == len(unficl) --
    # confirm this bound is intended
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    # numeric access to hidden revs is disabled
                    continue
                else:
                    if n not in cl:
                        # hidden: in the unfiltered repo but not the view
                        revs.add(n)
                    continue
        except ValueError:
            # not a revision number; fall through to hash resolution
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # resolves in the unfiltered repo only, so it is hidden
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now