scmutil: fix oversight in b76248e51605c6 where I forgot to use msg...
Augie Fackler
r36713:c442c4a9 default
@@ -1,1425 +1,1425 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
51 51 class status(tuple):
52 52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
53 53 and 'ignored' properties are only relevant to the working copy.
54 54 '''
55 55
56 56 __slots__ = ()
57 57
58 58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
59 59 clean):
60 60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
61 61 ignored, clean))
62 62
63 63 @property
64 64 def modified(self):
65 65 '''files that have been modified'''
66 66 return self[0]
67 67
68 68 @property
69 69 def added(self):
70 70 '''files that have been added'''
71 71 return self[1]
72 72
73 73 @property
74 74 def removed(self):
75 75 '''files that have been removed'''
76 76 return self[2]
77 77
78 78 @property
79 79 def deleted(self):
80 80 '''files that are in the dirstate, but have been deleted from the
81 81 working copy (aka "missing")
82 82 '''
83 83 return self[3]
84 84
85 85 @property
86 86 def unknown(self):
87 87 '''files not in the dirstate that are not ignored'''
88 88 return self[4]
89 89
90 90 @property
91 91 def ignored(self):
92 92 '''files not in the dirstate that are ignored (by _dirignore())'''
93 93 return self[5]
94 94
95 95 @property
96 96 def clean(self):
97 97 '''files that have not been modified'''
98 98 return self[6]
99 99
100 100 def __repr__(self, *args, **kwargs):
101 101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
102 102 'unknown=%r, ignored=%r, clean=%r>') % self)
103 103
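# Editor's note: a brief, hypothetical usage sketch (not part of this file's
# API surface). ``repo.status()`` returns a ``status`` instance; both the
# named properties above and plain tuple unpacking work:
def _examplestatususage(repo):
    st = repo.status(unknown=True, ignored=True)
    for f in st.modified:
        repo.ui.write('M %s\n' % f)
    # equivalent positional access, in constructor order
    modified, added, removed, deleted, unknown, ignored, clean = st
    return len(modified)
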
104 104 def itersubrepos(ctx1, ctx2):
105 105 """find subrepos in ctx1 or ctx2"""
106 106 # Create a (subpath, ctx) mapping where we prefer subpaths from
107 107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
108 108 # has been modified (in ctx2) but not yet committed (in ctx1).
109 109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
110 110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
111 111
112 112 missing = set()
113 113
114 114 for subpath in ctx2.substate:
115 115 if subpath not in ctx1.substate:
116 116 del subpaths[subpath]
117 117 missing.add(subpath)
118 118
119 119 for subpath, ctx in sorted(subpaths.iteritems()):
120 120 yield subpath, ctx.sub(subpath)
121 121
122 122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
123 123 # status and diff will have an accurate result when it does
124 124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
125 125 # against itself.
126 126 for subpath in missing:
127 127 yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
129 129 def nochangesfound(ui, repo, excluded=None):
130 130 '''Report no changes for push/pull. 'excluded' is None or a list of
131 131 nodes excluded from the push/pull.
132 132 '''
133 133 secretlist = []
134 134 if excluded:
135 135 for n in excluded:
136 136 ctx = repo[n]
137 137 if ctx.phase() >= phases.secret and not ctx.extinct():
138 138 secretlist.append(n)
139 139
140 140 if secretlist:
141 141 ui.status(_("no changes found (ignored %d secret changesets)\n")
142 142 % len(secretlist))
143 143 else:
144 144 ui.status(_("no changes found\n"))
145 145
146 146 def callcatch(ui, func):
147 147 """call func() with global exception handling
148 148
149 149 return func() if no exception happens. otherwise do some error handling
150 150 and return an exit code accordingly. does not handle all exceptions.
151 151 """
152 152 try:
153 153 try:
154 154 return func()
155 155 except: # re-raises
156 156 ui.traceback()
157 157 raise
158 158 # Global exception handling, alphabetically
159 159 # Mercurial-specific first, followed by built-in and library exceptions
160 160 except error.LockHeld as inst:
161 161 if inst.errno == errno.ETIMEDOUT:
162 162 reason = _('timed out waiting for lock held by %r') % inst.locker
163 163 else:
164 164 reason = _('lock held by %r') % inst.locker
165 165 ui.warn(_("abort: %s: %s\n")
166 166 % (inst.desc or util.forcebytestr(inst.filename), reason))
167 167 if not inst.locker:
168 168 ui.warn(_("(lock might be very busy)\n"))
169 169 except error.LockUnavailable as inst:
170 170 ui.warn(_("abort: could not lock %s: %s\n") %
171 171 (inst.desc or util.forcebytestr(inst.filename),
172 172 encoding.strtolocal(inst.strerror)))
173 173 except error.OutOfBandError as inst:
174 174 if inst.args:
175 175 msg = _("abort: remote error:\n")
176 176 else:
177 177 msg = _("abort: remote error\n")
178 178 ui.warn(msg)
179 179 if inst.args:
180 180 ui.warn(''.join(inst.args))
181 181 if inst.hint:
182 182 ui.warn('(%s)\n' % inst.hint)
183 183 except error.RepoError as inst:
184 184 ui.warn(_("abort: %s!\n") % inst)
185 185 if inst.hint:
186 186 ui.warn(_("(%s)\n") % inst.hint)
187 187 except error.ResponseError as inst:
188 188 ui.warn(_("abort: %s") % inst.args[0])
189 189 msg = inst.args[1]
190 190 if isinstance(msg, type(u'')):
191 191 msg = pycompat.sysbytes(msg)
192 - elif not isinstance(inst.args[1], bytes):
193 - ui.warn(" %r\n" % (inst.args[1],))
194 - elif not inst.args[1]:
192 + if not isinstance(msg, bytes):
193 + ui.warn(" %r\n" % (msg,))
194 + elif not msg:
195 195 ui.warn(_(" empty string\n"))
196 196 else:
197 - ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
197 + ui.warn("\n%r\n" % util.ellipsis(msg))
198 198 except error.CensoredNodeError as inst:
199 199 ui.warn(_("abort: file censored %s!\n") % inst)
200 200 except error.RevlogError as inst:
201 201 ui.warn(_("abort: %s!\n") % inst)
202 202 except error.InterventionRequired as inst:
203 203 ui.warn("%s\n" % inst)
204 204 if inst.hint:
205 205 ui.warn(_("(%s)\n") % inst.hint)
206 206 return 1
207 207 except error.WdirUnsupported:
208 208 ui.warn(_("abort: working directory revision cannot be specified\n"))
209 209 except error.Abort as inst:
210 210 ui.warn(_("abort: %s\n") % inst)
211 211 if inst.hint:
212 212 ui.warn(_("(%s)\n") % inst.hint)
213 213 except ImportError as inst:
214 214 ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
215 215 m = util.forcebytestr(inst).split()[-1]
216 216 if m in "mpatch bdiff".split():
217 217 ui.warn(_("(did you forget to compile extensions?)\n"))
218 218 elif m in "zlib".split():
219 219 ui.warn(_("(is your Python install correct?)\n"))
220 220 except IOError as inst:
221 221 if util.safehasattr(inst, "code"):
222 222 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
223 223 elif util.safehasattr(inst, "reason"):
224 224 try: # usually it is in the form (errno, strerror)
225 225 reason = inst.reason.args[1]
226 226 except (AttributeError, IndexError):
227 227 # it might be anything, for example a string
228 228 reason = inst.reason
229 229 if isinstance(reason, unicode):
230 230 # SSLError of Python 2.7.9 contains a unicode
231 231 reason = encoding.unitolocal(reason)
232 232 ui.warn(_("abort: error: %s\n") % reason)
233 233 elif (util.safehasattr(inst, "args")
234 234 and inst.args and inst.args[0] == errno.EPIPE):
235 235 pass
236 236 elif getattr(inst, "strerror", None):
237 237 if getattr(inst, "filename", None):
238 238 ui.warn(_("abort: %s: %s\n") % (
239 239 encoding.strtolocal(inst.strerror),
240 240 util.forcebytestr(inst.filename)))
241 241 else:
242 242 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
243 243 else:
244 244 raise
245 245 except OSError as inst:
246 246 if getattr(inst, "filename", None) is not None:
247 247 ui.warn(_("abort: %s: '%s'\n") % (
248 248 encoding.strtolocal(inst.strerror),
249 249 util.forcebytestr(inst.filename)))
250 250 else:
251 251 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
252 252 except MemoryError:
253 253 ui.warn(_("abort: out of memory\n"))
254 254 except SystemExit as inst:
255 255 # Commands shouldn't sys.exit directly, but give a return code.
256 256 # Just in case, catch this and pass the exit code to the caller.
257 257 return inst.code
258 258 except socket.error as inst:
259 259 ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))
260 260
261 261 return -1
262 262
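# Editor's note: an illustrative wrapper (names here are made up); dispatch
# code normally invokes ``callcatch`` so command errors become exit codes:
def _examplecallcatch(ui, repo):
    def runner():
        repo.ui.status('doing work\n')
        return 0
    # returns runner()'s value, or an exit code such as -1 if a handled
    # exception (error.Abort, IOError, ...) was raised
    return callcatch(ui, runner)
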
263 263 def checknewlabel(repo, lbl, kind):
264 264 # Do not use the "kind" parameter in ui output.
265 265 # It makes strings difficult to translate.
266 266 if lbl in ['tip', '.', 'null']:
267 267 raise error.Abort(_("the name '%s' is reserved") % lbl)
268 268 for c in (':', '\0', '\n', '\r'):
269 269 if c in lbl:
270 270 raise error.Abort(
271 271 _("%r cannot be used in a name") % pycompat.bytestr(c))
272 272 try:
273 273 int(lbl)
274 274 raise error.Abort(_("cannot use an integer as a name"))
275 275 except ValueError:
276 276 pass
277 277 if lbl.strip() != lbl:
278 278 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
279 279
280 280 def checkfilename(f):
281 281 '''Check that the filename f is an acceptable filename for a tracked file'''
282 282 if '\r' in f or '\n' in f:
283 283 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
284 284
285 285 def checkportable(ui, f):
286 286 '''Check if filename f is portable and warn or abort depending on config'''
287 287 checkfilename(f)
288 288 abort, warn = checkportabilityalert(ui)
289 289 if abort or warn:
290 290 msg = util.checkwinfilename(f)
291 291 if msg:
292 292 msg = "%s: %s" % (msg, util.shellquote(f))
293 293 if abort:
294 294 raise error.Abort(msg)
295 295 ui.warn(_("warning: %s\n") % msg)
296 296
297 297 def checkportabilityalert(ui):
298 298 '''check if the user's config requests nothing, a warning, or abort for
299 299 non-portable filenames'''
300 300 val = ui.config('ui', 'portablefilenames')
301 301 lval = val.lower()
302 302 bval = util.parsebool(val)
303 303 abort = pycompat.iswindows or lval == 'abort'
304 304 warn = bval or lval == 'warn'
305 305 if bval is None and not (warn or abort or lval == 'ignore'):
306 306 raise error.ConfigError(
307 307 _("ui.portablefilenames value is invalid ('%s')") % val)
308 308 return abort, warn
309 309
310 310 class casecollisionauditor(object):
311 311 def __init__(self, ui, abort, dirstate):
312 312 self._ui = ui
313 313 self._abort = abort
314 314 allfiles = '\0'.join(dirstate._map)
315 315 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
316 316 self._dirstate = dirstate
317 317 # The purpose of _newfiles is so that we don't complain about
318 318 # case collisions if someone were to call this object with the
319 319 # same filename twice.
320 320 self._newfiles = set()
321 321
322 322 def __call__(self, f):
323 323 if f in self._newfiles:
324 324 return
325 325 fl = encoding.lower(f)
326 326 if fl in self._loweredfiles and f not in self._dirstate:
327 327 msg = _('possible case-folding collision for %s') % f
328 328 if self._abort:
329 329 raise error.Abort(msg)
330 330 self._ui.warn(_("warning: %s\n") % msg)
331 331 self._loweredfiles.add(fl)
332 332 self._newfiles.add(f)
333 333
334 334 def filteredhash(repo, maxrev):
335 335 """build hash of filtered revisions in the current repoview.
336 336
337 337 Multiple caches perform up-to-date validation by checking that the
338 338 tiprev and tipnode stored in the cache file match the current repository.
339 339 However, this is not sufficient for validating repoviews because the set
340 340 of revisions in the view may change without the repository tiprev and
341 341 tipnode changing.
342 342
343 343 This function hashes all the revs filtered from the view and returns
344 344 that SHA-1 digest.
345 345 """
346 346 cl = repo.changelog
347 347 if not cl.filteredrevs:
348 348 return None
349 349 key = None
350 350 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
351 351 if revs:
352 352 s = hashlib.sha1()
353 353 for rev in revs:
354 354 s.update('%d;' % rev)
355 355 key = s.digest()
356 356 return key
357 357
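# Editor's note: a hedged sketch of the validation pattern described in the
# ``filteredhash`` docstring; the real cache formats (branchmap, tags) live
# in their own modules:
def _examplecachekey(repo):
    cl = repo.changelog
    tiprev = len(cl) - 1
    # a cache stamped with this triple can detect both new commits and a
    # changed set of filtered revisions
    return (tiprev, cl.node(tiprev), filteredhash(repo, tiprev))
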
358 358 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
359 359 '''yield every hg repository under path, always recursively.
360 360 The recurse flag will only control recursion into repo working dirs'''
361 361 def errhandler(err):
362 362 if err.filename == path:
363 363 raise err
364 364 samestat = getattr(os.path, 'samestat', None)
365 365 if followsym and samestat is not None:
366 366 def adddir(dirlst, dirname):
367 367 dirstat = os.stat(dirname)
368 368 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
369 369 if not match:
370 370 dirlst.append(dirstat)
371 371 return not match
372 372 else:
373 373 followsym = False
374 374
375 375 if (seen_dirs is None) and followsym:
376 376 seen_dirs = []
377 377 adddir(seen_dirs, path)
378 378 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
379 379 dirs.sort()
380 380 if '.hg' in dirs:
381 381 yield root # found a repository
382 382 qroot = os.path.join(root, '.hg', 'patches')
383 383 if os.path.isdir(os.path.join(qroot, '.hg')):
384 384 yield qroot # we have a patch queue repo here
385 385 if recurse:
386 386 # avoid recursing inside the .hg directory
387 387 dirs.remove('.hg')
388 388 else:
389 389 dirs[:] = [] # don't descend further
390 390 elif followsym:
391 391 newdirs = []
392 392 for d in dirs:
393 393 fname = os.path.join(root, d)
394 394 if adddir(seen_dirs, fname):
395 395 if os.path.islink(fname):
396 396 for hgname in walkrepos(fname, True, seen_dirs):
397 397 yield hgname
398 398 else:
399 399 newdirs.append(d)
400 400 dirs[:] = newdirs
401 401
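# Editor's note: illustrative only; lists every repository under the current
# directory, following symlinks (seen_dirs guards against symlink cycles):
def _examplewalkrepos(ui):
    for repopath in walkrepos('.', followsym=True):
        ui.write('%s\n' % repopath)
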
402 402 def binnode(ctx):
403 403 """Return binary node id for a given basectx"""
404 404 node = ctx.node()
405 405 if node is None:
406 406 return wdirid
407 407 return node
408 408
409 409 def intrev(ctx):
410 410 """Return integer for a given basectx that can be used in comparison or
411 411 arithmetic operation"""
412 412 rev = ctx.rev()
413 413 if rev is None:
414 414 return wdirrev
415 415 return rev
416 416
417 417 def formatchangeid(ctx):
418 418 """Format changectx as '{rev}:{node|formatnode}', which is the default
419 419 template provided by logcmdutil.changesettemplater"""
420 420 repo = ctx.repo()
421 421 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
422 422
423 423 def formatrevnode(ui, rev, node):
424 424 """Format given revision and node depending on the current verbosity"""
425 425 if ui.debugflag:
426 426 hexfunc = hex
427 427 else:
428 428 hexfunc = short
429 429 return '%d:%s' % (rev, hexfunc(node))
430 430
431 431 def revsingle(repo, revspec, default='.', localalias=None):
432 432 if not revspec and revspec != 0:
433 433 return repo[default]
434 434
435 435 l = revrange(repo, [revspec], localalias=localalias)
436 436 if not l:
437 437 raise error.Abort(_('empty revision set'))
438 438 return repo[l.last()]
439 439
440 440 def _pairspec(revspec):
441 441 tree = revsetlang.parse(revspec)
442 442 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
443 443
444 444 def revpair(repo, revs):
445 445 if not revs:
446 446 return repo.dirstate.p1(), None
447 447
448 448 l = revrange(repo, revs)
449 449
450 450 if not l:
451 451 first = second = None
452 452 elif l.isascending():
453 453 first = l.min()
454 454 second = l.max()
455 455 elif l.isdescending():
456 456 first = l.max()
457 457 second = l.min()
458 458 else:
459 459 first = l.first()
460 460 second = l.last()
461 461
462 462 if first is None:
463 463 raise error.Abort(_('empty revision range'))
464 464 if (first == second and len(revs) >= 2
465 465 and not all(revrange(repo, [r]) for r in revs)):
466 466 raise error.Abort(_('empty revision on one side of range'))
467 467
468 468 # if top-level is range expression, the result must always be a pair
469 469 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
470 470 return repo.lookup(first), None
471 471
472 472 return repo.lookup(first), repo.lookup(second)
473 473
474 474 def revrange(repo, specs, localalias=None):
475 475 """Execute 1 to many revsets and return the union.
476 476
477 477 This is the preferred mechanism for executing revsets using user-specified
478 478 config options, such as revset aliases.
479 479
480 480 The revsets specified by ``specs`` will be executed via a chained ``OR``
481 481 expression. If ``specs`` is empty, an empty result is returned.
482 482
483 483 ``specs`` can contain integers, in which case they are assumed to be
484 484 revision numbers.
485 485
486 486 It is assumed the revsets are already formatted. If you have arguments
487 487 that need to be expanded in the revset, call ``revsetlang.formatspec()``
488 488 and pass the result as an element of ``specs``.
489 489
490 490 Specifying a single revset is allowed.
491 491
492 492 Returns a ``revset.abstractsmartset`` which is a list-like interface over
493 493 integer revisions.
494 494 """
495 495 allspecs = []
496 496 for spec in specs:
497 497 if isinstance(spec, int):
498 498 spec = revsetlang.formatspec('rev(%d)', spec)
499 499 allspecs.append(spec)
500 500 return repo.anyrevs(allspecs, user=True, localalias=localalias)
501 501
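# Editor's note: a hypothetical caller showing the pattern recommended in the
# docstring above: expand arguments with revsetlang.formatspec() first, then
# pass the formatted spec to ``revrange``:
def _examplerevrange(repo, branchname):
    spec = revsetlang.formatspec('branch(%s) and not obsolete()', branchname)
    revs = revrange(repo, [spec])  # a revset.abstractsmartset
    return [repo[r].hex() for r in revs]
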
502 502 def meaningfulparents(repo, ctx):
503 503 """Return list of meaningful (or all if debug) parentrevs for rev.
504 504
505 505 For merges (two non-nullrev revisions) both parents are meaningful.
506 506 Otherwise the first parent revision is considered meaningful if it
507 507 is not the preceding revision.
508 508 """
509 509 parents = ctx.parents()
510 510 if len(parents) > 1:
511 511 return parents
512 512 if repo.ui.debugflag:
513 513 return [parents[0], repo['null']]
514 514 if parents[0].rev() >= intrev(ctx) - 1:
515 515 return []
516 516 return parents
517 517
518 518 def expandpats(pats):
519 519 '''Expand bare globs when running on windows.
520 520 On posix we assume it has already been done by sh.'''
521 521 if not util.expandglobs:
522 522 return list(pats)
523 523 ret = []
524 524 for kindpat in pats:
525 525 kind, pat = matchmod._patsplit(kindpat, None)
526 526 if kind is None:
527 527 try:
528 528 globbed = glob.glob(pat)
529 529 except re.error:
530 530 globbed = [pat]
531 531 if globbed:
532 532 ret.extend(globbed)
533 533 continue
534 534 ret.append(kindpat)
535 535 return ret
536 536
537 537 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
538 538 badfn=None):
539 539 '''Return a matcher and the patterns that were used.
540 540 The matcher will warn about bad matches, unless an alternate badfn callback
541 541 is provided.'''
542 542 if pats == ("",):
543 543 pats = []
544 544 if opts is None:
545 545 opts = {}
546 546 if not globbed and default == 'relpath':
547 547 pats = expandpats(pats or [])
548 548
549 549 def bad(f, msg):
550 550 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
551 551
552 552 if badfn is None:
553 553 badfn = bad
554 554
555 555 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
556 556 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
557 557
558 558 if m.always():
559 559 pats = []
560 560 return m, pats
561 561
562 562 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
563 563 badfn=None):
564 564 '''Return a matcher that will warn about bad matches.'''
565 565 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
566 566
567 567 def matchall(repo):
568 568 '''Return a matcher that will efficiently match everything.'''
569 569 return matchmod.always(repo.root, repo.getcwd())
570 570
571 571 def matchfiles(repo, files, badfn=None):
572 572 '''Return a matcher that will efficiently match exactly these files.'''
573 573 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
574 574
575 575 def parsefollowlinespattern(repo, rev, pat, msg):
576 576 """Return a file name from `pat` pattern suitable for usage in followlines
577 577 logic.
578 578 """
579 579 if not matchmod.patkind(pat):
580 580 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
581 581 else:
582 582 ctx = repo[rev]
583 583 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
584 584 files = [f for f in ctx if m(f)]
585 585 if len(files) != 1:
586 586 raise error.ParseError(msg)
587 587 return files[0]
588 588
589 589 def origpath(ui, repo, filepath):
590 590 '''customize where .orig files are created
591 591
592 592 Fetch user defined path from config file: [ui] origbackuppath = <path>
593 593 Fall back to default (filepath with .orig suffix) if not specified
594 594 '''
595 595 origbackuppath = ui.config('ui', 'origbackuppath')
596 596 if not origbackuppath:
597 597 return filepath + ".orig"
598 598
599 599 # Convert filepath from an absolute path into a path inside the repo.
600 600 filepathfromroot = util.normpath(os.path.relpath(filepath,
601 601 start=repo.root))
602 602
603 603 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
604 604 origbackupdir = origvfs.dirname(filepathfromroot)
605 605 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
606 606 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
607 607
608 608 # Remove any files that conflict with the backup file's path
609 609 for f in reversed(list(util.finddirs(filepathfromroot))):
610 610 if origvfs.isfileorlink(f):
611 611 ui.note(_('removing conflicting file: %s\n')
612 612 % origvfs.join(f))
613 613 origvfs.unlink(f)
614 614 break
615 615
616 616 origvfs.makedirs(origbackupdir)
617 617
618 618 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
619 619 ui.note(_('removing conflicting directory: %s\n')
620 620 % origvfs.join(filepathfromroot))
621 621 origvfs.rmtree(filepathfromroot, forcibly=True)
622 622
623 623 return origvfs.join(filepathfromroot)
624 624
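# Editor's note: a small sketch with an assumed configuration; with
# [ui] origbackuppath unset, origpath() simply returns filepath + '.orig':
def _exampleorigpath(ui, repo):
    # e.g. with "origbackuppath = .hg/origbackups" in [ui], this returns a
    # path inside .hg/origbackups mirroring the file's repo-relative path
    return origpath(ui, repo, repo.wjoin('subdir/file.txt'))
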
625 625 class _containsnode(object):
626 626 """proxy __contains__(node) to container.__contains__ which accepts revs"""
627 627
628 628 def __init__(self, repo, revcontainer):
629 629 self._torev = repo.changelog.rev
630 630 self._revcontains = revcontainer.__contains__
631 631
632 632 def __contains__(self, node):
633 633 return self._revcontains(self._torev(node))
634 634
635 635 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
636 636 """do common cleanups when old nodes are replaced by new nodes
637 637
638 638 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
639 639 (we might also want to move working directory parent in the future)
640 640
641 641 By default, bookmark moves are calculated automatically from 'replacements',
642 642 but 'moves' can be used to override that. Also, 'moves' may include
643 643 additional bookmark moves that should not have associated obsmarkers.
644 644
645 645 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
646 646 have replacements. operation is a string, like "rebase".
647 647
648 648 metadata is a dictionary containing metadata to be stored in obsmarkers if
649 649 obsolescence is enabled.
650 650 """
651 651 if not replacements and not moves:
652 652 return
653 653
654 654 # translate mapping's other forms
655 655 if not util.safehasattr(replacements, 'items'):
656 656 replacements = {n: () for n in replacements}
657 657
658 658 # Calculate bookmark movements
659 659 if moves is None:
660 660 moves = {}
661 661 # Unfiltered repo is needed since nodes in replacements might be hidden.
662 662 unfi = repo.unfiltered()
663 663 for oldnode, newnodes in replacements.items():
664 664 if oldnode in moves:
665 665 continue
666 666 if len(newnodes) > 1:
667 667 # usually a split, take the one with biggest rev number
668 668 newnode = next(unfi.set('max(%ln)', newnodes)).node()
669 669 elif len(newnodes) == 0:
670 670 # move bookmark backwards
671 671 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
672 672 list(replacements)))
673 673 if roots:
674 674 newnode = roots[0].node()
675 675 else:
676 676 newnode = nullid
677 677 else:
678 678 newnode = newnodes[0]
679 679 moves[oldnode] = newnode
680 680
681 681 with repo.transaction('cleanup') as tr:
682 682 # Move bookmarks
683 683 bmarks = repo._bookmarks
684 684 bmarkchanges = []
685 685 allnewnodes = [n for ns in replacements.values() for n in ns]
686 686 for oldnode, newnode in moves.items():
687 687 oldbmarks = repo.nodebookmarks(oldnode)
688 688 if not oldbmarks:
689 689 continue
690 690 from . import bookmarks # avoid import cycle
691 691 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
692 692 (oldbmarks, hex(oldnode), hex(newnode)))
693 693 # Delete divergent bookmarks being parents of related newnodes
694 694 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
695 695 allnewnodes, newnode, oldnode)
696 696 deletenodes = _containsnode(repo, deleterevs)
697 697 for name in oldbmarks:
698 698 bmarkchanges.append((name, newnode))
699 699 for b in bookmarks.divergent2delete(repo, deletenodes, name):
700 700 bmarkchanges.append((b, None))
701 701
702 702 if bmarkchanges:
703 703 bmarks.applychanges(repo, tr, bmarkchanges)
704 704
705 705 # Obsolete or strip nodes
706 706 if obsolete.isenabled(repo, obsolete.createmarkersopt):
707 707 # If a node is already obsoleted, and we want to obsolete it
708 708 # without a successor, skip that obsolete request since it's
709 709 # unnecessary. That's the "if s or not isobs(n)" check below.
710 710 # Also sort the nodes in topological order; that might be useful for
711 711 # some obsstore logic.
712 712 # NOTE: the filtering and sorting might belong to createmarkers.
713 713 isobs = unfi.obsstore.successors.__contains__
714 714 torev = unfi.changelog.rev
715 715 sortfunc = lambda ns: torev(ns[0])
716 716 rels = [(unfi[n], tuple(unfi[m] for m in s))
717 717 for n, s in sorted(replacements.items(), key=sortfunc)
718 718 if s or not isobs(n)]
719 719 if rels:
720 720 obsolete.createmarkers(repo, rels, operation=operation,
721 721 metadata=metadata)
722 722 else:
723 723 from . import repair # avoid import cycle
724 724 tostrip = list(replacements)
725 725 if tostrip:
726 726 repair.delayedstrip(repo.ui, repo, tostrip, operation)
727 727
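# Editor's note: hypothetical calls showing the two ``replacements`` shapes
# accepted by cleanupnodes(); node values are assumed binary node ids:
def _examplecleanupnodes(repo, oldnode, newnode):
    # mapping form: oldnode was rewritten into newnode
    cleanupnodes(repo, {oldnode: [newnode]}, 'amend')

def _examplepruned(repo, oldnode):
    # iterable form: nodes dropped without successors
    cleanupnodes(repo, [oldnode], 'prune')
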
728 728 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
729 729 if opts is None:
730 730 opts = {}
731 731 m = matcher
732 732 if dry_run is None:
733 733 dry_run = opts.get('dry_run')
734 734 if similarity is None:
735 735 similarity = float(opts.get('similarity') or 0)
736 736
737 737 ret = 0
738 738 join = lambda f: os.path.join(prefix, f)
739 739
740 740 wctx = repo[None]
741 741 for subpath in sorted(wctx.substate):
742 742 submatch = matchmod.subdirmatcher(subpath, m)
743 743 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
744 744 sub = wctx.sub(subpath)
745 745 try:
746 746 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
747 747 ret = 1
748 748 except error.LookupError:
749 749 repo.ui.status(_("skipping missing subrepository: %s\n")
750 750 % join(subpath))
751 751
752 752 rejected = []
753 753 def badfn(f, msg):
754 754 if f in m.files():
755 755 m.bad(f, msg)
756 756 rejected.append(f)
757 757
758 758 badmatch = matchmod.badmatch(m, badfn)
759 759 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
760 760 badmatch)
761 761
762 762 unknownset = set(unknown + forgotten)
763 763 toprint = unknownset.copy()
764 764 toprint.update(deleted)
765 765 for abs in sorted(toprint):
766 766 if repo.ui.verbose or not m.exact(abs):
767 767 if abs in unknownset:
768 768 status = _('adding %s\n') % m.uipath(abs)
769 769 else:
770 770 status = _('removing %s\n') % m.uipath(abs)
771 771 repo.ui.status(status)
772 772
773 773 renames = _findrenames(repo, m, added + unknown, removed + deleted,
774 774 similarity)
775 775
776 776 if not dry_run:
777 777 _markchanges(repo, unknown + forgotten, deleted, renames)
778 778
779 779 for f in rejected:
780 780 if f in m.files():
781 781 return 1
782 782 return ret
783 783
784 784 def marktouched(repo, files, similarity=0.0):
785 785 '''Assert that files have somehow been operated upon. files are relative to
786 786 the repo root.'''
787 787 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
788 788 rejected = []
789 789
790 790 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
791 791
792 792 if repo.ui.verbose:
793 793 unknownset = set(unknown + forgotten)
794 794 toprint = unknownset.copy()
795 795 toprint.update(deleted)
796 796 for abs in sorted(toprint):
797 797 if abs in unknownset:
798 798 status = _('adding %s\n') % abs
799 799 else:
800 800 status = _('removing %s\n') % abs
801 801 repo.ui.status(status)
802 802
803 803 renames = _findrenames(repo, m, added + unknown, removed + deleted,
804 804 similarity)
805 805
806 806 _markchanges(repo, unknown + forgotten, deleted, renames)
807 807
808 808 for f in rejected:
809 809 if f in m.files():
810 810 return 1
811 811 return 0
812 812
813 813 def _interestingfiles(repo, matcher):
814 814 '''Walk dirstate with matcher, looking for files that addremove would care
815 815 about.
816 816
817 817 This is different from dirstate.status because it doesn't care about
818 818 whether files are modified or clean.'''
819 819 added, unknown, deleted, removed, forgotten = [], [], [], [], []
820 820 audit_path = pathutil.pathauditor(repo.root, cached=True)
821 821
822 822 ctx = repo[None]
823 823 dirstate = repo.dirstate
824 824 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
825 825 unknown=True, ignored=False, full=False)
826 826 for abs, st in walkresults.iteritems():
827 827 dstate = dirstate[abs]
828 828 if dstate == '?' and audit_path.check(abs):
829 829 unknown.append(abs)
830 830 elif dstate != 'r' and not st:
831 831 deleted.append(abs)
832 832 elif dstate == 'r' and st:
833 833 forgotten.append(abs)
834 834 # for finding renames
835 835 elif dstate == 'r' and not st:
836 836 removed.append(abs)
837 837 elif dstate == 'a':
838 838 added.append(abs)
839 839
840 840 return added, unknown, deleted, removed, forgotten
841 841
842 842 def _findrenames(repo, matcher, added, removed, similarity):
843 843 '''Find renames from removed files to added ones.'''
844 844 renames = {}
845 845 if similarity > 0:
846 846 for old, new, score in similar.findrenames(repo, added, removed,
847 847 similarity):
848 848 if (repo.ui.verbose or not matcher.exact(old)
849 849 or not matcher.exact(new)):
850 850 repo.ui.status(_('recording removal of %s as rename to %s '
851 851 '(%d%% similar)\n') %
852 852 (matcher.rel(old), matcher.rel(new),
853 853 score * 100))
854 854 renames[new] = old
855 855 return renames
856 856
857 857 def _markchanges(repo, unknown, deleted, renames):
858 858 '''Marks the files in unknown as added, the files in deleted as removed,
859 859 and the files in renames as copied.'''
860 860 wctx = repo[None]
861 861 with repo.wlock():
862 862 wctx.forget(deleted)
863 863 wctx.add(unknown)
864 864 for new, old in renames.iteritems():
865 865 wctx.copy(old, new)
866 866
867 867 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
868 868 """Update the dirstate to reflect the intent of copying src to dst. For
869 869 different reasons it might not end with dst being marked as copied from src.
870 870 """
871 871 origsrc = repo.dirstate.copied(src) or src
872 872 if dst == origsrc: # copying back a copy?
873 873 if repo.dirstate[dst] not in 'mn' and not dryrun:
874 874 repo.dirstate.normallookup(dst)
875 875 else:
876 876 if repo.dirstate[origsrc] == 'a' and origsrc == src:
877 877 if not ui.quiet:
878 878 ui.warn(_("%s has not been committed yet, so no copy "
879 879 "data will be stored for %s.\n")
880 880 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
881 881 if repo.dirstate[dst] in '?r' and not dryrun:
882 882 wctx.add([dst])
883 883 elif not dryrun:
884 884 wctx.copy(origsrc, dst)
885 885
886 886 def readrequires(opener, supported):
887 887 '''Reads and parses .hg/requires and checks if all entries found
888 888 are in the list of supported features.'''
889 889 requirements = set(opener.read("requires").splitlines())
890 890 missings = []
891 891 for r in requirements:
892 892 if r not in supported:
893 893 if not r or not r[0:1].isalnum():
894 894 raise error.RequirementError(_(".hg/requires file is corrupt"))
895 895 missings.append(r)
896 896 missings.sort()
897 897 if missings:
898 898 raise error.RequirementError(
899 899 _("repository requires features unknown to this Mercurial: %s")
900 900 % " ".join(missings),
901 901 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
902 902 " for more information"))
903 903 return requirements
904 904
905 905 def writerequires(opener, requirements):
906 906 with opener('requires', 'w') as fp:
907 907 for r in sorted(requirements):
908 908 fp.write("%s\n" % r)
909 909
910 910 class filecachesubentry(object):
911 911 def __init__(self, path, stat):
912 912 self.path = path
913 913 self.cachestat = None
914 914 self._cacheable = None
915 915
916 916 if stat:
917 917 self.cachestat = filecachesubentry.stat(self.path)
918 918
919 919 if self.cachestat:
920 920 self._cacheable = self.cachestat.cacheable()
921 921 else:
922 922 # None means we don't know yet
923 923 self._cacheable = None
924 924
925 925 def refresh(self):
926 926 if self.cacheable():
927 927 self.cachestat = filecachesubentry.stat(self.path)
928 928
929 929 def cacheable(self):
930 930 if self._cacheable is not None:
931 931 return self._cacheable
932 932
933 933 # we don't know yet, assume it is for now
934 934 return True
935 935
936 936 def changed(self):
937 937 # no point in going further if we can't cache it
938 938 if not self.cacheable():
939 939 return True
940 940
941 941 newstat = filecachesubentry.stat(self.path)
942 942
943 943 # we may not know if it's cacheable yet, check again now
944 944 if newstat and self._cacheable is None:
945 945 self._cacheable = newstat.cacheable()
946 946
947 947 # check again
948 948 if not self._cacheable:
949 949 return True
950 950
951 951 if self.cachestat != newstat:
952 952 self.cachestat = newstat
953 953 return True
954 954 else:
955 955 return False
956 956
957 957 @staticmethod
958 958 def stat(path):
959 959 try:
960 960 return util.cachestat(path)
961 961 except OSError as e:
962 962 if e.errno != errno.ENOENT:
963 963 raise
964 964
965 965 class filecacheentry(object):
966 966 def __init__(self, paths, stat=True):
967 967 self._entries = []
968 968 for path in paths:
969 969 self._entries.append(filecachesubentry(path, stat))
970 970
971 971 def changed(self):
972 972 '''true if any entry has changed'''
973 973 for entry in self._entries:
974 974 if entry.changed():
975 975 return True
976 976 return False
977 977
978 978 def refresh(self):
979 979 for entry in self._entries:
980 980 entry.refresh()
981 981
982 982 class filecache(object):
983 983 '''A property like decorator that tracks files under .hg/ for updates.
984 984
985 985 Records stat info when called in _filecache.
986 986
987 987 On subsequent calls, compares old stat info with new info, and recreates the
988 988 object when any of the files changes, updating the new stat info in
989 989 _filecache.
990 990
991 991 Mercurial either atomically renames or appends to files under .hg,
992 992 so to ensure the cache is reliable we need the filesystem to be able
993 993 to tell us if a file has been replaced. If it can't, we fall back to
994 994 recreating the object on every call (essentially the same behavior as
995 995 propertycache).
996 996
997 997 '''
998 998 def __init__(self, *paths):
999 999 self.paths = paths
1000 1000
1001 1001 def join(self, obj, fname):
1002 1002 """Used to compute the runtime path of a cached file.
1003 1003
1004 1004 Users should subclass filecache and provide their own version of this
1005 1005 function to call the appropriate join function on 'obj' (an instance
1006 1006 of the class whose member function was decorated).
1007 1007 """
1008 1008 raise NotImplementedError
1009 1009
1010 1010 def __call__(self, func):
1011 1011 self.func = func
1012 1012 self.name = func.__name__.encode('ascii')
1013 1013 return self
1014 1014
1015 1015 def __get__(self, obj, type=None):
1016 1016 # if accessed on the class, return the descriptor itself.
1017 1017 if obj is None:
1018 1018 return self
1019 1019 # do we need to check if the file changed?
1020 1020 if self.name in obj.__dict__:
1021 1021 assert self.name in obj._filecache, self.name
1022 1022 return obj.__dict__[self.name]
1023 1023
1024 1024 entry = obj._filecache.get(self.name)
1025 1025
1026 1026 if entry:
1027 1027 if entry.changed():
1028 1028 entry.obj = self.func(obj)
1029 1029 else:
1030 1030 paths = [self.join(obj, path) for path in self.paths]
1031 1031
1032 1032 # We stat -before- creating the object so our cache doesn't lie if
1033 1033 # a writer modified the file between the time we read and stat it
1034 1034 entry = filecacheentry(paths, True)
1035 1035 entry.obj = self.func(obj)
1036 1036
1037 1037 obj._filecache[self.name] = entry
1038 1038
1039 1039 obj.__dict__[self.name] = entry.obj
1040 1040 return entry.obj
1041 1041
1042 1042 def __set__(self, obj, value):
1043 1043 if self.name not in obj._filecache:
1044 1044 # we add an entry for the missing value because X in __dict__
1045 1045 # implies X in _filecache
1046 1046 paths = [self.join(obj, path) for path in self.paths]
1047 1047 ce = filecacheentry(paths, False)
1048 1048 obj._filecache[self.name] = ce
1049 1049 else:
1050 1050 ce = obj._filecache[self.name]
1051 1051
1052 1052 ce.obj = value # update cached copy
1053 1053 obj.__dict__[self.name] = value # update copy returned by obj.x
1054 1054
1055 1055 def __delete__(self, obj):
1056 1056 try:
1057 1057 del obj.__dict__[self.name]
1058 1058 except KeyError:
1059 1059 raise AttributeError(self.name)
1060 1060
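# Editor's note: a minimal, hypothetical subclass illustrating the ``join``
# contract described above; localrepo defines the real equivalents of this:
class _examplefilecache(filecache):
    """filecache for files resolved through an object's ``vfs``"""
    def join(self, obj, fname):
        # 'obj' is the instance whose method was decorated; we assume it
        # exposes a vfs attribute for computing runtime paths
        return obj.vfs.join(fname)

# usage sketch (on a class providing a ``_filecache`` dict and a ``vfs``):
#     @_examplefilecache('bookmarks')
#     def bookmarks(self):
#         return self.vfs.read('bookmarks')  # re-read only when stat changes
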
1061 1061 def extdatasource(repo, source):
1062 1062 """Gather a map of rev -> value dict from the specified source
1063 1063
1064 1064 A source spec is treated as a URL, with a special case shell: type
1065 1065 for parsing the output from a shell command.
1066 1066
1067 1067 The data is parsed as a series of newline-separated records where
1068 1068 each record is a revision specifier optionally followed by a space
1069 1069 and a freeform string value. If the revision is known locally, it
1070 1070 is converted to a rev, otherwise the record is skipped.
1071 1071
1072 1072 Note that both key and value are treated as UTF-8 and converted to
1073 1073 the local encoding. This allows uniformity between local and
1074 1074 remote data sources.
1075 1075 """
1076 1076
1077 1077 spec = repo.ui.config("extdata", source)
1078 1078 if not spec:
1079 1079 raise error.Abort(_("unknown extdata source '%s'") % source)
1080 1080
1081 1081 data = {}
1082 1082 src = proc = None
1083 1083 try:
1084 1084 if spec.startswith("shell:"):
1085 1085 # external commands should be run relative to the repo root
1086 1086 cmd = spec[6:]
1087 1087 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1088 1088 close_fds=util.closefds,
1089 1089 stdout=subprocess.PIPE, cwd=repo.root)
1090 1090 src = proc.stdout
1091 1091 else:
1092 1092 # treat as a URL or file
1093 1093 src = url.open(repo.ui, spec)
1094 1094 for l in src:
1095 1095 if " " in l:
1096 1096 k, v = l.strip().split(" ", 1)
1097 1097 else:
1098 1098 k, v = l.strip(), ""
1099 1099
1100 1100 k = encoding.tolocal(k)
1101 1101 try:
1102 1102 data[repo[k].rev()] = encoding.tolocal(v)
1103 1103 except (error.LookupError, error.RepoLookupError):
1104 1104 pass # we ignore data for nodes that don't exist locally
1105 1105 finally:
1106 1106 if proc:
1107 1107 proc.communicate()
1108 1108 if src:
1109 1109 src.close()
1110 1110 if proc and proc.returncode != 0:
1111 1111 raise error.Abort(_("extdata command '%s' failed: %s")
1112 1112 % (cmd, util.explainexit(proc.returncode)[0]))
1113 1113
1114 1114 return data
1115 1115
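# Editor's note: an illustrative use of ``extdatasource`` with an assumed
# config entry such as
#
#   [extdata]
#   notes = shell:cat notes.txt
#
# where notes.txt holds "<revspec> <free-form text>" lines:
def _exampleextdata(repo):
    data = extdatasource(repo, 'notes')  # {rev: value}; unknown revs skipped
    return data
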
1116 1116 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1117 1117 if lock is None:
1118 1118 raise error.LockInheritanceContractViolation(
1119 1119 'lock can only be inherited while held')
1120 1120 if environ is None:
1121 1121 environ = {}
1122 1122 with lock.inherit() as locker:
1123 1123 environ[envvar] = locker
1124 1124 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1125 1125
1126 1126 def wlocksub(repo, cmd, *args, **kwargs):
1127 1127 """run cmd as a subprocess that allows inheriting repo's wlock
1128 1128
1129 1129 This can only be called while the wlock is held. This takes all the
1130 1130 arguments that ui.system does, and returns the exit code of the
1131 1131 subprocess."""
1132 1132 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1133 1133 **kwargs)
1134 1134
1135 1135 def gdinitconfig(ui):
1136 1136 """helper function to know if a repo should be created as general delta
1137 1137 """
1138 1138 # experimental config: format.generaldelta
1139 1139 return (ui.configbool('format', 'generaldelta')
1140 1140 or ui.configbool('format', 'usegeneraldelta'))
1141 1141
1142 1142 def gddeltaconfig(ui):
1143 1143 """helper function to know if incoming delta should be optimised
1144 1144 """
1145 1145 # experimental config: format.generaldelta
1146 1146 return ui.configbool('format', 'generaldelta')
1147 1147
1148 1148 class simplekeyvaluefile(object):
1149 1149 """A simple file with key=value lines
1150 1150
1151 1151 Keys must be alphanumeric and start with a letter, values must not
1152 1152 contain '\n' characters"""
1153 1153 firstlinekey = '__firstline'
1154 1154
1155 1155 def __init__(self, vfs, path, keys=None):
1156 1156 self.vfs = vfs
1157 1157 self.path = path
1158 1158
1159 1159 def read(self, firstlinenonkeyval=False):
1160 1160 """Read the contents of a simple key-value file
1161 1161
1162 1162 'firstlinenonkeyval' indicates whether the first line of the file
1163 1163 should be treated as a key-value pair or returned fully under the
1164 1164 __firstline key."""
1165 1165 lines = self.vfs.readlines(self.path)
1166 1166 d = {}
1167 1167 if firstlinenonkeyval:
1168 1168 if not lines:
1169 1169 e = _("empty simplekeyvalue file")
1170 1170 raise error.CorruptedState(e)
1171 1171 # we don't want to include '\n' in the __firstline
1172 1172 d[self.firstlinekey] = lines[0][:-1]
1173 1173 del lines[0]
1174 1174
1175 1175 try:
1176 1176 # the 'if line.strip()' part prevents us from failing on empty
1177 1177 # lines which only contain '\n' and therefore are not skipped
1178 1178 # by 'if line'
1179 1179 updatedict = dict(line[:-1].split('=', 1) for line in lines
1180 1180 if line.strip())
1181 1181 if self.firstlinekey in updatedict:
1182 1182 e = _("%r can't be used as a key")
1183 1183 raise error.CorruptedState(e % self.firstlinekey)
1184 1184 d.update(updatedict)
1185 1185 except ValueError as e:
1186 1186 raise error.CorruptedState(str(e))
1187 1187 return d
1188 1188
1189 1189 def write(self, data, firstline=None):
1190 1190 """Write key=>value mapping to a file
1191 1191 data is a dict. Keys must be alphanumeric and start with a letter.
1192 1192 Values must not contain newline characters.
1193 1193
1194 1194 If 'firstline' is not None, it is written to file before
1195 1195 everything else, as it is, not in a key=value form"""
1196 1196 lines = []
1197 1197 if firstline is not None:
1198 1198 lines.append('%s\n' % firstline)
1199 1199
1200 1200 for k, v in data.items():
1201 1201 if k == self.firstlinekey:
1202 1202 e = "key name '%s' is reserved" % self.firstlinekey
1203 1203 raise error.ProgrammingError(e)
1204 1204 if not k[0:1].isalpha():
1205 1205 e = "keys must start with a letter in a key-value file"
1206 1206 raise error.ProgrammingError(e)
1207 1207 if not k.isalnum():
1208 1208 e = "invalid key name in a simple key-value file"
1209 1209 raise error.ProgrammingError(e)
1210 1210 if '\n' in v:
1211 1211 e = "invalid value in a simple key-value file"
1212 1212 raise error.ProgrammingError(e)
1213 1213 lines.append("%s=%s\n" % (k, v))
1214 1214 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1215 1215 fp.write(''.join(lines))
1216 1216
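# Editor's note: a hedged round-trip sketch; the file name and keys below are
# invented for the example:
def _examplekeyvaluefile(repo):
    f = simplekeyvaluefile(repo.vfs, 'examplestate')
    f.write({'version': '1', 'user': 'alice'}, firstline='examplestate-v1')
    d = f.read(firstlinenonkeyval=True)
    # d == {'__firstline': 'examplestate-v1', 'version': '1', 'user': 'alice'}
    return d
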
1217 1217 _reportobsoletedsource = [
1218 1218 'debugobsolete',
1219 1219 'pull',
1220 1220 'push',
1221 1221 'serve',
1222 1222 'unbundle',
1223 1223 ]
1224 1224
1225 1225 _reportnewcssource = [
1226 1226 'pull',
1227 1227 'unbundle',
1228 1228 ]
1229 1229
1230 1230 # a list of (repo, ctx, files) functions called by various commands to allow
1231 1231 # extensions to ensure the corresponding files are available locally, before the
1232 1232 # command uses them.
1233 1233 fileprefetchhooks = util.hooks()
1234 1234
1235 1235 # A marker that tells the evolve extension to suppress its own reporting
1236 1236 _reportstroubledchangesets = True
1237 1237
1238 1238 def registersummarycallback(repo, otr, txnname=''):
1239 1239 """register a callback to issue a summary after the transaction is closed
1240 1240 """
1241 1241 def txmatch(sources):
1242 1242 return any(txnname.startswith(source) for source in sources)
1243 1243
1244 1244 categories = []
1245 1245
1246 1246 def reportsummary(func):
1247 1247 """decorator for report callbacks."""
1248 1248 # The repoview life cycle is shorter than the one of the actual
1249 1249 # underlying repository. So the filtered object can die before the
1250 1250 # weakref is used leading to troubles. We keep a reference to the
1251 1251 # unfiltered object and restore the filtering when retrieving the
1252 1252 # repository through the weakref.
1253 1253 filtername = repo.filtername
1254 1254 reporef = weakref.ref(repo.unfiltered())
1255 1255 def wrapped(tr):
1256 1256 repo = reporef()
1257 1257 if filtername:
1258 1258 repo = repo.filtered(filtername)
1259 1259 func(repo, tr)
1260 1260 newcat = '%02i-txnreport' % len(categories)
1261 1261 otr.addpostclose(newcat, wrapped)
1262 1262 categories.append(newcat)
1263 1263 return wrapped
1264 1264
1265 1265 if txmatch(_reportobsoletedsource):
1266 1266 @reportsummary
1267 1267 def reportobsoleted(repo, tr):
1268 1268 obsoleted = obsutil.getobsoleted(repo, tr)
1269 1269 if obsoleted:
1270 1270 repo.ui.status(_('obsoleted %i changesets\n')
1271 1271 % len(obsoleted))
1272 1272
1273 1273 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1274 1274 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1275 1275 instabilitytypes = [
1276 1276 ('orphan', 'orphan'),
1277 1277 ('phase-divergent', 'phasedivergent'),
1278 1278 ('content-divergent', 'contentdivergent'),
1279 1279 ]
1280 1280
1281 1281 def getinstabilitycounts(repo):
1282 1282 filtered = repo.changelog.filteredrevs
1283 1283 counts = {}
1284 1284 for instability, revset in instabilitytypes:
1285 1285 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1286 1286 filtered)
1287 1287 return counts
1288 1288
1289 1289 oldinstabilitycounts = getinstabilitycounts(repo)
1290 1290 @reportsummary
1291 1291 def reportnewinstabilities(repo, tr):
1292 1292 newinstabilitycounts = getinstabilitycounts(repo)
1293 1293 for instability, revset in instabilitytypes:
1294 1294 delta = (newinstabilitycounts[instability] -
1295 1295 oldinstabilitycounts[instability])
1296 1296 if delta > 0:
1297 1297 repo.ui.warn(_('%i new %s changesets\n') %
1298 1298 (delta, instability))
1299 1299
1300 1300 if txmatch(_reportnewcssource):
1301 1301 @reportsummary
1302 1302 def reportnewcs(repo, tr):
1303 1303 """Report the range of new revisions pulled/unbundled."""
1304 1304 newrevs = tr.changes.get('revs', xrange(0, 0))
1305 1305 if not newrevs:
1306 1306 return
1307 1307
1308 1308 # Compute the bounds of new revisions' range, excluding obsoletes.
1309 1309 unfi = repo.unfiltered()
1310 1310 revs = unfi.revs('%ld and not obsolete()', newrevs)
1311 1311 if not revs:
1312 1312 # Got only obsoletes.
1313 1313 return
1314 1314 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1315 1315
1316 1316 if minrev == maxrev:
1317 1317 revrange = minrev
1318 1318 else:
1319 1319 revrange = '%s:%s' % (minrev, maxrev)
1320 1320 repo.ui.status(_('new changesets %s\n') % revrange)
1321 1321
1322 1322 def nodesummaries(repo, nodes, maxnumnodes=4):
1323 1323 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1324 1324 return ' '.join(short(h) for h in nodes)
1325 1325 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1326 1326 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1327 1327
1328 1328 def enforcesinglehead(repo, tr, desc):
1329 1329 """check that no named branch has multiple heads"""
1330 1330 if desc in ('strip', 'repair'):
1331 1331 # skip the logic during strip
1332 1332 return
1333 1333 visible = repo.filtered('visible')
1334 1334 # possible improvement: we could restrict the check to affected branch
1335 1335 for name, heads in visible.branchmap().iteritems():
1336 1336 if len(heads) > 1:
1337 1337 msg = _('rejecting multiple heads on branch "%s"')
1338 1338 msg %= name
1339 1339 hint = _('%d heads: %s')
1340 1340 hint %= (len(heads), nodesummaries(repo, heads))
1341 1341 raise error.Abort(msg, hint=hint)
1342 1342
1343 1343 def wrapconvertsink(sink):
1344 1344 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1345 1345 before it is used, whether or not the convert extension was formally loaded.
1346 1346 """
1347 1347 return sink
1348 1348
1349 1349 def unhidehashlikerevs(repo, specs, hiddentype):
1350 1350 """parse the user specs and unhide changesets whose hash or revision number
1351 1351 is passed.
1352 1352
1353 1353 hiddentype can be: 1) 'warn': warn while unhiding changesets
1354 1354 2) 'nowarn': don't warn while unhiding changesets
1355 1355
1356 1356 returns a repo object with the required changesets unhidden
1357 1357 """
1358 1358 if not repo.filtername or not repo.ui.configbool('experimental',
1359 1359 'directaccess'):
1360 1360 return repo
1361 1361
1362 1362 if repo.filtername not in ('visible', 'visible-hidden'):
1363 1363 return repo
1364 1364
1365 1365 symbols = set()
1366 1366 for spec in specs:
1367 1367 try:
1368 1368 tree = revsetlang.parse(spec)
1369 1369 except error.ParseError: # will be reported by scmutil.revrange()
1370 1370 continue
1371 1371
1372 1372 symbols.update(revsetlang.gethashlikesymbols(tree))
1373 1373
1374 1374 if not symbols:
1375 1375 return repo
1376 1376
1377 1377 revs = _getrevsfromsymbols(repo, symbols)
1378 1378
1379 1379 if not revs:
1380 1380 return repo
1381 1381
1382 1382 if hiddentype == 'warn':
1383 1383 unfi = repo.unfiltered()
1384 1384 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1385 1385 repo.ui.warn(_("warning: accessing hidden changesets for write "
1386 1386 "operation: %s\n") % revstr)
1387 1387
1388 1388 # we have to use a new filtername to separate branch/tags caches until we
1389 1389 # can disable these caches when revisions are dynamically pinned.
1390 1390 return repo.filtered('visible-hidden', revs)
1391 1391
1392 1392 def _getrevsfromsymbols(repo, symbols):
1393 1393 """parse the list of symbols and returns a set of revision numbers of hidden
1394 1394 changesets present in symbols"""
1395 1395 revs = set()
1396 1396 unfi = repo.unfiltered()
1397 1397 unficl = unfi.changelog
1398 1398 cl = repo.changelog
1399 1399 tiprev = len(unficl)
1400 1400 pmatch = unficl._partialmatch
1401 1401 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1402 1402 for s in symbols:
1403 1403 try:
1404 1404 n = int(s)
1405 1405 if n <= tiprev:
1406 1406 if not allowrevnums:
1407 1407 continue
1408 1408 else:
1409 1409 if n not in cl:
1410 1410 revs.add(n)
1411 1411 continue
1412 1412 except ValueError:
1413 1413 pass
1414 1414
1415 1415 try:
1416 1416 s = pmatch(s)
1417 1417 except error.LookupError:
1418 1418 s = None
1419 1419
1420 1420 if s is not None:
1421 1421 rev = unficl.rev(s)
1422 1422 if rev not in cl:
1423 1423 revs.add(rev)
1424 1424
1425 1425 return revs