scmutil: avoid using basestring and add explicit handling of unicodes...
Augie Fackler
r36679:b76248e5 default
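The change below replaces a Python 2-only basestring check in the
error.ResponseError handler with explicit handling of unicode and bytes.
A minimal sketch of the pattern, runnable on either Python; sysbytes is a
hypothetical stand-in for mercurial.pycompat.sysbytes (assumed here to
UTF-8-encode str on Python 3 and to be a no-op on Python 2):

def sysbytes(s):
    # stand-in for pycompat.sysbytes (assumption: utf-8 encode on py3,
    # identity on py2, where str is already bytes)
    return s.encode('utf-8') if isinstance(s, type(u'')) else s

def formatresponse(msg):
    # old, py2-only form:
    #     if not isinstance(msg, basestring): ...
    # new form: name the unicode case explicitly, then treat anything
    # that still is not bytes as an arbitrary object worth repr()-ing
    if isinstance(msg, type(u'')):    # unicode on py2, str on py3
        msg = sysbytes(msg)
    elif not isinstance(msg, bytes):  # e.g. an int or a tuple
        return repr(msg).encode('ascii')
    if not msg:
        return b'empty string'
    return msg

print(formatresponse(u'remote error'))  # b'remote error' (py3 output)
print(formatresponse((404, 'x')))       # b"(404, 'x')"

The real handler keeps warning with %r for non-bytes values, as the hunk
around old line 189 shows; this sketch only illustrates the type triage.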
@@ -1,1422 +1,1425 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
51 51 class status(tuple):
52 52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
53 53 and 'ignored' properties are only relevant to the working copy.
54 54 '''
55 55
56 56 __slots__ = ()
57 57
58 58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
59 59 clean):
60 60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
61 61 ignored, clean))
62 62
63 63 @property
64 64 def modified(self):
65 65 '''files that have been modified'''
66 66 return self[0]
67 67
68 68 @property
69 69 def added(self):
70 70 '''files that have been added'''
71 71 return self[1]
72 72
73 73 @property
74 74 def removed(self):
75 75 '''files that have been removed'''
76 76 return self[2]
77 77
78 78 @property
79 79 def deleted(self):
80 80 '''files that are in the dirstate, but have been deleted from the
81 81 working copy (aka "missing")
82 82 '''
83 83 return self[3]
84 84
85 85 @property
86 86 def unknown(self):
87 87 '''files not in the dirstate that are not ignored'''
88 88 return self[4]
89 89
90 90 @property
91 91 def ignored(self):
92 92 '''files not in the dirstate that are ignored (by _dirignore())'''
93 93 return self[5]
94 94
95 95 @property
96 96 def clean(self):
97 97 '''files that have not been modified'''
98 98 return self[6]
99 99
100 100 def __repr__(self, *args, **kwargs):
101 101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
102 102 'unknown=%r, ignored=%r, clean=%r>') % self)
103 103
104 104 def itersubrepos(ctx1, ctx2):
105 105 """find subrepos in ctx1 or ctx2"""
106 106 # Create a (subpath, ctx) mapping where we prefer subpaths from
107 107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
108 108 # has been modified (in ctx2) but not yet committed (in ctx1).
109 109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
110 110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
111 111
112 112 missing = set()
113 113
114 114 for subpath in ctx2.substate:
115 115 if subpath not in ctx1.substate:
116 116 del subpaths[subpath]
117 117 missing.add(subpath)
118 118
119 119 for subpath, ctx in sorted(subpaths.iteritems()):
120 120 yield subpath, ctx.sub(subpath)
121 121
122 122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
123 123 # status and diff will have an accurate result when it does
124 124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
125 125 # against itself.
126 126 for subpath in missing:
127 127 yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
129 129 def nochangesfound(ui, repo, excluded=None):
130 130 '''Report no changes for push/pull, excluded is None or a list of
131 131 nodes excluded from the push/pull.
132 132 '''
133 133 secretlist = []
134 134 if excluded:
135 135 for n in excluded:
136 136 ctx = repo[n]
137 137 if ctx.phase() >= phases.secret and not ctx.extinct():
138 138 secretlist.append(n)
139 139
140 140 if secretlist:
141 141 ui.status(_("no changes found (ignored %d secret changesets)\n")
142 142 % len(secretlist))
143 143 else:
144 144 ui.status(_("no changes found\n"))
145 145
146 146 def callcatch(ui, func):
147 147 """call func() with global exception handling
148 148
149 149 return func() if no exception happens. otherwise do some error handling
150 150 and return an exit code accordingly. does not handle all exceptions.
151 151 """
152 152 try:
153 153 try:
154 154 return func()
155 155 except: # re-raises
156 156 ui.traceback()
157 157 raise
158 158 # Global exception handling, alphabetically
159 159 # Mercurial-specific first, followed by built-in and library exceptions
160 160 except error.LockHeld as inst:
161 161 if inst.errno == errno.ETIMEDOUT:
162 162 reason = _('timed out waiting for lock held by %r') % inst.locker
163 163 else:
164 164 reason = _('lock held by %r') % inst.locker
165 165 ui.warn(_("abort: %s: %s\n")
166 166 % (inst.desc or util.forcebytestr(inst.filename), reason))
167 167 if not inst.locker:
168 168 ui.warn(_("(lock might be very busy)\n"))
169 169 except error.LockUnavailable as inst:
170 170 ui.warn(_("abort: could not lock %s: %s\n") %
171 171 (inst.desc or util.forcebytestr(inst.filename),
172 172 encoding.strtolocal(inst.strerror)))
173 173 except error.OutOfBandError as inst:
174 174 if inst.args:
175 175 msg = _("abort: remote error:\n")
176 176 else:
177 177 msg = _("abort: remote error\n")
178 178 ui.warn(msg)
179 179 if inst.args:
180 180 ui.warn(''.join(inst.args))
181 181 if inst.hint:
182 182 ui.warn('(%s)\n' % inst.hint)
183 183 except error.RepoError as inst:
184 184 ui.warn(_("abort: %s!\n") % inst)
185 185 if inst.hint:
186 186 ui.warn(_("(%s)\n") % inst.hint)
187 187 except error.ResponseError as inst:
188 188 ui.warn(_("abort: %s") % inst.args[0])
189 - if not isinstance(inst.args[1], basestring):
189 + msg = inst.args[1]
190 + if isinstance(msg, type(u'')):
191 + msg = pycompat.sysbytes(msg)
192 + elif not isinstance(inst.args[1], bytes):
190 193 ui.warn(" %r\n" % (inst.args[1],))
191 194 elif not inst.args[1]:
192 195 ui.warn(_(" empty string\n"))
193 196 else:
194 197 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
195 198 except error.CensoredNodeError as inst:
196 199 ui.warn(_("abort: file censored %s!\n") % inst)
197 200 except error.RevlogError as inst:
198 201 ui.warn(_("abort: %s!\n") % inst)
199 202 except error.InterventionRequired as inst:
200 203 ui.warn("%s\n" % inst)
201 204 if inst.hint:
202 205 ui.warn(_("(%s)\n") % inst.hint)
203 206 return 1
204 207 except error.WdirUnsupported:
205 208 ui.warn(_("abort: working directory revision cannot be specified\n"))
206 209 except error.Abort as inst:
207 210 ui.warn(_("abort: %s\n") % inst)
208 211 if inst.hint:
209 212 ui.warn(_("(%s)\n") % inst.hint)
210 213 except ImportError as inst:
211 214 ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
212 215 m = util.forcebytestr(inst).split()[-1]
213 216 if m in "mpatch bdiff".split():
214 217 ui.warn(_("(did you forget to compile extensions?)\n"))
215 218 elif m in "zlib".split():
216 219 ui.warn(_("(is your Python install correct?)\n"))
217 220 except IOError as inst:
218 221 if util.safehasattr(inst, "code"):
219 222 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
220 223 elif util.safehasattr(inst, "reason"):
221 224 try: # usually it is in the form (errno, strerror)
222 225 reason = inst.reason.args[1]
223 226 except (AttributeError, IndexError):
224 227 # it might be anything, for example a string
225 228 reason = inst.reason
226 229 if isinstance(reason, unicode):
227 230 # SSLError of Python 2.7.9 contains a unicode
228 231 reason = encoding.unitolocal(reason)
229 232 ui.warn(_("abort: error: %s\n") % reason)
230 233 elif (util.safehasattr(inst, "args")
231 234 and inst.args and inst.args[0] == errno.EPIPE):
232 235 pass
233 236 elif getattr(inst, "strerror", None):
234 237 if getattr(inst, "filename", None):
235 238 ui.warn(_("abort: %s: %s\n") % (
236 239 encoding.strtolocal(inst.strerror),
237 240 util.forcebytestr(inst.filename)))
238 241 else:
239 242 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
240 243 else:
241 244 raise
242 245 except OSError as inst:
243 246 if getattr(inst, "filename", None) is not None:
244 247 ui.warn(_("abort: %s: '%s'\n") % (
245 248 encoding.strtolocal(inst.strerror),
246 249 util.forcebytestr(inst.filename)))
247 250 else:
248 251 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
249 252 except MemoryError:
250 253 ui.warn(_("abort: out of memory\n"))
251 254 except SystemExit as inst:
252 255 # Commands shouldn't sys.exit directly, but give a return code.
253 256 # Just in case catch this and pass exit code to caller.
254 257 return inst.code
255 258 except socket.error as inst:
256 259 ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))
257 260
258 261 return -1
259 262
260 263 def checknewlabel(repo, lbl, kind):
261 264 # Do not use the "kind" parameter in ui output.
262 265 # It makes strings difficult to translate.
263 266 if lbl in ['tip', '.', 'null']:
264 267 raise error.Abort(_("the name '%s' is reserved") % lbl)
265 268 for c in (':', '\0', '\n', '\r'):
266 269 if c in lbl:
267 270 raise error.Abort(
268 271 _("%r cannot be used in a name") % pycompat.bytestr(c))
269 272 try:
270 273 int(lbl)
271 274 raise error.Abort(_("cannot use an integer as a name"))
272 275 except ValueError:
273 276 pass
274 277 if lbl.strip() != lbl:
275 278 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
276 279
277 280 def checkfilename(f):
278 281 '''Check that the filename f is an acceptable filename for a tracked file'''
279 282 if '\r' in f or '\n' in f:
280 283 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
281 284
282 285 def checkportable(ui, f):
283 286 '''Check if filename f is portable and warn or abort depending on config'''
284 287 checkfilename(f)
285 288 abort, warn = checkportabilityalert(ui)
286 289 if abort or warn:
287 290 msg = util.checkwinfilename(f)
288 291 if msg:
289 292 msg = "%s: %s" % (msg, util.shellquote(f))
290 293 if abort:
291 294 raise error.Abort(msg)
292 295 ui.warn(_("warning: %s\n") % msg)
293 296
294 297 def checkportabilityalert(ui):
295 298 '''check if the user's config requests nothing, a warning, or abort for
296 299 non-portable filenames'''
297 300 val = ui.config('ui', 'portablefilenames')
298 301 lval = val.lower()
299 302 bval = util.parsebool(val)
300 303 abort = pycompat.iswindows or lval == 'abort'
301 304 warn = bval or lval == 'warn'
302 305 if bval is None and not (warn or abort or lval == 'ignore'):
303 306 raise error.ConfigError(
304 307 _("ui.portablefilenames value is invalid ('%s')") % val)
305 308 return abort, warn
306 309
307 310 class casecollisionauditor(object):
308 311 def __init__(self, ui, abort, dirstate):
309 312 self._ui = ui
310 313 self._abort = abort
311 314 allfiles = '\0'.join(dirstate._map)
312 315 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
313 316 self._dirstate = dirstate
314 317 # The purpose of _newfiles is so that we don't complain about
315 318 # case collisions if someone were to call this object with the
316 319 # same filename twice.
317 320 self._newfiles = set()
318 321
319 322 def __call__(self, f):
320 323 if f in self._newfiles:
321 324 return
322 325 fl = encoding.lower(f)
323 326 if fl in self._loweredfiles and f not in self._dirstate:
324 327 msg = _('possible case-folding collision for %s') % f
325 328 if self._abort:
326 329 raise error.Abort(msg)
327 330 self._ui.warn(_("warning: %s\n") % msg)
328 331 self._loweredfiles.add(fl)
329 332 self._newfiles.add(f)
330 333
331 334 def filteredhash(repo, maxrev):
332 335 """build hash of filtered revisions in the current repoview.
333 336
334 337 Multiple caches perform up-to-date validation by checking that the
335 338 tiprev and tipnode stored in the cache file match the current repository.
336 339 However, this is not sufficient for validating repoviews because the set
337 340 of revisions in the view may change without the repository tiprev and
338 341 tipnode changing.
339 342
340 343 This function hashes all the revs filtered from the view and returns
341 344 that SHA-1 digest.
342 345 """
343 346 cl = repo.changelog
344 347 if not cl.filteredrevs:
345 348 return None
346 349 key = None
347 350 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
348 351 if revs:
349 352 s = hashlib.sha1()
350 353 for rev in revs:
351 354 s.update('%d;' % rev)
352 355 key = s.digest()
353 356 return key
354 357
355 358 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
356 359 '''yield every hg repository under path, always recursively.
357 360 The recurse flag will only control recursion into repo working dirs'''
358 361 def errhandler(err):
359 362 if err.filename == path:
360 363 raise err
361 364 samestat = getattr(os.path, 'samestat', None)
362 365 if followsym and samestat is not None:
363 366 def adddir(dirlst, dirname):
364 367 dirstat = os.stat(dirname)
365 368 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
366 369 if not match:
367 370 dirlst.append(dirstat)
368 371 return not match
369 372 else:
370 373 followsym = False
371 374
372 375 if (seen_dirs is None) and followsym:
373 376 seen_dirs = []
374 377 adddir(seen_dirs, path)
375 378 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
376 379 dirs.sort()
377 380 if '.hg' in dirs:
378 381 yield root # found a repository
379 382 qroot = os.path.join(root, '.hg', 'patches')
380 383 if os.path.isdir(os.path.join(qroot, '.hg')):
381 384 yield qroot # we have a patch queue repo here
382 385 if recurse:
383 386 # avoid recursing inside the .hg directory
384 387 dirs.remove('.hg')
385 388 else:
386 389 dirs[:] = [] # don't descend further
387 390 elif followsym:
388 391 newdirs = []
389 392 for d in dirs:
390 393 fname = os.path.join(root, d)
391 394 if adddir(seen_dirs, fname):
392 395 if os.path.islink(fname):
393 396 for hgname in walkrepos(fname, True, seen_dirs):
394 397 yield hgname
395 398 else:
396 399 newdirs.append(d)
397 400 dirs[:] = newdirs
398 401
399 402 def binnode(ctx):
400 403 """Return binary node id for a given basectx"""
401 404 node = ctx.node()
402 405 if node is None:
403 406 return wdirid
404 407 return node
405 408
406 409 def intrev(ctx):
407 410 """Return integer for a given basectx that can be used in comparison or
408 411 arithmetic operation"""
409 412 rev = ctx.rev()
410 413 if rev is None:
411 414 return wdirrev
412 415 return rev
413 416
414 417 def formatchangeid(ctx):
415 418 """Format changectx as '{rev}:{node|formatnode}', which is the default
416 419 template provided by logcmdutil.changesettemplater"""
417 420 repo = ctx.repo()
418 421 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
419 422
420 423 def formatrevnode(ui, rev, node):
421 424 """Format given revision and node depending on the current verbosity"""
422 425 if ui.debugflag:
423 426 hexfunc = hex
424 427 else:
425 428 hexfunc = short
426 429 return '%d:%s' % (rev, hexfunc(node))
427 430
428 431 def revsingle(repo, revspec, default='.', localalias=None):
429 432 if not revspec and revspec != 0:
430 433 return repo[default]
431 434
432 435 l = revrange(repo, [revspec], localalias=localalias)
433 436 if not l:
434 437 raise error.Abort(_('empty revision set'))
435 438 return repo[l.last()]
436 439
437 440 def _pairspec(revspec):
438 441 tree = revsetlang.parse(revspec)
439 442 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
440 443
441 444 def revpair(repo, revs):
442 445 if not revs:
443 446 return repo.dirstate.p1(), None
444 447
445 448 l = revrange(repo, revs)
446 449
447 450 if not l:
448 451 first = second = None
449 452 elif l.isascending():
450 453 first = l.min()
451 454 second = l.max()
452 455 elif l.isdescending():
453 456 first = l.max()
454 457 second = l.min()
455 458 else:
456 459 first = l.first()
457 460 second = l.last()
458 461
459 462 if first is None:
460 463 raise error.Abort(_('empty revision range'))
461 464 if (first == second and len(revs) >= 2
462 465 and not all(revrange(repo, [r]) for r in revs)):
463 466 raise error.Abort(_('empty revision on one side of range'))
464 467
465 468 # if top-level is range expression, the result must always be a pair
466 469 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
467 470 return repo.lookup(first), None
468 471
469 472 return repo.lookup(first), repo.lookup(second)
470 473
471 474 def revrange(repo, specs, localalias=None):
472 475 """Execute 1 to many revsets and return the union.
473 476
474 477 This is the preferred mechanism for executing revsets using user-specified
475 478 config options, such as revset aliases.
476 479
477 480 The revsets specified by ``specs`` will be executed via a chained ``OR``
478 481 expression. If ``specs`` is empty, an empty result is returned.
479 482
480 483 ``specs`` can contain integers, in which case they are assumed to be
481 484 revision numbers.
482 485
483 486 It is assumed the revsets are already formatted. If you have arguments
484 487 that need to be expanded in the revset, call ``revsetlang.formatspec()``
485 488 and pass the result as an element of ``specs``.
486 489
487 490 Specifying a single revset is allowed.
488 491
489 492 Returns a ``revset.abstractsmartset`` which is a list-like interface over
490 493 integer revisions.
491 494 """
492 495 allspecs = []
493 496 for spec in specs:
494 497 if isinstance(spec, int):
495 498 spec = revsetlang.formatspec('rev(%d)', spec)
496 499 allspecs.append(spec)
497 500 return repo.anyrevs(allspecs, user=True, localalias=localalias)
498 501
499 502 def meaningfulparents(repo, ctx):
500 503 """Return list of meaningful (or all if debug) parentrevs for rev.
501 504
502 505 For merges (two non-nullrev revisions) both parents are meaningful.
503 506 Otherwise the first parent revision is considered meaningful if it
504 507 is not the preceding revision.
505 508 """
506 509 parents = ctx.parents()
507 510 if len(parents) > 1:
508 511 return parents
509 512 if repo.ui.debugflag:
510 513 return [parents[0], repo['null']]
511 514 if parents[0].rev() >= intrev(ctx) - 1:
512 515 return []
513 516 return parents
514 517
515 518 def expandpats(pats):
516 519 '''Expand bare globs when running on windows.
517 520 On posix we assume it has already been done by sh.'''
518 521 if not util.expandglobs:
519 522 return list(pats)
520 523 ret = []
521 524 for kindpat in pats:
522 525 kind, pat = matchmod._patsplit(kindpat, None)
523 526 if kind is None:
524 527 try:
525 528 globbed = glob.glob(pat)
526 529 except re.error:
527 530 globbed = [pat]
528 531 if globbed:
529 532 ret.extend(globbed)
530 533 continue
531 534 ret.append(kindpat)
532 535 return ret
533 536
534 537 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
535 538 badfn=None):
536 539 '''Return a matcher and the patterns that were used.
537 540 The matcher will warn about bad matches, unless an alternate badfn callback
538 541 is provided.'''
539 542 if pats == ("",):
540 543 pats = []
541 544 if opts is None:
542 545 opts = {}
543 546 if not globbed and default == 'relpath':
544 547 pats = expandpats(pats or [])
545 548
546 549 def bad(f, msg):
547 550 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
548 551
549 552 if badfn is None:
550 553 badfn = bad
551 554
552 555 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
553 556 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
554 557
555 558 if m.always():
556 559 pats = []
557 560 return m, pats
558 561
559 562 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
560 563 badfn=None):
561 564 '''Return a matcher that will warn about bad matches.'''
562 565 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
563 566
564 567 def matchall(repo):
565 568 '''Return a matcher that will efficiently match everything.'''
566 569 return matchmod.always(repo.root, repo.getcwd())
567 570
568 571 def matchfiles(repo, files, badfn=None):
569 572 '''Return a matcher that will efficiently match exactly these files.'''
570 573 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
571 574
572 575 def parsefollowlinespattern(repo, rev, pat, msg):
573 576 """Return a file name from `pat` pattern suitable for usage in followlines
574 577 logic.
575 578 """
576 579 if not matchmod.patkind(pat):
577 580 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
578 581 else:
579 582 ctx = repo[rev]
580 583 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
581 584 files = [f for f in ctx if m(f)]
582 585 if len(files) != 1:
583 586 raise error.ParseError(msg)
584 587 return files[0]
585 588
586 589 def origpath(ui, repo, filepath):
587 590 '''customize where .orig files are created
588 591
589 592 Fetch user defined path from config file: [ui] origbackuppath = <path>
590 593 Fall back to default (filepath with .orig suffix) if not specified
591 594 '''
592 595 origbackuppath = ui.config('ui', 'origbackuppath')
593 596 if not origbackuppath:
594 597 return filepath + ".orig"
595 598
596 599 # Convert filepath from an absolute path into a path inside the repo.
597 600 filepathfromroot = util.normpath(os.path.relpath(filepath,
598 601 start=repo.root))
599 602
600 603 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
601 604 origbackupdir = origvfs.dirname(filepathfromroot)
602 605 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
603 606 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
604 607
605 608 # Remove any files that conflict with the backup file's path
606 609 for f in reversed(list(util.finddirs(filepathfromroot))):
607 610 if origvfs.isfileorlink(f):
608 611 ui.note(_('removing conflicting file: %s\n')
609 612 % origvfs.join(f))
610 613 origvfs.unlink(f)
611 614 break
612 615
613 616 origvfs.makedirs(origbackupdir)
614 617
615 618 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
616 619 ui.note(_('removing conflicting directory: %s\n')
617 620 % origvfs.join(filepathfromroot))
618 621 origvfs.rmtree(filepathfromroot, forcibly=True)
619 622
620 623 return origvfs.join(filepathfromroot)
621 624
622 625 class _containsnode(object):
623 626 """proxy __contains__(node) to container.__contains__ which accepts revs"""
624 627
625 628 def __init__(self, repo, revcontainer):
626 629 self._torev = repo.changelog.rev
627 630 self._revcontains = revcontainer.__contains__
628 631
629 632 def __contains__(self, node):
630 633 return self._revcontains(self._torev(node))
631 634
632 635 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
633 636 """do common cleanups when old nodes are replaced by new nodes
634 637
635 638 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
636 639 (we might also want to move working directory parent in the future)
637 640
638 641 By default, bookmark moves are calculated automatically from 'replacements',
639 642 but 'moves' can be used to override that. Also, 'moves' may include
640 643 additional bookmark moves that should not have associated obsmarkers.
641 644
642 645 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
643 646 have replacements. operation is a string, like "rebase".
644 647
645 648 metadata is a dictionary containing metadata to be stored in obsmarkers if
646 649 obsolescence is enabled.
647 650 """
648 651 if not replacements and not moves:
649 652 return
650 653
651 654 # translate mapping's other forms
652 655 if not util.safehasattr(replacements, 'items'):
653 656 replacements = {n: () for n in replacements}
654 657
655 658 # Calculate bookmark movements
656 659 if moves is None:
657 660 moves = {}
658 661 # Unfiltered repo is needed since nodes in replacements might be hidden.
659 662 unfi = repo.unfiltered()
660 663 for oldnode, newnodes in replacements.items():
661 664 if oldnode in moves:
662 665 continue
663 666 if len(newnodes) > 1:
664 667 # usually a split, take the one with biggest rev number
665 668 newnode = next(unfi.set('max(%ln)', newnodes)).node()
666 669 elif len(newnodes) == 0:
667 670 # move bookmark backwards
668 671 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
669 672 list(replacements)))
670 673 if roots:
671 674 newnode = roots[0].node()
672 675 else:
673 676 newnode = nullid
674 677 else:
675 678 newnode = newnodes[0]
676 679 moves[oldnode] = newnode
677 680
678 681 with repo.transaction('cleanup') as tr:
679 682 # Move bookmarks
680 683 bmarks = repo._bookmarks
681 684 bmarkchanges = []
682 685 allnewnodes = [n for ns in replacements.values() for n in ns]
683 686 for oldnode, newnode in moves.items():
684 687 oldbmarks = repo.nodebookmarks(oldnode)
685 688 if not oldbmarks:
686 689 continue
687 690 from . import bookmarks # avoid import cycle
688 691 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
689 692 (oldbmarks, hex(oldnode), hex(newnode)))
690 693 # Delete divergent bookmarks being parents of related newnodes
691 694 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
692 695 allnewnodes, newnode, oldnode)
693 696 deletenodes = _containsnode(repo, deleterevs)
694 697 for name in oldbmarks:
695 698 bmarkchanges.append((name, newnode))
696 699 for b in bookmarks.divergent2delete(repo, deletenodes, name):
697 700 bmarkchanges.append((b, None))
698 701
699 702 if bmarkchanges:
700 703 bmarks.applychanges(repo, tr, bmarkchanges)
701 704
702 705 # Obsolete or strip nodes
703 706 if obsolete.isenabled(repo, obsolete.createmarkersopt):
704 707 # If a node is already obsoleted, and we want to obsolete it
705 708 # without a successor, skip that obsolete request since it's
706 709 # unnecessary. That's the "if s or not isobs(n)" check below.
707 710 # Also sort the nodes in topological order, which might be useful for
708 711 # some obsstore logic.
709 712 # NOTE: the filtering and sorting might belong to createmarkers.
710 713 isobs = unfi.obsstore.successors.__contains__
711 714 torev = unfi.changelog.rev
712 715 sortfunc = lambda ns: torev(ns[0])
713 716 rels = [(unfi[n], tuple(unfi[m] for m in s))
714 717 for n, s in sorted(replacements.items(), key=sortfunc)
715 718 if s or not isobs(n)]
716 719 if rels:
717 720 obsolete.createmarkers(repo, rels, operation=operation,
718 721 metadata=metadata)
719 722 else:
720 723 from . import repair # avoid import cycle
721 724 tostrip = list(replacements)
722 725 if tostrip:
723 726 repair.delayedstrip(repo.ui, repo, tostrip, operation)
724 727
725 728 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
726 729 if opts is None:
727 730 opts = {}
728 731 m = matcher
729 732 if dry_run is None:
730 733 dry_run = opts.get('dry_run')
731 734 if similarity is None:
732 735 similarity = float(opts.get('similarity') or 0)
733 736
734 737 ret = 0
735 738 join = lambda f: os.path.join(prefix, f)
736 739
737 740 wctx = repo[None]
738 741 for subpath in sorted(wctx.substate):
739 742 submatch = matchmod.subdirmatcher(subpath, m)
740 743 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
741 744 sub = wctx.sub(subpath)
742 745 try:
743 746 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
744 747 ret = 1
745 748 except error.LookupError:
746 749 repo.ui.status(_("skipping missing subrepository: %s\n")
747 750 % join(subpath))
748 751
749 752 rejected = []
750 753 def badfn(f, msg):
751 754 if f in m.files():
752 755 m.bad(f, msg)
753 756 rejected.append(f)
754 757
755 758 badmatch = matchmod.badmatch(m, badfn)
756 759 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
757 760 badmatch)
758 761
759 762 unknownset = set(unknown + forgotten)
760 763 toprint = unknownset.copy()
761 764 toprint.update(deleted)
762 765 for abs in sorted(toprint):
763 766 if repo.ui.verbose or not m.exact(abs):
764 767 if abs in unknownset:
765 768 status = _('adding %s\n') % m.uipath(abs)
766 769 else:
767 770 status = _('removing %s\n') % m.uipath(abs)
768 771 repo.ui.status(status)
769 772
770 773 renames = _findrenames(repo, m, added + unknown, removed + deleted,
771 774 similarity)
772 775
773 776 if not dry_run:
774 777 _markchanges(repo, unknown + forgotten, deleted, renames)
775 778
776 779 for f in rejected:
777 780 if f in m.files():
778 781 return 1
779 782 return ret
780 783
781 784 def marktouched(repo, files, similarity=0.0):
782 785 '''Assert that files have somehow been operated upon. files are relative to
783 786 the repo root.'''
784 787 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
785 788 rejected = []
786 789
787 790 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
788 791
789 792 if repo.ui.verbose:
790 793 unknownset = set(unknown + forgotten)
791 794 toprint = unknownset.copy()
792 795 toprint.update(deleted)
793 796 for abs in sorted(toprint):
794 797 if abs in unknownset:
795 798 status = _('adding %s\n') % abs
796 799 else:
797 800 status = _('removing %s\n') % abs
798 801 repo.ui.status(status)
799 802
800 803 renames = _findrenames(repo, m, added + unknown, removed + deleted,
801 804 similarity)
802 805
803 806 _markchanges(repo, unknown + forgotten, deleted, renames)
804 807
805 808 for f in rejected:
806 809 if f in m.files():
807 810 return 1
808 811 return 0
809 812
810 813 def _interestingfiles(repo, matcher):
811 814 '''Walk dirstate with matcher, looking for files that addremove would care
812 815 about.
813 816
814 817 This is different from dirstate.status because it doesn't care about
815 818 whether files are modified or clean.'''
816 819 added, unknown, deleted, removed, forgotten = [], [], [], [], []
817 820 audit_path = pathutil.pathauditor(repo.root, cached=True)
818 821
819 822 ctx = repo[None]
820 823 dirstate = repo.dirstate
821 824 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
822 825 unknown=True, ignored=False, full=False)
823 826 for abs, st in walkresults.iteritems():
824 827 dstate = dirstate[abs]
825 828 if dstate == '?' and audit_path.check(abs):
826 829 unknown.append(abs)
827 830 elif dstate != 'r' and not st:
828 831 deleted.append(abs)
829 832 elif dstate == 'r' and st:
830 833 forgotten.append(abs)
831 834 # for finding renames
832 835 elif dstate == 'r' and not st:
833 836 removed.append(abs)
834 837 elif dstate == 'a':
835 838 added.append(abs)
836 839
837 840 return added, unknown, deleted, removed, forgotten
838 841
839 842 def _findrenames(repo, matcher, added, removed, similarity):
840 843 '''Find renames from removed files to added ones.'''
841 844 renames = {}
842 845 if similarity > 0:
843 846 for old, new, score in similar.findrenames(repo, added, removed,
844 847 similarity):
845 848 if (repo.ui.verbose or not matcher.exact(old)
846 849 or not matcher.exact(new)):
847 850 repo.ui.status(_('recording removal of %s as rename to %s '
848 851 '(%d%% similar)\n') %
849 852 (matcher.rel(old), matcher.rel(new),
850 853 score * 100))
851 854 renames[new] = old
852 855 return renames
853 856
854 857 def _markchanges(repo, unknown, deleted, renames):
855 858 '''Marks the files in unknown as added, the files in deleted as removed,
856 859 and the files in renames as copied.'''
857 860 wctx = repo[None]
858 861 with repo.wlock():
859 862 wctx.forget(deleted)
860 863 wctx.add(unknown)
861 864 for new, old in renames.iteritems():
862 865 wctx.copy(old, new)
863 866
864 867 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
865 868 """Update the dirstate to reflect the intent of copying src to dst. For
866 869 different reasons it might not end with dst being marked as copied from src.
867 870 """
868 871 origsrc = repo.dirstate.copied(src) or src
869 872 if dst == origsrc: # copying back a copy?
870 873 if repo.dirstate[dst] not in 'mn' and not dryrun:
871 874 repo.dirstate.normallookup(dst)
872 875 else:
873 876 if repo.dirstate[origsrc] == 'a' and origsrc == src:
874 877 if not ui.quiet:
875 878 ui.warn(_("%s has not been committed yet, so no copy "
876 879 "data will be stored for %s.\n")
877 880 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
878 881 if repo.dirstate[dst] in '?r' and not dryrun:
879 882 wctx.add([dst])
880 883 elif not dryrun:
881 884 wctx.copy(origsrc, dst)
882 885
883 886 def readrequires(opener, supported):
884 887 '''Reads and parses .hg/requires and checks if all entries found
885 888 are in the list of supported features.'''
886 889 requirements = set(opener.read("requires").splitlines())
887 890 missings = []
888 891 for r in requirements:
889 892 if r not in supported:
890 893 if not r or not r[0:1].isalnum():
891 894 raise error.RequirementError(_(".hg/requires file is corrupt"))
892 895 missings.append(r)
893 896 missings.sort()
894 897 if missings:
895 898 raise error.RequirementError(
896 899 _("repository requires features unknown to this Mercurial: %s")
897 900 % " ".join(missings),
898 901 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
899 902 " for more information"))
900 903 return requirements
901 904
902 905 def writerequires(opener, requirements):
903 906 with opener('requires', 'w') as fp:
904 907 for r in sorted(requirements):
905 908 fp.write("%s\n" % r)
906 909
907 910 class filecachesubentry(object):
908 911 def __init__(self, path, stat):
909 912 self.path = path
910 913 self.cachestat = None
911 914 self._cacheable = None
912 915
913 916 if stat:
914 917 self.cachestat = filecachesubentry.stat(self.path)
915 918
916 919 if self.cachestat:
917 920 self._cacheable = self.cachestat.cacheable()
918 921 else:
919 922 # None means we don't know yet
920 923 self._cacheable = None
921 924
922 925 def refresh(self):
923 926 if self.cacheable():
924 927 self.cachestat = filecachesubentry.stat(self.path)
925 928
926 929 def cacheable(self):
927 930 if self._cacheable is not None:
928 931 return self._cacheable
929 932
930 933 # we don't know yet, assume it is for now
931 934 return True
932 935
933 936 def changed(self):
934 937 # no point in going further if we can't cache it
935 938 if not self.cacheable():
936 939 return True
937 940
938 941 newstat = filecachesubentry.stat(self.path)
939 942
940 943 # we may not know if it's cacheable yet, check again now
941 944 if newstat and self._cacheable is None:
942 945 self._cacheable = newstat.cacheable()
943 946
944 947 # check again
945 948 if not self._cacheable:
946 949 return True
947 950
948 951 if self.cachestat != newstat:
949 952 self.cachestat = newstat
950 953 return True
951 954 else:
952 955 return False
953 956
954 957 @staticmethod
955 958 def stat(path):
956 959 try:
957 960 return util.cachestat(path)
958 961 except OSError as e:
959 962 if e.errno != errno.ENOENT:
960 963 raise
961 964
962 965 class filecacheentry(object):
963 966 def __init__(self, paths, stat=True):
964 967 self._entries = []
965 968 for path in paths:
966 969 self._entries.append(filecachesubentry(path, stat))
967 970
968 971 def changed(self):
969 972 '''true if any entry has changed'''
970 973 for entry in self._entries:
971 974 if entry.changed():
972 975 return True
973 976 return False
974 977
975 978 def refresh(self):
976 979 for entry in self._entries:
977 980 entry.refresh()
978 981
979 982 class filecache(object):
980 983 '''A property-like decorator that tracks files under .hg/ for updates.
981 984
982 985 Records stat info when called in _filecache.
983 986
984 987 On subsequent calls, compares old stat info with new info, and recreates the
985 988 object when any of the files changes, updating the new stat info in
986 989 _filecache.
987 990
988 991 Mercurial either atomically renames or appends files under .hg,
989 992 so to ensure the cache is reliable we need the filesystem to be able
990 993 to tell us if a file has been replaced. If it can't, we fall back to
991 994 recreating the object on every call (essentially the same behavior as
992 995 propertycache).
993 996
994 997 '''
995 998 def __init__(self, *paths):
996 999 self.paths = paths
997 1000
998 1001 def join(self, obj, fname):
999 1002 """Used to compute the runtime path of a cached file.
1000 1003
1001 1004 Users should subclass filecache and provide their own version of this
1002 1005 function to call the appropriate join function on 'obj' (an instance
1003 1006 of the class whose member function was decorated).
1004 1007 """
1005 1008 raise NotImplementedError
1006 1009
1007 1010 def __call__(self, func):
1008 1011 self.func = func
1009 1012 self.name = func.__name__.encode('ascii')
1010 1013 return self
1011 1014
1012 1015 def __get__(self, obj, type=None):
1013 1016 # if accessed on the class, return the descriptor itself.
1014 1017 if obj is None:
1015 1018 return self
1016 1019 # do we need to check if the file changed?
1017 1020 if self.name in obj.__dict__:
1018 1021 assert self.name in obj._filecache, self.name
1019 1022 return obj.__dict__[self.name]
1020 1023
1021 1024 entry = obj._filecache.get(self.name)
1022 1025
1023 1026 if entry:
1024 1027 if entry.changed():
1025 1028 entry.obj = self.func(obj)
1026 1029 else:
1027 1030 paths = [self.join(obj, path) for path in self.paths]
1028 1031
1029 1032 # We stat -before- creating the object so our cache doesn't lie if
1030 1033 # a writer modified it between the time we read and stat
1031 1034 entry = filecacheentry(paths, True)
1032 1035 entry.obj = self.func(obj)
1033 1036
1034 1037 obj._filecache[self.name] = entry
1035 1038
1036 1039 obj.__dict__[self.name] = entry.obj
1037 1040 return entry.obj
1038 1041
1039 1042 def __set__(self, obj, value):
1040 1043 if self.name not in obj._filecache:
1041 1044 # we add an entry for the missing value because X in __dict__
1042 1045 # implies X in _filecache
1043 1046 paths = [self.join(obj, path) for path in self.paths]
1044 1047 ce = filecacheentry(paths, False)
1045 1048 obj._filecache[self.name] = ce
1046 1049 else:
1047 1050 ce = obj._filecache[self.name]
1048 1051
1049 1052 ce.obj = value # update cached copy
1050 1053 obj.__dict__[self.name] = value # update copy returned by obj.x
1051 1054
1052 1055 def __delete__(self, obj):
1053 1056 try:
1054 1057 del obj.__dict__[self.name]
1055 1058 except KeyError:
1056 1059 raise AttributeError(self.name)
1057 1060
1058 1061 def extdatasource(repo, source):
1059 1062 """Gather a map of rev -> value dict from the specified source
1060 1063
1061 1064 A source spec is treated as a URL, with a special case shell: type
1062 1065 for parsing the output from a shell command.
1063 1066
1064 1067 The data is parsed as a series of newline-separated records where
1065 1068 each record is a revision specifier optionally followed by a space
1066 1069 and a freeform string value. If the revision is known locally, it
1067 1070 is converted to a rev, otherwise the record is skipped.
1068 1071
1069 1072 Note that both key and value are treated as UTF-8 and converted to
1070 1073 the local encoding. This allows uniformity between local and
1071 1074 remote data sources.
1072 1075 """
1073 1076
1074 1077 spec = repo.ui.config("extdata", source)
1075 1078 if not spec:
1076 1079 raise error.Abort(_("unknown extdata source '%s'") % source)
1077 1080
1078 1081 data = {}
1079 1082 src = proc = None
1080 1083 try:
1081 1084 if spec.startswith("shell:"):
1082 1085 # external commands should be run relative to the repo root
1083 1086 cmd = spec[6:]
1084 1087 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1085 1088 close_fds=util.closefds,
1086 1089 stdout=subprocess.PIPE, cwd=repo.root)
1087 1090 src = proc.stdout
1088 1091 else:
1089 1092 # treat as a URL or file
1090 1093 src = url.open(repo.ui, spec)
1091 1094 for l in src:
1092 1095 if " " in l:
1093 1096 k, v = l.strip().split(" ", 1)
1094 1097 else:
1095 1098 k, v = l.strip(), ""
1096 1099
1097 1100 k = encoding.tolocal(k)
1098 1101 try:
1099 1102 data[repo[k].rev()] = encoding.tolocal(v)
1100 1103 except (error.LookupError, error.RepoLookupError):
1101 1104 pass # we ignore data for nodes that don't exist locally
1102 1105 finally:
1103 1106 if proc:
1104 1107 proc.communicate()
1105 1108 if src:
1106 1109 src.close()
1107 1110 if proc and proc.returncode != 0:
1108 1111 raise error.Abort(_("extdata command '%s' failed: %s")
1109 1112 % (cmd, util.explainexit(proc.returncode)[0]))
1110 1113
1111 1114 return data
1112 1115
1113 1116 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1114 1117 if lock is None:
1115 1118 raise error.LockInheritanceContractViolation(
1116 1119 'lock can only be inherited while held')
1117 1120 if environ is None:
1118 1121 environ = {}
1119 1122 with lock.inherit() as locker:
1120 1123 environ[envvar] = locker
1121 1124 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1122 1125
1123 1126 def wlocksub(repo, cmd, *args, **kwargs):
1124 1127 """run cmd as a subprocess that allows inheriting repo's wlock
1125 1128
1126 1129 This can only be called while the wlock is held. This takes all the
1127 1130 arguments that ui.system does, and returns the exit code of the
1128 1131 subprocess."""
1129 1132 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1130 1133 **kwargs)
1131 1134
1132 1135 def gdinitconfig(ui):
1133 1136 """helper function to know if a repo should be created as general delta
1134 1137 """
1135 1138 # experimental config: format.generaldelta
1136 1139 return (ui.configbool('format', 'generaldelta')
1137 1140 or ui.configbool('format', 'usegeneraldelta'))
1138 1141
1139 1142 def gddeltaconfig(ui):
1140 1143 """helper function to know if incoming delta should be optimised
1141 1144 """
1142 1145 # experimental config: format.generaldelta
1143 1146 return ui.configbool('format', 'generaldelta')
1144 1147
1145 1148 class simplekeyvaluefile(object):
1146 1149 """A simple file with key=value lines
1147 1150
1148 1151 Keys must be alphanumeric and start with a letter; values must not
1149 1152 contain '\n' characters"""
1150 1153 firstlinekey = '__firstline'
1151 1154
1152 1155 def __init__(self, vfs, path, keys=None):
1153 1156 self.vfs = vfs
1154 1157 self.path = path
1155 1158
1156 1159 def read(self, firstlinenonkeyval=False):
1157 1160 """Read the contents of a simple key-value file
1158 1161
1159 1162 'firstlinenonkeyval' indicates whether the first line of file should
1160 1163 be treated as a key-value pair or returned fully under the
1161 1164 __firstline key."""
1162 1165 lines = self.vfs.readlines(self.path)
1163 1166 d = {}
1164 1167 if firstlinenonkeyval:
1165 1168 if not lines:
1166 1169 e = _("empty simplekeyvalue file")
1167 1170 raise error.CorruptedState(e)
1168 1171 # we don't want to include '\n' in the __firstline
1169 1172 d[self.firstlinekey] = lines[0][:-1]
1170 1173 del lines[0]
1171 1174
1172 1175 try:
1173 1176 # the 'if line.strip()' part prevents us from failing on empty
1174 1177 # lines which only contain '\n' and therefore are not skipped
1175 1178 # by 'if line'
1176 1179 updatedict = dict(line[:-1].split('=', 1) for line in lines
1177 1180 if line.strip())
1178 1181 if self.firstlinekey in updatedict:
1179 1182 e = _("%r can't be used as a key")
1180 1183 raise error.CorruptedState(e % self.firstlinekey)
1181 1184 d.update(updatedict)
1182 1185 except ValueError as e:
1183 1186 raise error.CorruptedState(str(e))
1184 1187 return d
1185 1188
1186 1189 def write(self, data, firstline=None):
1187 1190 """Write key=>value mapping to a file
1188 1191 data is a dict. Keys must be alphanumeric and start with a letter.
1189 1192 Values must not contain newline characters.
1190 1193
1191 1194 If 'firstline' is not None, it is written to file before
1192 1195 everything else, as it is, not in a key=value form"""
1193 1196 lines = []
1194 1197 if firstline is not None:
1195 1198 lines.append('%s\n' % firstline)
1196 1199
1197 1200 for k, v in data.items():
1198 1201 if k == self.firstlinekey:
1199 1202 e = "key name '%s' is reserved" % self.firstlinekey
1200 1203 raise error.ProgrammingError(e)
1201 1204 if not k[0:1].isalpha():
1202 1205 e = "keys must start with a letter in a key-value file"
1203 1206 raise error.ProgrammingError(e)
1204 1207 if not k.isalnum():
1205 1208 e = "invalid key name in a simple key-value file"
1206 1209 raise error.ProgrammingError(e)
1207 1210 if '\n' in v:
1208 1211 e = "invalid value in a simple key-value file"
1209 1212 raise error.ProgrammingError(e)
1210 1213 lines.append("%s=%s\n" % (k, v))
1211 1214 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1212 1215 fp.write(''.join(lines))
1213 1216
1214 1217 _reportobsoletedsource = [
1215 1218 'debugobsolete',
1216 1219 'pull',
1217 1220 'push',
1218 1221 'serve',
1219 1222 'unbundle',
1220 1223 ]
1221 1224
1222 1225 _reportnewcssource = [
1223 1226 'pull',
1224 1227 'unbundle',
1225 1228 ]
1226 1229
1227 1230 # a list of (repo, ctx, files) functions called by various commands to allow
1228 1231 # extensions to ensure the corresponding files are available locally, before the
1229 1232 # command uses them.
1230 1233 fileprefetchhooks = util.hooks()
1231 1234
1232 1235 # A marker that tells the evolve extension to suppress its own reporting
1233 1236 _reportstroubledchangesets = True
1234 1237
1235 1238 def registersummarycallback(repo, otr, txnname=''):
1236 1239 """register a callback to issue a summary after the transaction is closed
1237 1240 """
1238 1241 def txmatch(sources):
1239 1242 return any(txnname.startswith(source) for source in sources)
1240 1243
1241 1244 categories = []
1242 1245
1243 1246 def reportsummary(func):
1244 1247 """decorator for report callbacks."""
1245 1248 # The repoview life cycle is shorter than the one of the actual
1246 1249 # underlying repository. So the filtered object can die before the
1247 1250 # weakref is used, leading to trouble. We keep a reference to the
1248 1251 # unfiltered object and restore the filtering when retrieving the
1249 1252 # repository through the weakref.
1250 1253 filtername = repo.filtername
1251 1254 reporef = weakref.ref(repo.unfiltered())
1252 1255 def wrapped(tr):
1253 1256 repo = reporef()
1254 1257 if filtername:
1255 1258 repo = repo.filtered(filtername)
1256 1259 func(repo, tr)
1257 1260 newcat = '%02i-txnreport' % len(categories)
1258 1261 otr.addpostclose(newcat, wrapped)
1259 1262 categories.append(newcat)
1260 1263 return wrapped
1261 1264
1262 1265 if txmatch(_reportobsoletedsource):
1263 1266 @reportsummary
1264 1267 def reportobsoleted(repo, tr):
1265 1268 obsoleted = obsutil.getobsoleted(repo, tr)
1266 1269 if obsoleted:
1267 1270 repo.ui.status(_('obsoleted %i changesets\n')
1268 1271 % len(obsoleted))
1269 1272
1270 1273 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1271 1274 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1272 1275 instabilitytypes = [
1273 1276 ('orphan', 'orphan'),
1274 1277 ('phase-divergent', 'phasedivergent'),
1275 1278 ('content-divergent', 'contentdivergent'),
1276 1279 ]
1277 1280
1278 1281 def getinstabilitycounts(repo):
1279 1282 filtered = repo.changelog.filteredrevs
1280 1283 counts = {}
1281 1284 for instability, revset in instabilitytypes:
1282 1285 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1283 1286 filtered)
1284 1287 return counts
1285 1288
1286 1289 oldinstabilitycounts = getinstabilitycounts(repo)
1287 1290 @reportsummary
1288 1291 def reportnewinstabilities(repo, tr):
1289 1292 newinstabilitycounts = getinstabilitycounts(repo)
1290 1293 for instability, revset in instabilitytypes:
1291 1294 delta = (newinstabilitycounts[instability] -
1292 1295 oldinstabilitycounts[instability])
1293 1296 if delta > 0:
1294 1297 repo.ui.warn(_('%i new %s changesets\n') %
1295 1298 (delta, instability))
1296 1299
1297 1300 if txmatch(_reportnewcssource):
1298 1301 @reportsummary
1299 1302 def reportnewcs(repo, tr):
1300 1303 """Report the range of new revisions pulled/unbundled."""
1301 1304 newrevs = tr.changes.get('revs', xrange(0, 0))
1302 1305 if not newrevs:
1303 1306 return
1304 1307
1305 1308 # Compute the bounds of new revisions' range, excluding obsoletes.
1306 1309 unfi = repo.unfiltered()
1307 1310 revs = unfi.revs('%ld and not obsolete()', newrevs)
1308 1311 if not revs:
1309 1312 # Got only obsoletes.
1310 1313 return
1311 1314 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1312 1315
1313 1316 if minrev == maxrev:
1314 1317 revrange = minrev
1315 1318 else:
1316 1319 revrange = '%s:%s' % (minrev, maxrev)
1317 1320 repo.ui.status(_('new changesets %s\n') % revrange)
1318 1321
1319 1322 def nodesummaries(repo, nodes, maxnumnodes=4):
1320 1323 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1321 1324 return ' '.join(short(h) for h in nodes)
1322 1325 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1323 1326 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1324 1327
1325 1328 def enforcesinglehead(repo, tr, desc):
1326 1329 """check that no named branch has multiple heads"""
1327 1330 if desc in ('strip', 'repair'):
1328 1331 # skip the logic during strip
1329 1332 return
1330 1333 visible = repo.filtered('visible')
1331 1334 # possible improvement: we could restrict the check to affected branch
1332 1335 for name, heads in visible.branchmap().iteritems():
1333 1336 if len(heads) > 1:
1334 1337 msg = _('rejecting multiple heads on branch "%s"')
1335 1338 msg %= name
1336 1339 hint = _('%d heads: %s')
1337 1340 hint %= (len(heads), nodesummaries(repo, heads))
1338 1341 raise error.Abort(msg, hint=hint)
1339 1342
1340 1343 def wrapconvertsink(sink):
1341 1344 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1342 1345 before it is used, whether or not the convert extension was formally loaded.
1343 1346 """
1344 1347 return sink
1345 1348
1346 1349 def unhidehashlikerevs(repo, specs, hiddentype):
1347 1350 """parse the user specs and unhide changesets whose hash or revision number
1348 1351 is passed.
1349 1352
1350 1353 hiddentype can be: 1) 'warn': warn while unhiding changesets
1351 1354 2) 'nowarn': don't warn while unhiding changesets
1352 1355
1353 1356 returns a repo object with the required changesets unhidden
1354 1357 """
1355 1358 if not repo.filtername or not repo.ui.configbool('experimental',
1356 1359 'directaccess'):
1357 1360 return repo
1358 1361
1359 1362 if repo.filtername not in ('visible', 'visible-hidden'):
1360 1363 return repo
1361 1364
1362 1365 symbols = set()
1363 1366 for spec in specs:
1364 1367 try:
1365 1368 tree = revsetlang.parse(spec)
1366 1369 except error.ParseError: # will be reported by scmutil.revrange()
1367 1370 continue
1368 1371
1369 1372 symbols.update(revsetlang.gethashlikesymbols(tree))
1370 1373
1371 1374 if not symbols:
1372 1375 return repo
1373 1376
1374 1377 revs = _getrevsfromsymbols(repo, symbols)
1375 1378
1376 1379 if not revs:
1377 1380 return repo
1378 1381
1379 1382 if hiddentype == 'warn':
1380 1383 unfi = repo.unfiltered()
1381 1384 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1382 1385 repo.ui.warn(_("warning: accessing hidden changesets for write "
1383 1386 "operation: %s\n") % revstr)
1384 1387
1385 1388 # we have to use a new filtername to separate branch/tags caches until we can
1386 1389 # disable these caches when revisions are dynamically pinned.
1387 1390 return repo.filtered('visible-hidden', revs)
1388 1391
1389 1392 def _getrevsfromsymbols(repo, symbols):
1390 1393 """parse the list of symbols and returns a set of revision numbers of hidden
1391 1394 changesets present in symbols"""
1392 1395 revs = set()
1393 1396 unfi = repo.unfiltered()
1394 1397 unficl = unfi.changelog
1395 1398 cl = repo.changelog
1396 1399 tiprev = len(unficl)
1397 1400 pmatch = unficl._partialmatch
1398 1401 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1399 1402 for s in symbols:
1400 1403 try:
1401 1404 n = int(s)
1402 1405 if n <= tiprev:
1403 1406 if not allowrevnums:
1404 1407 continue
1405 1408 else:
1406 1409 if n not in cl:
1407 1410 revs.add(n)
1408 1411 continue
1409 1412 except ValueError:
1410 1413 pass
1411 1414
1412 1415 try:
1413 1416 s = pmatch(s)
1414 1417 except error.LookupError:
1415 1418 s = None
1416 1419
1417 1420 if s is not None:
1418 1421 rev = unficl.rev(s)
1419 1422 if rev not in cl:
1420 1423 revs.add(rev)
1421 1424
1422 1425 return revs
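For readers skimming the surrounding file rather than the one-hunk change:
the filecache decorator defined above (the class beginning at old line 979)
is the subtlest piece here. The toy below mimics its stat-guarded
recomputation with plain os.stat in place of Mercurial's util.cachestat;
the names and the mtime/size cache key are illustrative assumptions, not
Mercurial API:

import os

class statcached(object):
    # toy analogue of scmutil.filecache: recompute a property only when
    # the backing file's stat information changes
    def __init__(self, path):
        self.path = path

    def __call__(self, func):
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        try:
            st = os.stat(self.path)
            key = (st.st_mtime, st.st_size)  # crude; util.cachestat is richer
        except OSError:
            key = None
        cachekey = '_fc_' + self.name  # avoid shadowing the descriptor
        cached = obj.__dict__.get(cachekey)
        if cached is not None and cached[0] == key:
            return cached[1]
        value = self.func(obj)  # first access, or the file changed
        obj.__dict__[cachekey] = (key, value)
        return value

class toyrepo(object):
    @statcached('requires.txt')
    def requirements(self):
        with open('requires.txt') as fp:
            return set(fp.read().splitlines())

with open('requires.txt', 'w') as fp:
    fp.write('revlogv1\nstore\n')
print(toyrepo().requirements)  # {'revlogv1', 'store'} (order may vary)

Like the real decorator, a stale stat (same mtime and size after a quick
rewrite) would hide a change, which is why Mercurial wraps the check in
util.cachestat and its cacheable() probe instead of raw os.stat.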