##// END OF EJS Templates
scmutil: 0-pad transaction report callback category...
Martin von Zweigbergk -
r35766:963a611b default
parent child Browse files
Show More
@@ -1,1415 +1,1415 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    # Each status category is exposed as a read-only property over the
    # corresponding tuple slot.
    modified = property(lambda self: self[0],
                        doc='''files that have been modified''')

    added = property(lambda self: self[1],
                     doc='''files that have been added''')

    removed = property(lambda self: self[2],
                       doc='''files that have been removed''')

    deleted = property(
        lambda self: self[3],
        doc='''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        ''')

    unknown = property(lambda self: self[4],
                       doc='''files not in the dirstate that are not ignored''')

    ignored = property(
        lambda self: self[5],
        doc='''files not in the dirstate that are ignored (by _dirignore())''')

    clean = property(lambda self: self[6],
                     doc='''files that have not been modified''')

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
103 103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # ctx1. Subpaths only present in ctx2 matter when .hgsub has been
    # modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # only count live (non-extinct) secret changesets
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
145 145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    ui is used to print error messages. Returns func()'s result on
    success; otherwise returns 1 for InterventionRequired, the exit code
    for SystemExit, and -1 for every other handled error. Unhandled
    exceptions propagate to the caller.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Print the traceback first (honors --traceback), then let the
            # outer handlers below classify the exception.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # basestring is the Python 2 common base of str and unicode
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # distinct exit code: the user must do something and retry
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is the module that failed to import
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like: has a response code
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-like: has a wrapped reason
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
256 256
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not a valid name for a new label (branch, bookmark,
    tag, ...).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
270 270
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for forbidden in ('\r', '\n'):
        if forbidden in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
275 275
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
287 287
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    # reject anything that is neither a boolean nor a recognized keyword
    recognized = warn or abort or lval == 'ignore'
    if bval is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
300 300
class casecollisionauditor(object):
    '''Warn about (or abort on) files whose names collide case-insensitively
    with files already known to the dirstate.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        collides = fl in self._loweredfiles and f not in self._dirstate
        if collides:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
324 324
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
348 348
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    # Only errors on the top-level path are fatal; failures deeper in the
    # tree are silently skipped by os.walk.
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        # adddir() returns True for directories not seen before and records
        # their stat result, so symlink cycles are visited only once.
        def adddir(dirlst, dirname):
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot dedupe, so symlinks are not followed
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse through the link target ourselves, since
                        # os.walk does not follow symlinks
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
396 396
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is not None:
        return node
    # the working directory has no real node; use the sentinel id
    return wdirid
403 403
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is not None:
        return rev
    # working directory: substitute the large sentinel revision number
    return wdirrev
411 411
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
417 417
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full 40-digit hash in debug mode, abbreviated hash otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
425 425
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revspec to a changectx, falling back to `default`
    when the spec is empty (but not when it is the integer 0).'''
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
434 434
def _pairspec(revspec):
    # True when the top-level revset operator is one of the range forms,
    # meaning the expression denotes a pair of revisions.
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
438 438
def revpair(repo, revs):
    '''Resolve a list of revset strings to a (first, second) pair of nodes
    (as returned by repo.lookup); second may be None for a single revision.'''
    if not revs:
        # no revisions given: first parent of the working directory vs. None
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        # unordered smartset: take the endpoints in iteration order
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        # several specs collapsed to one revision because at least one of
        # them resolved to nothing
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
468 468
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    def _tospec(spec):
        # bare integers are treated as revision numbers
        if isinstance(spec, int):
            return revsetlang.formatspec('rev(%d)', spec)
        return spec

    allspecs = [_tospec(spec) for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
496 496
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() < intrev(ctx) - 1:
        # non-consecutive parent: worth showing
        return parents
    return []
512 512
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.

    Patterns with an explicit kind prefix (e.g. "glob:", "re:") are passed
    through untouched; only bare patterns are globbed. A bare pattern that
    matches nothing is also kept as-is.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # an invalid glob pattern: fall back to the literal name
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret
531 531
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a single empty string means "no patterns"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over `m`, which is assigned below; the matcher only
        # invokes badfn after construction, so the late binding is safe.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means the patterns were effectively empty
        pats = []
    return m, pats
556 556
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
561 561
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # "always" matchers skip per-file pattern evaluation entirely
    return matchmod.always(repo.root, repo.getcwd())
565 565
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matchers do set membership on `files`, no pattern compilation
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
569 569
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises ParseError(msg) unless the pattern resolves to exactly one file.
    """
    if not matchmod.patkind(pat):
        # plain path: just normalize it against the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
583 583
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path the backup should be written to, creating the
    backup directory (and clearing conflicting entries) as a side effect.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the first (deepest-created) conflict can exist;
                # everything below it was already impossible
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) occupying the backup file's own path
    # must also be cleared out
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
619 619
620 620 class _containsnode(object):
621 621 """proxy __contains__(node) to container.__contains__ which accepts revs"""
622 622
623 623 def __init__(self, repo, revcontainer):
624 624 self._torev = repo.changelog.rev
625 625 self._revcontains = revcontainer.__contains__
626 626
627 627 def __contains__(self, node):
628 628 return self._revcontains(self._torev(node))
629 629
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms (a bare iterable becomes a mapping to
    # empty successor tuples)
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied move wins over the computed one
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (name, None) requests deletion of that bookmark
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: physically strip the replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
722 722
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule unknown files for addition, missing files for removal, and
    record renames detected by similarity (the "hg addremove" core).

    prefix is prepended to file names in subrepo status messages. Returns
    1 if any explicitly-matched file was rejected or a subrepo addremove
    failed, 0 otherwise.'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into requested subrepositories first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher reports as bad; only explicitly-listed ones
    # count as failures (checked at the end)
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
778 778
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the given files was rejected by the matcher, 0
    otherwise.'''
    # The badfn lambda closes over `rejected`, assigned on the next line;
    # it is only invoked later, during the dirstate walk, so the late
    # binding is safe.
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
807 807
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the single-letter dirstate code ('?' untracked,
        # 'r' removed, 'a' added); a falsy `st` means the file is not
        # present on disk.
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present again on disk
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
836 836
837 837 def _findrenames(repo, matcher, added, removed, similarity):
838 838 '''Find renames from removed files to added ones.'''
839 839 renames = {}
840 840 if similarity > 0:
841 841 for old, new, score in similar.findrenames(repo, added, removed,
842 842 similarity):
843 843 if (repo.ui.verbose or not matcher.exact(old)
844 844 or not matcher.exact(new)):
845 845 repo.ui.status(_('recording removal of %s as rename to %s '
846 846 '(%d%% similar)\n') %
847 847 (matcher.rel(old), matcher.rel(new),
848 848 score * 100))
849 849 renames[new] = old
850 850 return renames
851 851
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
861 861
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src is itself a copy, track back to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm'/'n' are merged/normal dirstate states; anything else needs a
        # refresh so the next status is correct
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed: there is no
            # revision to record copy data against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
880 880
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for feature in requirements:
        if feature in supported:
            continue
        # a requirement must be a non-empty token starting with an
        # alphanumeric character; anything else means a damaged file
        if not feature or not feature[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(feature)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
899 899
def writerequires(opener, requirements):
    """Write 'requirements' to the 'requires' file, sorted, one per line."""
    payload = ''.join("%s\n" % entry for entry in sorted(requirements))
    with opener('requires', 'w') as fp:
        fp.write(payload)
904 904
class filecachesubentry(object):
    """Stat-based change detector for a single file path.

    Records the stat info of 'path' and answers, via changed(), whether
    the file on disk has since been replaced or modified."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means "not yet known whether stat info is trustworthy here"
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-record the current stat info, if stat info can be trusted."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether stat comparisons are meaningful; assume yes while unknown."""
        return True if self._cacheable is None else self._cacheable

    def changed(self):
        """True if the file's stat info differs from the recorded one."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not have known whether it was cacheable; settle that now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again after the possible update above
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat(path), or None if the file is missing."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
959 959
class filecacheentry(object):
    """A bundle of filecachesubentry objects checked as a single unit."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
976 976
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative file names whose stat info guards the cached value;
        # they are resolved to real paths via join() at access time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        # (the name doubles as the key in obj.__dict__ and obj._filecache)
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # obj.__dict__ acts as the per-instance fast path: once set, this
        # non-data path is not consulted again until invalidation deletes it
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # stale? recreate the value and keep the refreshed stat info
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # (stat=False: an assigned value was not read from disk, so
            # there is no stat info to trust yet)
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # invalidate: drop the fast-path copy so the next __get__ re-checks
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1055 1055
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            # NOTE(review): shell=True is intentional here — 'spec' comes
            # from the user's own configuration, not from untrusted input
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # each record is "<revspec>[ <value>]\n"
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child process and close the stream, even when
        # parsing above raised
        if proc:
            proc.communicate()
        if src:
            src.close()
    # report a failing shell command only after its output was consumed
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1110 1110
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' through ui.system() while making 'lock' inheritable.

    The token identifying the inherited lock is exposed to the child
    process through the 'envvar' environment variable.  Raises
    LockInheritanceContractViolation when 'lock' is not currently held.
    Note that a caller-supplied 'environ' dict is mutated in place.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1120 1120
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1129 1129
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1136 1136
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1142 1142
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a dict mapping keys to values.

        If 'firstlinenonkeyval' is true, the first line is not parsed as a
        key-value pair but returned fully (minus its trailing newline)
        under the __firstline key."""
        lines = self.vfs.readlines(self.path)
        parsed = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            parsed[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # lines holding only a '\n' are skipped via the strip() test;
            # everything else must be a 'key=value\n' record
            records = dict(line[:-1].split('=', 1) for line in lines
                           if line.strip())
            if self.firstlinekey in records:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            parsed.update(records)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return parsed

    def write(self, data, firstline=None):
        """Serialize the dict 'data' as key=value lines.

        Keys must be alphanumerical and start with a letter; values must
        not contain newline characters.  If 'firstline' is not None it is
        written verbatim before everything else, not in key=value form."""
        out = [] if firstline is None else ['%s\n' % firstline]
        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1211 1211
# transaction names (matched by prefix in registersummarycallback) after
# which a summary of obsoleted changesets is reported
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) after which the range of incoming
# changesets is reported
_reportnewcssource = [
    'pull',
    'unbundle',
]

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1227 1227
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # transaction names are matched by prefix against the source lists
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # 0-pad the sequence number so callbacks keep firing in registration
        # order even once there are ten or more of them ('02-...' sorts
        # before '10-...', while ' 2-...' would not)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            """Report how many changesets were obsoleted."""
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions of each type, ignoring filtered ones
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report only the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            """Warn about instabilities introduced by the transaction."""
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1311 1311
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of 'nodes', abbreviating long lists."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1317 1317
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) < 2:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1332 1332
def wrapconvertsink(sink):
    """Hook point allowing extensions to wrap the sink returned by
    convcmd.convertsink() before it is used, whether or not the convert
    extension was formally loaded.
    """
    return sink
1338 1338
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    hashlikes = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlikes.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlikes:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlikes)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[r]) for r in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags cache until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1381 1381
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    # a revision is "hidden" when it exists in the unfiltered changelog but
    # not in the repo's (filtered) changelog
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    # presumably a prefix-of-hash lookup on the unfiltered changelog —
    # TODO confirm against changelog._partialmatch
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # symbols that parse as an integer within the changelog's range
            # are treated as revision numbers (only when the config allows
            # direct access by revnum); larger integers fall through to the
            # hash-prefix lookup below
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # not a usable revnum: try to resolve the symbol as a (partial)
        # node hash in the unfiltered changelog
        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now