scmutil: deprecate revpairnodes()...
Martin von Zweigbergk
r37276:d29f6fbd default
@@ -1,1435 +1,1436 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 from .utils import (
45 45 procutil,
46 46 stringutil,
47 47 )
48 48
49 49 if pycompat.iswindows:
50 50 from . import scmwindows as scmplatform
51 51 else:
52 52 from . import scmposix as scmplatform
53 53
54 54 termsize = scmplatform.termsize
55 55
56 56 class status(tuple):
57 57 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
58 58 and 'ignored' properties are only relevant to the working copy.
59 59 '''
60 60
61 61 __slots__ = ()
62 62
63 63 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
64 64 clean):
65 65 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
66 66 ignored, clean))
67 67
68 68 @property
69 69 def modified(self):
70 70 '''files that have been modified'''
71 71 return self[0]
72 72
73 73 @property
74 74 def added(self):
75 75 '''files that have been added'''
76 76 return self[1]
77 77
78 78 @property
79 79 def removed(self):
80 80 '''files that have been removed'''
81 81 return self[2]
82 82
83 83 @property
84 84 def deleted(self):
85 85 '''files that are in the dirstate, but have been deleted from the
86 86 working copy (aka "missing")
87 87 '''
88 88 return self[3]
89 89
90 90 @property
91 91 def unknown(self):
92 92 '''files not in the dirstate that are not ignored'''
93 93 return self[4]
94 94
95 95 @property
96 96 def ignored(self):
97 97 '''files not in the dirstate that are ignored (by _dirignore())'''
98 98 return self[5]
99 99
100 100 @property
101 101 def clean(self):
102 102 '''files that have not been modified'''
103 103 return self[6]
104 104
105 105 def __repr__(self, *args, **kwargs):
106 106 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
107 107 'unknown=%r, ignored=%r, clean=%r>') % self)
108 108
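# Illustrative usage sketch (hypothetical values): the tuple is built
# positionally and read back through the named properties.
#
#   >>> st = status(['m'], ['a'], ['r'], ['d'], ['u'], ['i'], ['c'])
#   >>> st.modified, st.removed, st.clean
#   (['m'], ['r'], ['c'])
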
109 109 def itersubrepos(ctx1, ctx2):
110 110 """find subrepos in ctx1 or ctx2"""
111 111 # Create a (subpath, ctx) mapping where we prefer subpaths from
112 112 # ctx1. The subpaths from ctx2 are important when the .hgsub file
113 113 # has been modified (in ctx2) but not yet committed (in ctx1).
114 114 subpaths = dict.fromkeys(ctx2.substate, ctx2)
115 115 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
116 116
117 117 missing = set()
118 118
119 119 for subpath in ctx2.substate:
120 120 if subpath not in ctx1.substate:
121 121 del subpaths[subpath]
122 122 missing.add(subpath)
123 123
124 124 for subpath, ctx in sorted(subpaths.iteritems()):
125 125 yield subpath, ctx.sub(subpath)
126 126
127 127 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
128 128 # status and diff will have an accurate result when it does
129 129 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
130 130 # against itself.
131 131 for subpath in missing:
132 132 yield subpath, ctx2.nullsub(subpath, ctx1)
133 133
134 134 def nochangesfound(ui, repo, excluded=None):
135 135 '''Report no changes for push/pull. excluded is None or a list of
136 136 nodes excluded from the push/pull.
137 137 '''
138 138 secretlist = []
139 139 if excluded:
140 140 for n in excluded:
141 141 ctx = repo[n]
142 142 if ctx.phase() >= phases.secret and not ctx.extinct():
143 143 secretlist.append(n)
144 144
145 145 if secretlist:
146 146 ui.status(_("no changes found (ignored %d secret changesets)\n")
147 147 % len(secretlist))
148 148 else:
149 149 ui.status(_("no changes found\n"))
150 150
151 151 def callcatch(ui, func):
152 152 """call func() with global exception handling
153 153
154 154 return func() if no exception happens. otherwise do some error handling
155 155 and return an exit code accordingly. does not handle all exceptions.
156 156 """
157 157 try:
158 158 try:
159 159 return func()
160 160 except: # re-raises
161 161 ui.traceback()
162 162 raise
163 163 # Global exception handling, alphabetically
164 164 # Mercurial-specific first, followed by built-in and library exceptions
165 165 except error.LockHeld as inst:
166 166 if inst.errno == errno.ETIMEDOUT:
167 167 reason = _('timed out waiting for lock held by %r') % inst.locker
168 168 else:
169 169 reason = _('lock held by %r') % inst.locker
170 170 ui.warn(_("abort: %s: %s\n")
171 171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
172 172 if not inst.locker:
173 173 ui.warn(_("(lock might be very busy)\n"))
174 174 except error.LockUnavailable as inst:
175 175 ui.warn(_("abort: could not lock %s: %s\n") %
176 176 (inst.desc or stringutil.forcebytestr(inst.filename),
177 177 encoding.strtolocal(inst.strerror)))
178 178 except error.OutOfBandError as inst:
179 179 if inst.args:
180 180 msg = _("abort: remote error:\n")
181 181 else:
182 182 msg = _("abort: remote error\n")
183 183 ui.warn(msg)
184 184 if inst.args:
185 185 ui.warn(''.join(inst.args))
186 186 if inst.hint:
187 187 ui.warn('(%s)\n' % inst.hint)
188 188 except error.RepoError as inst:
189 189 ui.warn(_("abort: %s!\n") % inst)
190 190 if inst.hint:
191 191 ui.warn(_("(%s)\n") % inst.hint)
192 192 except error.ResponseError as inst:
193 193 ui.warn(_("abort: %s") % inst.args[0])
194 194 msg = inst.args[1]
195 195 if isinstance(msg, type(u'')):
196 196 msg = pycompat.sysbytes(msg)
197 197 if not isinstance(msg, bytes):
198 198 ui.warn(" %r\n" % (msg,))
199 199 elif not msg:
200 200 ui.warn(_(" empty string\n"))
201 201 else:
202 202 ui.warn("\n%r\n" % stringutil.ellipsis(msg))
203 203 except error.CensoredNodeError as inst:
204 204 ui.warn(_("abort: file censored %s!\n") % inst)
205 205 except error.RevlogError as inst:
206 206 ui.warn(_("abort: %s!\n") % inst)
207 207 except error.InterventionRequired as inst:
208 208 ui.warn("%s\n" % inst)
209 209 if inst.hint:
210 210 ui.warn(_("(%s)\n") % inst.hint)
211 211 return 1
212 212 except error.WdirUnsupported:
213 213 ui.warn(_("abort: working directory revision cannot be specified\n"))
214 214 except error.Abort as inst:
215 215 ui.warn(_("abort: %s\n") % inst)
216 216 if inst.hint:
217 217 ui.warn(_("(%s)\n") % inst.hint)
218 218 except ImportError as inst:
219 219 ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
220 220 m = stringutil.forcebytestr(inst).split()[-1]
221 221 if m in "mpatch bdiff".split():
222 222 ui.warn(_("(did you forget to compile extensions?)\n"))
223 223 elif m in "zlib".split():
224 224 ui.warn(_("(is your Python install correct?)\n"))
225 225 except IOError as inst:
226 226 if util.safehasattr(inst, "code"):
227 227 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
228 228 elif util.safehasattr(inst, "reason"):
229 229 try: # usually it is in the form (errno, strerror)
230 230 reason = inst.reason.args[1]
231 231 except (AttributeError, IndexError):
232 232 # it might be anything, for example a string
233 233 reason = inst.reason
234 234 if isinstance(reason, unicode):
235 235 # SSLError of Python 2.7.9 contains a unicode
236 236 reason = encoding.unitolocal(reason)
237 237 ui.warn(_("abort: error: %s\n") % reason)
238 238 elif (util.safehasattr(inst, "args")
239 239 and inst.args and inst.args[0] == errno.EPIPE):
240 240 pass
241 241 elif getattr(inst, "strerror", None):
242 242 if getattr(inst, "filename", None):
243 243 ui.warn(_("abort: %s: %s\n") % (
244 244 encoding.strtolocal(inst.strerror),
245 245 stringutil.forcebytestr(inst.filename)))
246 246 else:
247 247 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
248 248 else:
249 249 raise
250 250 except OSError as inst:
251 251 if getattr(inst, "filename", None) is not None:
252 252 ui.warn(_("abort: %s: '%s'\n") % (
253 253 encoding.strtolocal(inst.strerror),
254 254 stringutil.forcebytestr(inst.filename)))
255 255 else:
256 256 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
257 257 except MemoryError:
258 258 ui.warn(_("abort: out of memory\n"))
259 259 except SystemExit as inst:
260 260 # Commands shouldn't sys.exit directly, but give a return code.
261 261 # Just in case, catch this and pass the exit code to the caller.
262 262 return inst.code
263 263 except socket.error as inst:
264 264 ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
265 265
266 266 return -1
267 267
268 268 def checknewlabel(repo, lbl, kind):
269 269 # Do not use the "kind" parameter in ui output.
270 270 # It makes strings difficult to translate.
271 271 if lbl in ['tip', '.', 'null']:
272 272 raise error.Abort(_("the name '%s' is reserved") % lbl)
273 273 for c in (':', '\0', '\n', '\r'):
274 274 if c in lbl:
275 275 raise error.Abort(
276 276 _("%r cannot be used in a name") % pycompat.bytestr(c))
277 277 try:
278 278 int(lbl)
279 279 raise error.Abort(_("cannot use an integer as a name"))
280 280 except ValueError:
281 281 pass
282 282 if lbl.strip() != lbl:
283 283 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284 284
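# Hedged examples of the rejection rules above ('repo' assumed in scope):
#
#   checknewlabel(repo, 'tip', 'bookmark')       # Abort: reserved name
#   checknewlabel(repo, '123', 'bookmark')       # Abort: integer name
#   checknewlabel(repo, 'a:b', 'bookmark')       # Abort: ':' is forbidden
#   checknewlabel(repo, 'feature-x', 'bookmark') # accepted, returns None
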
285 285 def checkfilename(f):
286 286 '''Check that the filename f is an acceptable filename for a tracked file'''
287 287 if '\r' in f or '\n' in f:
288 288 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
289 289
290 290 def checkportable(ui, f):
291 291 '''Check if filename f is portable and warn or abort depending on config'''
292 292 checkfilename(f)
293 293 abort, warn = checkportabilityalert(ui)
294 294 if abort or warn:
295 295 msg = util.checkwinfilename(f)
296 296 if msg:
297 297 msg = "%s: %s" % (msg, procutil.shellquote(f))
298 298 if abort:
299 299 raise error.Abort(msg)
300 300 ui.warn(_("warning: %s\n") % msg)
301 301
302 302 def checkportabilityalert(ui):
303 303 '''check if the user's config requests nothing, a warning, or abort for
304 304 non-portable filenames'''
305 305 val = ui.config('ui', 'portablefilenames')
306 306 lval = val.lower()
307 307 bval = stringutil.parsebool(val)
308 308 abort = pycompat.iswindows or lval == 'abort'
309 309 warn = bval or lval == 'warn'
310 310 if bval is None and not (warn or abort or lval == 'ignore'):
311 311 raise error.ConfigError(
312 312 _("ui.portablefilenames value is invalid ('%s')") % val)
313 313 return abort, warn
314 314
315 315 class casecollisionauditor(object):
316 316 def __init__(self, ui, abort, dirstate):
317 317 self._ui = ui
318 318 self._abort = abort
319 319 allfiles = '\0'.join(dirstate._map)
320 320 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
321 321 self._dirstate = dirstate
322 322 # The purpose of _newfiles is so that we don't complain about
323 323 # case collisions if someone were to call this object with the
324 324 # same filename twice.
325 325 self._newfiles = set()
326 326
327 327 def __call__(self, f):
328 328 if f in self._newfiles:
329 329 return
330 330 fl = encoding.lower(f)
331 331 if fl in self._loweredfiles and f not in self._dirstate:
332 332 msg = _('possible case-folding collision for %s') % f
333 333 if self._abort:
334 334 raise error.Abort(msg)
335 335 self._ui.warn(_("warning: %s\n") % msg)
336 336 self._loweredfiles.add(fl)
337 337 self._newfiles.add(f)
338 338
339 339 def filteredhash(repo, maxrev):
340 340 """build hash of filtered revisions in the current repoview.
341 341
342 342 Multiple caches perform up-to-date validation by checking that the
343 343 tiprev and tipnode stored in the cache file match the current repository.
344 344 However, this is not sufficient for validating repoviews because the set
345 345 of revisions in the view may change without the repository tiprev and
346 346 tipnode changing.
347 347
348 348 This function hashes all the revs filtered from the view and returns
349 349 that SHA-1 digest.
350 350 """
351 351 cl = repo.changelog
352 352 if not cl.filteredrevs:
353 353 return None
354 354 key = None
355 355 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
356 356 if revs:
357 357 s = hashlib.sha1()
358 358 for rev in revs:
359 359 s.update('%d;' % rev)
360 360 key = s.digest()
361 361 return key
362 362
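# Worked sketch of the scheme above: with cl.filteredrevs == {2, 5} and
# maxrev >= 5, the key equals hashlib.sha1('2;5;').digest(); if no filtered
# revision is at or below maxrev, None is returned.
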
363 363 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
364 364 '''yield every hg repository under path, always recursively.
365 365 The recurse flag will only control recursion into repo working dirs'''
366 366 def errhandler(err):
367 367 if err.filename == path:
368 368 raise err
369 369 samestat = getattr(os.path, 'samestat', None)
370 370 if followsym and samestat is not None:
371 371 def adddir(dirlst, dirname):
372 372 dirstat = os.stat(dirname)
373 373 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
374 374 if not match:
375 375 dirlst.append(dirstat)
376 376 return not match
377 377 else:
378 378 followsym = False
379 379
380 380 if (seen_dirs is None) and followsym:
381 381 seen_dirs = []
382 382 adddir(seen_dirs, path)
383 383 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
384 384 dirs.sort()
385 385 if '.hg' in dirs:
386 386 yield root # found a repository
387 387 qroot = os.path.join(root, '.hg', 'patches')
388 388 if os.path.isdir(os.path.join(qroot, '.hg')):
389 389 yield qroot # we have a patch queue repo here
390 390 if recurse:
391 391 # avoid recursing inside the .hg directory
392 392 dirs.remove('.hg')
393 393 else:
394 394 dirs[:] = [] # don't descend further
395 395 elif followsym:
396 396 newdirs = []
397 397 for d in dirs:
398 398 fname = os.path.join(root, d)
399 399 if adddir(seen_dirs, fname):
400 400 if os.path.islink(fname):
401 401 for hgname in walkrepos(fname, True, seen_dirs):
402 402 yield hgname
403 403 else:
404 404 newdirs.append(d)
405 405 dirs[:] = newdirs
406 406
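# Usage sketch (the path is hypothetical): enumerate every repository below
# a directory, following symlinks and deduplicating through seen_dirs:
#
#   for repopath in walkrepos('/srv/hg', followsym=True):
#       ...  # each yielded value is a path containing a '.hg' directory
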
407 407 def binnode(ctx):
408 408 """Return binary node id for a given basectx"""
409 409 node = ctx.node()
410 410 if node is None:
411 411 return wdirid
412 412 return node
413 413
414 414 def intrev(ctx):
415 415 """Return integer for a given basectx that can be used in comparison or
416 416 arithmetic operation"""
417 417 rev = ctx.rev()
418 418 if rev is None:
419 419 return wdirrev
420 420 return rev
421 421
422 422 def formatchangeid(ctx):
423 423 """Format changectx as '{rev}:{node|formatnode}', which is the default
424 424 template provided by logcmdutil.changesettemplater"""
425 425 repo = ctx.repo()
426 426 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
427 427
428 428 def formatrevnode(ui, rev, node):
429 429 """Format given revision and node depending on the current verbosity"""
430 430 if ui.debugflag:
431 431 hexfunc = hex
432 432 else:
433 433 hexfunc = short
434 434 return '%d:%s' % (rev, hexfunc(node))
435 435
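# Sketch of the two verbosity levels: for rev 5, a normal ui formats '5:'
# plus the 12-character short hash, while --debug emits '5:' plus the full
# 40-character hex node.
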
436 436 def revsingle(repo, revspec, default='.', localalias=None):
437 437 if not revspec and revspec != 0:
438 438 return repo[default]
439 439
440 440 l = revrange(repo, [revspec], localalias=localalias)
441 441 if not l:
442 442 raise error.Abort(_('empty revision set'))
443 443 return repo[l.last()]
444 444
445 445 def _pairspec(revspec):
446 446 tree = revsetlang.parse(revspec)
447 447 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
448 448
449 449 def revpairnodes(repo, revs):
450 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
450 451 ctx1, ctx2 = revpair(repo, revs)
451 452 return ctx1.node(), ctx2.node()
452 453
453 454 def revpair(repo, revs):
454 455 if not revs:
455 456 return repo['.'], repo[None]
456 457
457 458 l = revrange(repo, revs)
458 459
459 460 if not l:
460 461 first = second = None
461 462 elif l.isascending():
462 463 first = l.min()
463 464 second = l.max()
464 465 elif l.isdescending():
465 466 first = l.max()
466 467 second = l.min()
467 468 else:
468 469 first = l.first()
469 470 second = l.last()
470 471
471 472 if first is None:
472 473 raise error.Abort(_('empty revision range'))
473 474 if (first == second and len(revs) >= 2
474 475 and not all(revrange(repo, [r]) for r in revs)):
475 476 raise error.Abort(_('empty revision on one side of range'))
476 477
477 478 # if top-level is range expression, the result must always be a pair
478 479 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
479 480 return repo[first], repo[None]
480 481
481 482 return repo[first], repo[second]
482 483
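# Hedged examples of the pair semantics ('repo' and the revisions assumed):
#
#   revpair(repo, [])       -> (repo['.'], repo[None])  # defaults
#   revpair(repo, ['2'])    -> (repo[2], repo[None])    # lone symbol
#   revpair(repo, ['2:2'])  -> (repo[2], repo[2])       # explicit range
#   revpair(repo, ['1:3'])  -> (repo[1], repo[3])
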
483 484 def revrange(repo, specs, localalias=None):
484 485 """Execute 1 to many revsets and return the union.
485 486
486 487 This is the preferred mechanism for executing revsets using user-specified
487 488 config options, such as revset aliases.
488 489
489 490 The revsets specified by ``specs`` will be executed via a chained ``OR``
490 491 expression. If ``specs`` is empty, an empty result is returned.
491 492
492 493 ``specs`` can contain integers, in which case they are assumed to be
493 494 revision numbers.
494 495
495 496 It is assumed the revsets are already formatted. If you have arguments
496 497 that need to be expanded in the revset, call ``revsetlang.formatspec()``
497 498 and pass the result as an element of ``specs``.
498 499
499 500 Specifying a single revset is allowed.
500 501
501 502 Returns a ``revset.abstractsmartset`` which is a list-like interface over
502 503 integer revisions.
503 504 """
504 505 allspecs = []
505 506 for spec in specs:
506 507 if isinstance(spec, int):
507 508 spec = revsetlang.formatspec('rev(%d)', spec)
508 509 allspecs.append(spec)
509 510 return repo.anyrevs(allspecs, user=True, localalias=localalias)
510 511
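# Minimal sketch ('repo' assumed): the specs are OR-ed into one smartset,
# and bare integers are accepted as revision numbers:
#
#   revs = revrange(repo, ['heads(default)', 0])
#   revs = revrange(repo, [revsetlang.formatspec('%d::%d', 2, 5)])
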
511 512 def meaningfulparents(repo, ctx):
512 513 """Return list of meaningful (or all if debug) parentrevs for rev.
513 514
514 515 For merges (two non-nullrev revisions) both parents are meaningful.
515 516 Otherwise the first parent revision is considered meaningful if it
516 517 is not the preceding revision.
517 518 """
518 519 parents = ctx.parents()
519 520 if len(parents) > 1:
520 521 return parents
521 522 if repo.ui.debugflag:
522 523 return [parents[0], repo['null']]
523 524 if parents[0].rev() >= intrev(ctx) - 1:
524 525 return []
525 526 return parents
526 527
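# Sketch: a merge reports both parents; a changeset whose first parent is
# the immediately preceding revision reports [] (nothing interesting); with
# --debug a linear changeset reports its parent plus the null revision.
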
527 528 def expandpats(pats):
528 529 '''Expand bare globs when running on windows.
529 530 On posix we assume it has already been done by sh.'''
530 531 if not util.expandglobs:
531 532 return list(pats)
532 533 ret = []
533 534 for kindpat in pats:
534 535 kind, pat = matchmod._patsplit(kindpat, None)
535 536 if kind is None:
536 537 try:
537 538 globbed = glob.glob(pat)
538 539 except re.error:
539 540 globbed = [pat]
540 541 if globbed:
541 542 ret.extend(globbed)
542 543 continue
543 544 ret.append(kindpat)
544 545 return ret
545 546
546 547 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
547 548 badfn=None):
548 549 '''Return a matcher and the patterns that were used.
549 550 The matcher will warn about bad matches, unless an alternate badfn callback
550 551 is provided.'''
551 552 if pats == ("",):
552 553 pats = []
553 554 if opts is None:
554 555 opts = {}
555 556 if not globbed and default == 'relpath':
556 557 pats = expandpats(pats or [])
557 558
558 559 def bad(f, msg):
559 560 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
560 561
561 562 if badfn is None:
562 563 badfn = bad
563 564
564 565 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
565 566 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
566 567
567 568 if m.always():
568 569 pats = []
569 570 return m, pats
570 571
571 572 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
572 573 badfn=None):
573 574 '''Return a matcher that will warn about bad matches.'''
574 575 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
575 576
576 577 def matchall(repo):
577 578 '''Return a matcher that will efficiently match everything.'''
578 579 return matchmod.always(repo.root, repo.getcwd())
579 580
580 581 def matchfiles(repo, files, badfn=None):
581 582 '''Return a matcher that will efficiently match exactly these files.'''
582 583 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
583 584
584 585 def parsefollowlinespattern(repo, rev, pat, msg):
585 586 """Return a file name from `pat` pattern suitable for usage in followlines
586 587 logic.
587 588 """
588 589 if not matchmod.patkind(pat):
589 590 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
590 591 else:
591 592 ctx = repo[rev]
592 593 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
593 594 files = [f for f in ctx if m(f)]
594 595 if len(files) != 1:
595 596 raise error.ParseError(msg)
596 597 return files[0]
597 598
598 599 def origpath(ui, repo, filepath):
599 600 '''customize where .orig files are created
600 601
601 602 Fetch user defined path from config file: [ui] origbackuppath = <path>
602 603 Fall back to default (filepath with .orig suffix) if not specified
603 604 '''
604 605 origbackuppath = ui.config('ui', 'origbackuppath')
605 606 if not origbackuppath:
606 607 return filepath + ".orig"
607 608
608 609 # Convert filepath from an absolute path into a path inside the repo.
609 610 filepathfromroot = util.normpath(os.path.relpath(filepath,
610 611 start=repo.root))
611 612
612 613 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
613 614 origbackupdir = origvfs.dirname(filepathfromroot)
614 615 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
615 616 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
616 617
617 618 # Remove any files that conflict with the backup file's path
618 619 for f in reversed(list(util.finddirs(filepathfromroot))):
619 620 if origvfs.isfileorlink(f):
620 621 ui.note(_('removing conflicting file: %s\n')
621 622 % origvfs.join(f))
622 623 origvfs.unlink(f)
623 624 break
624 625
625 626 origvfs.makedirs(origbackupdir)
626 627
627 628 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
628 629 ui.note(_('removing conflicting directory: %s\n')
629 630 % origvfs.join(filepathfromroot))
630 631 origvfs.rmtree(filepathfromroot, forcibly=True)
631 632
632 633 return origvfs.join(filepathfromroot)
633 634
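# Hedged example: with '[ui] origbackuppath = .hgbackup' (a hypothetical
# value), the backup of REPO/a/b is created as REPO/.hgbackup/a/b; with the
# option unset the function simply returns filepath + '.orig'.
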
634 635 class _containsnode(object):
635 636 """proxy __contains__(node) to container.__contains__ which accepts revs"""
636 637
637 638 def __init__(self, repo, revcontainer):
638 639 self._torev = repo.changelog.rev
639 640 self._revcontains = revcontainer.__contains__
640 641
641 642 def __contains__(self, node):
642 643 return self._revcontains(self._torev(node))
643 644
644 645 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
645 646 """do common cleanups when old nodes are replaced by new nodes
646 647
647 648 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
648 649 (we might also want to move working directory parent in the future)
649 650
650 651 By default, bookmark moves are calculated automatically from 'replacements',
651 652 but 'moves' can be used to override that. Also, 'moves' may include
652 653 additional bookmark moves that should not have associated obsmarkers.
653 654
654 655 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
655 656 have replacements. operation is a string, like "rebase".
656 657
657 658 metadata is a dictionary containing metadata to be stored in obsmarkers if
658 659 obsolescence is enabled.
659 660 """
660 661 if not replacements and not moves:
661 662 return
662 663
663 664 # translate mapping's other forms
664 665 if not util.safehasattr(replacements, 'items'):
665 666 replacements = {n: () for n in replacements}
666 667
667 668 # Calculate bookmark movements
668 669 if moves is None:
669 670 moves = {}
670 671 # Unfiltered repo is needed since nodes in replacements might be hidden.
671 672 unfi = repo.unfiltered()
672 673 for oldnode, newnodes in replacements.items():
673 674 if oldnode in moves:
674 675 continue
675 676 if len(newnodes) > 1:
676 677 # usually a split, take the one with biggest rev number
677 678 newnode = next(unfi.set('max(%ln)', newnodes)).node()
678 679 elif len(newnodes) == 0:
679 680 # move bookmark backwards
680 681 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
681 682 list(replacements)))
682 683 if roots:
683 684 newnode = roots[0].node()
684 685 else:
685 686 newnode = nullid
686 687 else:
687 688 newnode = newnodes[0]
688 689 moves[oldnode] = newnode
689 690
690 691 with repo.transaction('cleanup') as tr:
691 692 # Move bookmarks
692 693 bmarks = repo._bookmarks
693 694 bmarkchanges = []
694 695 allnewnodes = [n for ns in replacements.values() for n in ns]
695 696 for oldnode, newnode in moves.items():
696 697 oldbmarks = repo.nodebookmarks(oldnode)
697 698 if not oldbmarks:
698 699 continue
699 700 from . import bookmarks # avoid import cycle
700 701 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
701 702 (util.rapply(pycompat.maybebytestr, oldbmarks),
702 703 hex(oldnode), hex(newnode)))
703 704 # Delete divergent bookmarks being parents of related newnodes
704 705 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
705 706 allnewnodes, newnode, oldnode)
706 707 deletenodes = _containsnode(repo, deleterevs)
707 708 for name in oldbmarks:
708 709 bmarkchanges.append((name, newnode))
709 710 for b in bookmarks.divergent2delete(repo, deletenodes, name):
710 711 bmarkchanges.append((b, None))
711 712
712 713 if bmarkchanges:
713 714 bmarks.applychanges(repo, tr, bmarkchanges)
714 715
715 716 # Obsolete or strip nodes
716 717 if obsolete.isenabled(repo, obsolete.createmarkersopt):
717 718 # If a node is already obsoleted, and we want to obsolete it
718 719 # without a successor, skip that obsolete request since it's
719 720 # unnecessary. That's the "if s or not isobs(n)" check below.
720 721 # Also sort the nodes in topological order; that might be useful for
721 722 # some obsstore logic.
722 723 # NOTE: the filtering and sorting might belong to createmarkers.
723 724 isobs = unfi.obsstore.successors.__contains__
724 725 torev = unfi.changelog.rev
725 726 sortfunc = lambda ns: torev(ns[0])
726 727 rels = [(unfi[n], tuple(unfi[m] for m in s))
727 728 for n, s in sorted(replacements.items(), key=sortfunc)
728 729 if s or not isobs(n)]
729 730 if rels:
730 731 obsolete.createmarkers(repo, rels, operation=operation,
731 732 metadata=metadata)
732 733 else:
733 734 from . import repair # avoid import cycle
734 735 tostrip = list(replacements)
735 736 if tostrip:
736 737 repair.delayedstrip(repo.ui, repo, tostrip, operation)
737 738
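# Hedged call sketches (the node variables are assumed):
#
#   cleanupnodes(repo, {oldnode: [newnode]}, 'amend')  # mark superseded
#   cleanupnodes(repo, [prunednode], 'prune')          # no successors
#
# With obsmarkers enabled this records markers; otherwise the old nodes are
# handed to repair.delayedstrip().
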
738 739 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
739 740 if opts is None:
740 741 opts = {}
741 742 m = matcher
742 743 if dry_run is None:
743 744 dry_run = opts.get('dry_run')
744 745 if similarity is None:
745 746 similarity = float(opts.get('similarity') or 0)
746 747
747 748 ret = 0
748 749 join = lambda f: os.path.join(prefix, f)
749 750
750 751 wctx = repo[None]
751 752 for subpath in sorted(wctx.substate):
752 753 submatch = matchmod.subdirmatcher(subpath, m)
753 754 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
754 755 sub = wctx.sub(subpath)
755 756 try:
756 757 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
757 758 ret = 1
758 759 except error.LookupError:
759 760 repo.ui.status(_("skipping missing subrepository: %s\n")
760 761 % join(subpath))
761 762
762 763 rejected = []
763 764 def badfn(f, msg):
764 765 if f in m.files():
765 766 m.bad(f, msg)
766 767 rejected.append(f)
767 768
768 769 badmatch = matchmod.badmatch(m, badfn)
769 770 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
770 771 badmatch)
771 772
772 773 unknownset = set(unknown + forgotten)
773 774 toprint = unknownset.copy()
774 775 toprint.update(deleted)
775 776 for abs in sorted(toprint):
776 777 if repo.ui.verbose or not m.exact(abs):
777 778 if abs in unknownset:
778 779 status = _('adding %s\n') % m.uipath(abs)
779 780 else:
780 781 status = _('removing %s\n') % m.uipath(abs)
781 782 repo.ui.status(status)
782 783
783 784 renames = _findrenames(repo, m, added + unknown, removed + deleted,
784 785 similarity)
785 786
786 787 if not dry_run:
787 788 _markchanges(repo, unknown + forgotten, deleted, renames)
788 789
789 790 for f in rejected:
790 791 if f in m.files():
791 792 return 1
792 793 return ret
793 794
794 795 def marktouched(repo, files, similarity=0.0):
795 796 '''Assert that files have somehow been operated upon. files are relative to
796 797 the repo root.'''
797 798 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
798 799 rejected = []
799 800
800 801 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
801 802
802 803 if repo.ui.verbose:
803 804 unknownset = set(unknown + forgotten)
804 805 toprint = unknownset.copy()
805 806 toprint.update(deleted)
806 807 for abs in sorted(toprint):
807 808 if abs in unknownset:
808 809 status = _('adding %s\n') % abs
809 810 else:
810 811 status = _('removing %s\n') % abs
811 812 repo.ui.status(status)
812 813
813 814 renames = _findrenames(repo, m, added + unknown, removed + deleted,
814 815 similarity)
815 816
816 817 _markchanges(repo, unknown + forgotten, deleted, renames)
817 818
818 819 for f in rejected:
819 820 if f in m.files():
820 821 return 1
821 822 return 0
822 823
823 824 def _interestingfiles(repo, matcher):
824 825 '''Walk dirstate with matcher, looking for files that addremove would care
825 826 about.
826 827
827 828 This is different from dirstate.status because it doesn't care about
828 829 whether files are modified or clean.'''
829 830 added, unknown, deleted, removed, forgotten = [], [], [], [], []
830 831 audit_path = pathutil.pathauditor(repo.root, cached=True)
831 832
832 833 ctx = repo[None]
833 834 dirstate = repo.dirstate
834 835 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
835 836 unknown=True, ignored=False, full=False)
836 837 for abs, st in walkresults.iteritems():
837 838 dstate = dirstate[abs]
838 839 if dstate == '?' and audit_path.check(abs):
839 840 unknown.append(abs)
840 841 elif dstate != 'r' and not st:
841 842 deleted.append(abs)
842 843 elif dstate == 'r' and st:
843 844 forgotten.append(abs)
844 845 # for finding renames
845 846 elif dstate == 'r' and not st:
846 847 removed.append(abs)
847 848 elif dstate == 'a':
848 849 added.append(abs)
849 850
850 851 return added, unknown, deleted, removed, forgotten
851 852
852 853 def _findrenames(repo, matcher, added, removed, similarity):
853 854 '''Find renames from removed files to added ones.'''
854 855 renames = {}
855 856 if similarity > 0:
856 857 for old, new, score in similar.findrenames(repo, added, removed,
857 858 similarity):
858 859 if (repo.ui.verbose or not matcher.exact(old)
859 860 or not matcher.exact(new)):
860 861 repo.ui.status(_('recording removal of %s as rename to %s '
861 862 '(%d%% similar)\n') %
862 863 (matcher.rel(old), matcher.rel(new),
863 864 score * 100))
864 865 renames[new] = old
865 866 return renames
866 867
867 868 def _markchanges(repo, unknown, deleted, renames):
868 869 '''Marks the files in unknown as added, the files in deleted as removed,
869 870 and the files in renames as copied.'''
870 871 wctx = repo[None]
871 872 with repo.wlock():
872 873 wctx.forget(deleted)
873 874 wctx.add(unknown)
874 875 for new, old in renames.iteritems():
875 876 wctx.copy(old, new)
876 877
877 878 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
878 879 """Update the dirstate to reflect the intent of copying src to dst. For
879 880 different reasons it might not end with dst being marked as copied from src.
880 881 """
881 882 origsrc = repo.dirstate.copied(src) or src
882 883 if dst == origsrc: # copying back a copy?
883 884 if repo.dirstate[dst] not in 'mn' and not dryrun:
884 885 repo.dirstate.normallookup(dst)
885 886 else:
886 887 if repo.dirstate[origsrc] == 'a' and origsrc == src:
887 888 if not ui.quiet:
888 889 ui.warn(_("%s has not been committed yet, so no copy "
889 890 "data will be stored for %s.\n")
890 891 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
891 892 if repo.dirstate[dst] in '?r' and not dryrun:
892 893 wctx.add([dst])
893 894 elif not dryrun:
894 895 wctx.copy(origsrc, dst)
895 896
896 897 def readrequires(opener, supported):
897 898 '''Reads and parses .hg/requires and checks if all entries found
898 899 are in the list of supported features.'''
899 900 requirements = set(opener.read("requires").splitlines())
900 901 missings = []
901 902 for r in requirements:
902 903 if r not in supported:
903 904 if not r or not r[0:1].isalnum():
904 905 raise error.RequirementError(_(".hg/requires file is corrupt"))
905 906 missings.append(r)
906 907 missings.sort()
907 908 if missings:
908 909 raise error.RequirementError(
909 910 _("repository requires features unknown to this Mercurial: %s")
910 911 % " ".join(missings),
911 912 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
912 913 " for more information"))
913 914 return requirements
914 915
915 916 def writerequires(opener, requirements):
916 917 with opener('requires', 'w') as fp:
917 918 for r in sorted(requirements):
918 919 fp.write("%s\n" % r)
919 920
920 921 class filecachesubentry(object):
921 922 def __init__(self, path, stat):
922 923 self.path = path
923 924 self.cachestat = None
924 925 self._cacheable = None
925 926
926 927 if stat:
927 928 self.cachestat = filecachesubentry.stat(self.path)
928 929
929 930 if self.cachestat:
930 931 self._cacheable = self.cachestat.cacheable()
931 932 else:
932 933 # None means we don't know yet
933 934 self._cacheable = None
934 935
935 936 def refresh(self):
936 937 if self.cacheable():
937 938 self.cachestat = filecachesubentry.stat(self.path)
938 939
939 940 def cacheable(self):
940 941 if self._cacheable is not None:
941 942 return self._cacheable
942 943
943 944 # we don't know yet, assume it is for now
944 945 return True
945 946
946 947 def changed(self):
947 948 # no point in going further if we can't cache it
948 949 if not self.cacheable():
949 950 return True
950 951
951 952 newstat = filecachesubentry.stat(self.path)
952 953
953 954 # we may not know if it's cacheable yet, check again now
954 955 if newstat and self._cacheable is None:
955 956 self._cacheable = newstat.cacheable()
956 957
957 958 # check again
958 959 if not self._cacheable:
959 960 return True
960 961
961 962 if self.cachestat != newstat:
962 963 self.cachestat = newstat
963 964 return True
964 965 else:
965 966 return False
966 967
967 968 @staticmethod
968 969 def stat(path):
969 970 try:
970 971 return util.cachestat(path)
971 972 except OSError as e:
972 973 if e.errno != errno.ENOENT:
973 974 raise
974 975
975 976 class filecacheentry(object):
976 977 def __init__(self, paths, stat=True):
977 978 self._entries = []
978 979 for path in paths:
979 980 self._entries.append(filecachesubentry(path, stat))
980 981
981 982 def changed(self):
982 983 '''true if any entry has changed'''
983 984 for entry in self._entries:
984 985 if entry.changed():
985 986 return True
986 987 return False
987 988
988 989 def refresh(self):
989 990 for entry in self._entries:
990 991 entry.refresh()
991 992
992 993 class filecache(object):
993 994 '''A property like decorator that tracks files under .hg/ for updates.
994 995
995 996 Records stat info when called in _filecache.
996 997
997 998 On subsequent calls, compares old stat info with new info, and recreates the
998 999 object when any of the files changes, updating the new stat info in
999 1000 _filecache.
1000 1001
1001 1002 Mercurial either atomically renames or appends to files under .hg,
1002 1003 so to ensure the cache is reliable we need the filesystem to be able
1003 1004 to tell us if a file has been replaced. If it can't, we fall back to
1004 1005 recreating the object on every call (essentially the same behavior as
1005 1006 propertycache).
1006 1007
1007 1008 '''
1008 1009 def __init__(self, *paths):
1009 1010 self.paths = paths
1010 1011
1011 1012 def join(self, obj, fname):
1012 1013 """Used to compute the runtime path of a cached file.
1013 1014
1014 1015 Users should subclass filecache and provide their own version of this
1015 1016 function to call the appropriate join function on 'obj' (an instance
1016 1017 of the class that its member function was decorated).
1017 1018 """
1018 1019 raise NotImplementedError
1019 1020
1020 1021 def __call__(self, func):
1021 1022 self.func = func
1022 1023 self.name = func.__name__.encode('ascii')
1023 1024 return self
1024 1025
1025 1026 def __get__(self, obj, type=None):
1026 1027 # if accessed on the class, return the descriptor itself.
1027 1028 if obj is None:
1028 1029 return self
1029 1030 # do we need to check if the file changed?
1030 1031 if self.name in obj.__dict__:
1031 1032 assert self.name in obj._filecache, self.name
1032 1033 return obj.__dict__[self.name]
1033 1034
1034 1035 entry = obj._filecache.get(self.name)
1035 1036
1036 1037 if entry:
1037 1038 if entry.changed():
1038 1039 entry.obj = self.func(obj)
1039 1040 else:
1040 1041 paths = [self.join(obj, path) for path in self.paths]
1041 1042
1042 1043 # We stat -before- creating the object so our cache doesn't lie if
1043 1044 # a writer modified between the time we read and stat
1044 1045 entry = filecacheentry(paths, True)
1045 1046 entry.obj = self.func(obj)
1046 1047
1047 1048 obj._filecache[self.name] = entry
1048 1049
1049 1050 obj.__dict__[self.name] = entry.obj
1050 1051 return entry.obj
1051 1052
1052 1053 def __set__(self, obj, value):
1053 1054 if self.name not in obj._filecache:
1054 1055 # we add an entry for the missing value because X in __dict__
1055 1056 # implies X in _filecache
1056 1057 paths = [self.join(obj, path) for path in self.paths]
1057 1058 ce = filecacheentry(paths, False)
1058 1059 obj._filecache[self.name] = ce
1059 1060 else:
1060 1061 ce = obj._filecache[self.name]
1061 1062
1062 1063 ce.obj = value # update cached copy
1063 1064 obj.__dict__[self.name] = value # update copy returned by obj.x
1064 1065
1065 1066 def __delete__(self, obj):
1066 1067 try:
1067 1068 del obj.__dict__[self.name]
1068 1069 except KeyError:
1069 1070 raise AttributeError(self.name)
1070 1071
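# Hedged usage sketch (class and helper names hypothetical): subclasses
# supply join(), and decorated properties are recomputed when the backing
# file changes. The owner object must carry a _filecache dict.
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)
#
#   class repoobject(object):
#       def __init__(self):
#           self._filecache = {}
#       @repofilecache('bookmarks')
#       def bookmarkstate(self):
#           return parsebookmarksfile(self)  # re-read only on file change
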
1071 1072 def extdatasource(repo, source):
1072 1073 """Gather a map of rev -> value dict from the specified source
1073 1074
1074 1075 A source spec is treated as a URL, with a special case shell: type
1075 1076 for parsing the output from a shell command.
1076 1077
1077 1078 The data is parsed as a series of newline-separated records where
1078 1079 each record is a revision specifier optionally followed by a space
1079 1080 and a freeform string value. If the revision is known locally, it
1080 1081 is converted to a rev, otherwise the record is skipped.
1081 1082
1082 1083 Note that both key and value are treated as UTF-8 and converted to
1083 1084 the local encoding. This allows uniformity between local and
1084 1085 remote data sources.
1085 1086 """
1086 1087
1087 1088 spec = repo.ui.config("extdata", source)
1088 1089 if not spec:
1089 1090 raise error.Abort(_("unknown extdata source '%s'") % source)
1090 1091
1091 1092 data = {}
1092 1093 src = proc = None
1093 1094 try:
1094 1095 if spec.startswith("shell:"):
1095 1096 # external commands should be run relative to the repo root
1096 1097 cmd = spec[6:]
1097 1098 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1098 1099 close_fds=procutil.closefds,
1099 1100 stdout=subprocess.PIPE, cwd=repo.root)
1100 1101 src = proc.stdout
1101 1102 else:
1102 1103 # treat as a URL or file
1103 1104 src = url.open(repo.ui, spec)
1104 1105 for l in src:
1105 1106 if " " in l:
1106 1107 k, v = l.strip().split(" ", 1)
1107 1108 else:
1108 1109 k, v = l.strip(), ""
1109 1110
1110 1111 k = encoding.tolocal(k)
1111 1112 try:
1112 1113 data[repo[k].rev()] = encoding.tolocal(v)
1113 1114 except (error.LookupError, error.RepoLookupError):
1114 1115 pass # we ignore data for nodes that don't exist locally
1115 1116 finally:
1116 1117 if proc:
1117 1118 proc.communicate()
1118 1119 if src:
1119 1120 src.close()
1120 1121 if proc and proc.returncode != 0:
1121 1122 raise error.Abort(_("extdata command '%s' failed: %s")
1122 1123 % (cmd, procutil.explainexit(proc.returncode)[0]))
1123 1124
1124 1125 return data
1125 1126
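# Hedged configuration sketch ('bugs' and the file are hypothetical):
#
#   [extdata]
#   bugs = shell:cat .hg/bugmap
#
# With .hg/bugmap holding lines such as '<node> fixed-in-4.6',
# extdatasource(repo, 'bugs') returns {rev: 'fixed-in-4.6', ...} for nodes
# known locally; unknown nodes are silently skipped.
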
1126 1127 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1127 1128 if lock is None:
1128 1129 raise error.LockInheritanceContractViolation(
1129 1130 'lock can only be inherited while held')
1130 1131 if environ is None:
1131 1132 environ = {}
1132 1133 with lock.inherit() as locker:
1133 1134 environ[envvar] = locker
1134 1135 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1135 1136
1136 1137 def wlocksub(repo, cmd, *args, **kwargs):
1137 1138 """run cmd as a subprocess that allows inheriting repo's wlock
1138 1139
1139 1140 This can only be called while the wlock is held. This takes all the
1140 1141 arguments that ui.system does, and returns the exit code of the
1141 1142 subprocess."""
1142 1143 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1143 1144 **kwargs)
1144 1145
1145 1146 def gdinitconfig(ui):
1146 1147 """helper function to know if a repo should be created as general delta
1147 1148 """
1148 1149 # experimental config: format.generaldelta
1149 1150 return (ui.configbool('format', 'generaldelta')
1150 1151 or ui.configbool('format', 'usegeneraldelta'))
1151 1152
1152 1153 def gddeltaconfig(ui):
1153 1154 """helper function to know if incoming delta should be optimised
1154 1155 """
1155 1156 # experimental config: format.generaldelta
1156 1157 return ui.configbool('format', 'generaldelta')
1157 1158
1158 1159 class simplekeyvaluefile(object):
1159 1160 """A simple file with key=value lines
1160 1161
1161 1162 Keys must be alphanumeric and start with a letter; values must not
1162 1163 contain '\n' characters"""
1163 1164 firstlinekey = '__firstline'
1164 1165
1165 1166 def __init__(self, vfs, path, keys=None):
1166 1167 self.vfs = vfs
1167 1168 self.path = path
1168 1169
1169 1170 def read(self, firstlinenonkeyval=False):
1170 1171 """Read the contents of a simple key-value file
1171 1172
1172 1173 'firstlinenonkeyval' indicates whether the first line of the file should
1173 1174 be treated as a key-value pair or returned fully under the
1174 1175 __firstline key."""
1175 1176 lines = self.vfs.readlines(self.path)
1176 1177 d = {}
1177 1178 if firstlinenonkeyval:
1178 1179 if not lines:
1179 1180 e = _("empty simplekeyvalue file")
1180 1181 raise error.CorruptedState(e)
1181 1182 # we don't want to include '\n' in the __firstline
1182 1183 d[self.firstlinekey] = lines[0][:-1]
1183 1184 del lines[0]
1184 1185
1185 1186 try:
1186 1187 # the 'if line.strip()' part prevents us from failing on empty
1187 1188 # lines which only contain '\n' therefore are not skipped
1188 1189 # by 'if line'
1189 1190 updatedict = dict(line[:-1].split('=', 1) for line in lines
1190 1191 if line.strip())
1191 1192 if self.firstlinekey in updatedict:
1192 1193 e = _("%r can't be used as a key")
1193 1194 raise error.CorruptedState(e % self.firstlinekey)
1194 1195 d.update(updatedict)
1195 1196 except ValueError as e:
1196 1197 raise error.CorruptedState(str(e))
1197 1198 return d
1198 1199
1199 1200 def write(self, data, firstline=None):
1200 1201 """Write key=>value mapping to a file
1201 1202 data is a dict. Keys must be alphanumeric and start with a letter.
1202 1203 Values must not contain newline characters.
1203 1204
1204 1205 If 'firstline' is not None, it is written to file before
1205 1206 everything else, as it is, not in a key=value form"""
1206 1207 lines = []
1207 1208 if firstline is not None:
1208 1209 lines.append('%s\n' % firstline)
1209 1210
1210 1211 for k, v in data.items():
1211 1212 if k == self.firstlinekey:
1212 1213 e = "key name '%s' is reserved" % self.firstlinekey
1213 1214 raise error.ProgrammingError(e)
1214 1215 if not k[0:1].isalpha():
1215 1216 e = "keys must start with a letter in a key-value file"
1216 1217 raise error.ProgrammingError(e)
1217 1218 if not k.isalnum():
1218 1219 e = "invalid key name in a simple key-value file"
1219 1220 raise error.ProgrammingError(e)
1220 1221 if '\n' in v:
1221 1222 e = "invalid value in a simple key-value file"
1222 1223 raise error.ProgrammingError(e)
1223 1224 lines.append("%s=%s\n" % (k, v))
1224 1225 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1225 1226 fp.write(''.join(lines))
1226 1227
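# Round-trip sketch ('statevfs' and the values are hypothetical):
#
#   f = simplekeyvaluefile(statevfs, 'state')
#   f.write({'version': '1'}, firstline='command-v1')
#   f.read(firstlinenonkeyval=True)
#   -> {'__firstline': 'command-v1', 'version': '1'}
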
1227 1228 _reportobsoletedsource = [
1228 1229 'debugobsolete',
1229 1230 'pull',
1230 1231 'push',
1231 1232 'serve',
1232 1233 'unbundle',
1233 1234 ]
1234 1235
1235 1236 _reportnewcssource = [
1236 1237 'pull',
1237 1238 'unbundle',
1238 1239 ]
1239 1240
1240 1241 # a list of (repo, ctx, files) functions called by various commands to allow
1241 1242 # extensions to ensure the corresponding files are available locally, before the
1242 1243 # command uses them.
1243 1244 fileprefetchhooks = util.hooks()
1244 1245
1245 1246 # A marker that tells the evolve extension to suppress its own reporting
1246 1247 _reportstroubledchangesets = True
1247 1248
1248 1249 def registersummarycallback(repo, otr, txnname=''):
1249 1250 """register a callback to issue a summary after the transaction is closed
1250 1251 """
1251 1252 def txmatch(sources):
1252 1253 return any(txnname.startswith(source) for source in sources)
1253 1254
1254 1255 categories = []
1255 1256
1256 1257 def reportsummary(func):
1257 1258 """decorator for report callbacks."""
1258 1259 # The repoview life cycle is shorter than the one of the actual
1259 1260 # underlying repository. So the filtered object can die before the
1260 1261 # weakref is used leading to troubles. We keep a reference to the
1261 1262 # unfiltered object and restore the filtering when retrieving the
1262 1263 # repository through the weakref.
1263 1264 filtername = repo.filtername
1264 1265 reporef = weakref.ref(repo.unfiltered())
1265 1266 def wrapped(tr):
1266 1267 repo = reporef()
1267 1268 if filtername:
1268 1269 repo = repo.filtered(filtername)
1269 1270 func(repo, tr)
1270 1271 newcat = '%02i-txnreport' % len(categories)
1271 1272 otr.addpostclose(newcat, wrapped)
1272 1273 categories.append(newcat)
1273 1274 return wrapped
1274 1275
1275 1276 if txmatch(_reportobsoletedsource):
1276 1277 @reportsummary
1277 1278 def reportobsoleted(repo, tr):
1278 1279 obsoleted = obsutil.getobsoleted(repo, tr)
1279 1280 if obsoleted:
1280 1281 repo.ui.status(_('obsoleted %i changesets\n')
1281 1282 % len(obsoleted))
1282 1283
1283 1284 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1284 1285 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1285 1286 instabilitytypes = [
1286 1287 ('orphan', 'orphan'),
1287 1288 ('phase-divergent', 'phasedivergent'),
1288 1289 ('content-divergent', 'contentdivergent'),
1289 1290 ]
1290 1291
1291 1292 def getinstabilitycounts(repo):
1292 1293 filtered = repo.changelog.filteredrevs
1293 1294 counts = {}
1294 1295 for instability, revset in instabilitytypes:
1295 1296 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1296 1297 filtered)
1297 1298 return counts
1298 1299
1299 1300 oldinstabilitycounts = getinstabilitycounts(repo)
1300 1301 @reportsummary
1301 1302 def reportnewinstabilities(repo, tr):
1302 1303 newinstabilitycounts = getinstabilitycounts(repo)
1303 1304 for instability, revset in instabilitytypes:
1304 1305 delta = (newinstabilitycounts[instability] -
1305 1306 oldinstabilitycounts[instability])
1306 1307 if delta > 0:
1307 1308 repo.ui.warn(_('%i new %s changesets\n') %
1308 1309 (delta, instability))
1309 1310
1310 1311 if txmatch(_reportnewcssource):
1311 1312 @reportsummary
1312 1313 def reportnewcs(repo, tr):
1313 1314 """Report the range of new revisions pulled/unbundled."""
1314 1315 newrevs = tr.changes.get('revs', xrange(0, 0))
1315 1316 if not newrevs:
1316 1317 return
1317 1318
1318 1319 # Compute the bounds of new revisions' range, excluding obsoletes.
1319 1320 unfi = repo.unfiltered()
1320 1321 revs = unfi.revs('%ld and not obsolete()', newrevs)
1321 1322 if not revs:
1322 1323 # Got only obsoletes.
1323 1324 return
1324 1325 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1325 1326
1326 1327 if minrev == maxrev:
1327 1328 revrange = minrev
1328 1329 else:
1329 1330 revrange = '%s:%s' % (minrev, maxrev)
1330 1331 repo.ui.status(_('new changesets %s\n') % revrange)
1331 1332
1332 1333 def nodesummaries(repo, nodes, maxnumnodes=4):
1333 1334 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1334 1335 return ' '.join(short(h) for h in nodes)
1335 1336 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1336 1337 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1337 1338
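# Sketch: with six nodes and the default maxnumnodes=4, a non-verbose ui is
# shown the first four short hashes followed by 'and 2 others'; --verbose
# (or four or fewer nodes) prints every short hash.
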
1338 1339 def enforcesinglehead(repo, tr, desc):
1339 1340 """check that no named branch has multiple heads"""
1340 1341 if desc in ('strip', 'repair'):
1341 1342 # skip the logic during strip
1342 1343 return
1343 1344 visible = repo.filtered('visible')
1344 1345 # possible improvement: we could restrict the check to affected branch
1345 1346 for name, heads in visible.branchmap().iteritems():
1346 1347 if len(heads) > 1:
1347 1348 msg = _('rejecting multiple heads on branch "%s"')
1348 1349 msg %= name
1349 1350 hint = _('%d heads: %s')
1350 1351 hint %= (len(heads), nodesummaries(repo, heads))
1351 1352 raise error.Abort(msg, hint=hint)
1352 1353
1353 1354 def wrapconvertsink(sink):
1354 1355 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1355 1356 before it is used, whether or not the convert extension was formally loaded.
1356 1357 """
1357 1358 return sink
1358 1359
1359 1360 def unhidehashlikerevs(repo, specs, hiddentype):
1360 1361 """parse the user specs and unhide changesets whose hash or revision number
1361 1362 is passed.
1362 1363
1363 1364 hiddentype can be: 1) 'warn': warn while unhiding changesets
1364 1365 2) 'nowarn': don't warn while unhiding changesets
1365 1366
1366 1367 returns a repo object with the required changesets unhidden
1367 1368 """
1368 1369 if not repo.filtername or not repo.ui.configbool('experimental',
1369 1370 'directaccess'):
1370 1371 return repo
1371 1372
1372 1373 if repo.filtername not in ('visible', 'visible-hidden'):
1373 1374 return repo
1374 1375
1375 1376 symbols = set()
1376 1377 for spec in specs:
1377 1378 try:
1378 1379 tree = revsetlang.parse(spec)
1379 1380 except error.ParseError: # will be reported by scmutil.revrange()
1380 1381 continue
1381 1382
1382 1383 symbols.update(revsetlang.gethashlikesymbols(tree))
1383 1384
1384 1385 if not symbols:
1385 1386 return repo
1386 1387
1387 1388 revs = _getrevsfromsymbols(repo, symbols)
1388 1389
1389 1390 if not revs:
1390 1391 return repo
1391 1392
1392 1393 if hiddentype == 'warn':
1393 1394 unfi = repo.unfiltered()
1394 1395 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1395 1396 repo.ui.warn(_("warning: accessing hidden changesets for write "
1396 1397 "operation: %s\n") % revstr)
1397 1398
1398 1399 # we have to use a new filtername to separate branch/tags caches until we can
1399 1400 # disable these caches when revisions are dynamically pinned.
1400 1401 return repo.filtered('visible-hidden', revs)
1401 1402
1402 1403 def _getrevsfromsymbols(repo, symbols):
1403 1404 """parse the list of symbols and returns a set of revision numbers of hidden
1404 1405 changesets present in symbols"""
1405 1406 revs = set()
1406 1407 unfi = repo.unfiltered()
1407 1408 unficl = unfi.changelog
1408 1409 cl = repo.changelog
1409 1410 tiprev = len(unficl)
1410 1411 pmatch = unficl._partialmatch
1411 1412 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1412 1413 for s in symbols:
1413 1414 try:
1414 1415 n = int(s)
1415 1416 if n <= tiprev:
1416 1417 if not allowrevnums:
1417 1418 continue
1418 1419 else:
1419 1420 if n not in cl:
1420 1421 revs.add(n)
1421 1422 continue
1422 1423 except ValueError:
1423 1424 pass
1424 1425
1425 1426 try:
1426 1427 s = pmatch(s)
1427 1428 except (error.LookupError, error.WdirUnsupported):
1428 1429 s = None
1429 1430
1430 1431 if s is not None:
1431 1432 rev = unficl.rev(s)
1432 1433 if rev not in cl:
1433 1434 revs.add(rev)
1434 1435
1435 1436 return revs