##// END OF EJS Templates
directaccess: do not abort by 'ff...' hash...
Yuya Nishihara -
r37112:7f025c9b default
parent child Browse files
Show More
@@ -1,1430 +1,1430 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 from .utils import (
45 45 stringutil,
46 46 )
47 47
48 48 if pycompat.iswindows:
49 49 from . import scmwindows as scmplatform
50 50 else:
51 51 from . import scmposix as scmplatform
52 52
53 53 termsize = scmplatform.termsize
54 54
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # keep the seven lists in a fixed order so indexing stays stable
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
107 107
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs for every subrepo mentioned by either
    context, preferring the subrepo state from ctx1 when both have one.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # subrepos that exist only in ctx2 are yielded separately below
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
132 132
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for node in (excluded or []):
        ctx = repo[node]
        # only count live secret changesets as "ignored"
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(node)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
149 149
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # emit the traceback first (honors --traceback), then fall
            # through to the per-type handlers below for the user message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # the payload may be unicode, bytes, or anything else; render each
        # case without crashing
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the only handler returning 1 instead of -1
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): an IOError with a .code attribute is presumably an
        # HTTP error object — confirm against the url/urllib layer
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe: the consumer went away; nothing useful to report
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
266 266
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not an acceptable new label (bookmark/branch/tag).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for ch in ':\0\n\r':
        if ch in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(ch))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        # not an integer: acceptable
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
283 283
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
288 288
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    # skip the (relatively costly) portability check entirely when the
    # configuration asks for neither warning nor aborting
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
300 300
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.'''
    val = ui.config('ui', 'portablefilenames')
    lowered = val.lower()
    asbool = stringutil.parsebool(val)
    # on Windows the check is always enforced regardless of configuration
    abort = lowered == 'abort' or pycompat.iswindows
    warn = bool(asbool) or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
313 313
class casecollisionauditor(object):
    '''Detect case-folding collisions among files added to the dirstate.

    Call the instance with each candidate filename; it warns (or aborts,
    when constructed with abort=True) if the lowercased name collides with
    an already-known file that is not itself tracked.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # join/split through '\0' lowers all tracked names in one call,
        # which is cheaper than lowering them one at a time
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
337 337
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when the view filters nothing.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    # only hash revisions at or below maxrev so a cache keyed on a given
    # tip is unaffected by later filtered revisions
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
361 361
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate errors for the root path itself; errors deeper in
        # the walk are silently skipped by os.walk
        if err.filename == path:
            raise err
    # samestat is unavailable on some platforms; without it symlink-cycle
    # detection cannot work, so followsym is forced off below
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat; return False if it was already seen
            # (i.e. a symlink cycle), True if it is new
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
        else:
            followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the symlink manually, sharing
                        # seen_dirs to avoid revisiting directories
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
405 405
def binnode(ctx):
    """Return binary node id for a given basectx"""
    n = ctx.node()
    if n is not None:
        return n
    # a None node means the working directory; use its sentinel id
    return wdirid
412 412
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    r = ctx.rev()
    if r is not None:
        return r
    # a None rev means the working directory; use its sentinel revision
    return wdirrev
420 420
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
426 426
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full 40-digit hash in debug mode, abbreviated hash otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
434 434
def revsingle(repo, revspec, default='.', localalias=None):
    '''Return the single changectx named by revspec (last of its revset),
    or repo[default] when revspec is empty.'''
    # 0 is a valid revision even though it is falsy
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
443 443
def _pairspec(revspec):
    '''Report whether revspec's top-level operator is a range expression.

    Note: returns the falsy parse tree itself (not False) when parsing
    yields nothing; callers rely only on truthiness.
    '''
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
447 447
def revpair(repo, revs):
    '''Resolve revs to a (node, node-or-None) pair for commands taking a
    revision range; the second element is None when only one side is named.'''
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints cheaply when the smartset knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # a degenerate "same rev on both sides" result is only acceptable when
    # every individual spec resolved to something
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
477 477
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are wrapped as rev(N) revset expressions
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
505 505
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        # merge: both parents always shown
        return ps
    if repo.ui.debugflag:
        # debug mode shows the null second parent explicitly
        return [ps[0], repo['null']]
    if ps[0].rev() >= intrev(ctx) - 1:
        # linear history: the parent is implied, show nothing
        return []
    return ps
521 521
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        # only patterns without an explicit kind: prefix are glob-expanded
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # an unparsable pattern is kept verbatim
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # no expansion happened: keep the original (possibly prefixed) pattern
        ret.append(kindpat)
    return ret
540 540
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (windows only; no-op on posix)
        pats = expandpats(pats or [])

    # NOTE: 'bad' closes over 'm', which is assigned below; the callback is
    # only ever invoked after the matcher exists
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # a match-everything matcher means the patterns were vacuous
        pats = []
    return m, pats
565 565
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # delegate to matchandpats and drop the normalized pattern list
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
570 570
def matchall(repo):
    '''Return a matcher that will efficiently match everything.

    The matcher is rooted at the repository root and the current cwd.'''
    return matchmod.always(repo.root, repo.getcwd())
574 574
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.

    badfn, when given, is called for paths that cannot be matched.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
578 578
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError(msg) if the pattern matches anything but exactly
    one file in the revision.
    """
    if matchmod.patkind(pat):
        # a real pattern: it must resolve to exactly one file in rev
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]
    # a plain path: just canonicalize it
    return pathutil.canonpath(repo.root, repo.getcwd(), pat)
592 592
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (a regular file or symlink occupying a needed parent directory)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        # a directory sits where the backup file must go; remove it
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
628 628
629 629 class _containsnode(object):
630 630 """proxy __contains__(node) to container.__contains__ which accepts revs"""
631 631
632 632 def __init__(self, repo, revcontainer):
633 633 self._torev = repo.changelog.rev
634 634 self._revcontains = revcontainer.__contains__
635 635
636 636 def __contains__(self, node):
637 637 return self._revcontains(self._torev(node))
638 638
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
732 732
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Add unknown files, forget missing ones and record renames.

    Returns 1 when a path explicitly named by the matcher was rejected,
    otherwise 0 (or 1 if a subrepo's addremove reported failure).'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into subrepos first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only report explicitly requested paths; remember every rejection
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
788 788
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # 'rejected' is referenced by the badfn lambda before it is assigned;
    # the lambda only runs later, after the list exists
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # return 1 if any explicitly named file could not be processed
    for f in rejected:
        if f in m.files():
            return 1
    return 0
817 817
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the dirstate code ('?' untracked, 'r' removed, 'a' added);
        # st is the stat result, falsy when the file is absent from disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
846 846
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping; empty when similarity is 0 or below.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # only mention the rename when at least one side was not named
            # explicitly, or in verbose mode
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
861 861
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.

    renames is a {new: old} mapping as returned by _findrenames.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
871 871
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow one level of copy chain: if src is itself a copy, credit the
    # original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, not committed: no copy data possible
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
890 890
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file itself
        # is damaged rather than merely listing an unknown feature
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
909 909
def writerequires(opener, requirements):
    """Write the requirements to .hg/requires, one entry per line."""
    # sort for stable, diff-friendly output
    lines = ['%s\n' % requirement for requirement in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
914 914
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    Wraps util.cachestat so callers can ask whether the file changed since
    the last refresh() via changed().
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once known, None until first successful stat
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # record current stat info as the new comparison baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        # note: also updates the baseline when a change is detected
        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; any other stat
        # failure propagates to the caller
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
969 969
class filecacheentry(object):
    """Change tracker over a group of paths, one subentry per path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(subentry.changed() for subentry in self._entries)

    def refresh(self):
        for subentry in self._entries:
            subentry.refresh()
986 986
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator application: remember the wrapped function and its name,
        # which doubles as the key into obj._filecache / obj.__dict__
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # obj.__dict__ is the fast path: once populated it is returned
        # directly until invalidated via __delete__
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop the fast-path copy; the next __get__ re-checks the files
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1065 1065
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # record format: "<revspec>[ <value>]"
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    # surface shell command failures only after cleanup completed
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1120 1120
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    # Run 'cmd' via ui.system as a subprocess allowed to inherit 'lock';
    # the inheritance token is exported to the child under 'envvar'.
    # NOTE: mutates the caller-supplied 'environ' dict when one is given.
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1130 1130
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    # the child process finds the inheritance token in HG_WLOCK_LOCKER
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
1139 1139
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1146 1146
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1152 1152
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE(review): 'keys' is accepted but never used -- presumably kept
        # for interface compatibility; confirm before removing
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # e.g. a line without '=' makes the dict() constructor fail
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp makes the write all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1221 1221
# transaction-name prefixes after which newly obsoleted changesets are
# reported (consumed by registersummarycallback)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes after which the range of new changesets is
# reported (consumed by registersummarycallback)
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1242 1242
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # a report category applies when the transaction name starts with
        # one of the category's source prefixes (e.g. 'pull' matches 'pull ...')
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # categories are numbered so callbacks fire in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot counts before the transaction so the report can show deltas
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            # 'revs' may be absent when nothing was added (py2: xrange)
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1326 1326
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of nodes, eliding all but the first
    maxnumnodes entries unless the ui is verbose."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(n) for n in nodes)
    shown = ' '.join(short(n) for n in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1332 1332
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for branch, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % branch
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1347 1347
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # identity by default; extensions wrap this function to decorate the sink
    return sink
1353 1353
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only the 'visible' family of filters supports dynamic pinning
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1396 1396
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try the symbol as a plain revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    # hidden == present in the unfiltered repo but absent
                    # from the filtered changelog
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise treat it as a (possibly partial) node hash.
        # WdirUnsupported is raised for the magic 'ff...' working-directory
        # hash; treat it like an unknown symbol instead of aborting (the
        # revset layer will report it properly).
        try:
            s = pmatch(s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
@@ -1,201 +1,214 b''
1 1 Tests for access level on hidden commits by various commands on based of their
2 2 type.
3 3
4 4 Setting the required config to start this
5 5
6 6 $ cat >> $HGRCPATH <<EOF
7 7 > [experimental]
8 8 > evolution=createmarkers, allowunstable
9 9 > directaccess=True
10 10 > directaccess.revnums=True
11 11 > [extensions]
12 12 > amend =
13 13 > EOF
14 14
15 15 $ hg init repo
16 16 $ cd repo
17 17 $ for ch in a b c; do touch $ch; echo "foo" >> $ch; hg ci -Aqm "Added "$ch; done
18 18
19 19 $ hg log -G -T '{rev}:{node} {desc}' --hidden
20 20 @ 2:28ad74487de9599d00d81085be739c61fc340652 Added c
21 21 |
22 22 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
23 23 |
24 24 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
25 25
26 26 $ echo "bar" >> c
27 27 $ hg amend
28 28
29 29 $ hg log -G -T '{rev}:{node} {desc}' --hidden
30 30 @ 3:2443a0e664694756d8b435d06b6ad84f941b6fc0 Added c
31 31 |
32 32 | x 2:28ad74487de9599d00d81085be739c61fc340652 Added c
33 33 |/
34 34 o 1:29becc82797a4bc11ec8880b58eaecd2ab3e7760 Added b
35 35 |
36 36 o 0:18d04c59bb5d2d4090ad9a5b59bd6274adb63add Added a
37 37
38 38 Testing read only commands on the hidden revision
39 39
40 40 Testing with rev number
41 41
42 42 $ hg exp 2 --config experimental.directaccess.revnums=False
43 43 abort: hidden revision '2' was rewritten as: 2443a0e66469!
44 44 (use --hidden to access hidden revisions)
45 45 [255]
46 46
47 47 $ hg exp 2
48 48 # HG changeset patch
49 49 # User test
50 50 # Date 0 0
51 51 # Thu Jan 01 00:00:00 1970 +0000
52 52 # Node ID 28ad74487de9599d00d81085be739c61fc340652
53 53 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
54 54 Added c
55 55
56 56 diff -r 29becc82797a -r 28ad74487de9 c
57 57 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
58 58 +++ b/c Thu Jan 01 00:00:00 1970 +0000
59 59 @@ -0,0 +1,1 @@
60 60 +foo
61 61
62 62 $ hg log -r 2
63 63 changeset: 2:28ad74487de9
64 64 user: test
65 65 date: Thu Jan 01 00:00:00 1970 +0000
66 66 obsolete: rewritten using amend as 3:2443a0e66469
67 67 summary: Added c
68 68
69 69 $ hg identify -r 2
70 70 28ad74487de9
71 71
72 72 $ hg status --change 2
73 73 A c
74 74
75 75 $ hg status --change 2 --config experimental.directaccess.revnums=False
76 76 abort: hidden revision '2' was rewritten as: 2443a0e66469!
77 77 (use --hidden to access hidden revisions)
78 78 [255]
79 79
80 80 $ hg diff -c 2
81 81 diff -r 29becc82797a -r 28ad74487de9 c
82 82 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
83 83 +++ b/c Thu Jan 01 00:00:00 1970 +0000
84 84 @@ -0,0 +1,1 @@
85 85 +foo
86 86
87 87 Testing with hash
88 88
89 89 `hg export`
90 90
91 91 $ hg exp 28ad74
92 92 # HG changeset patch
93 93 # User test
94 94 # Date 0 0
95 95 # Thu Jan 01 00:00:00 1970 +0000
96 96 # Node ID 28ad74487de9599d00d81085be739c61fc340652
97 97 # Parent 29becc82797a4bc11ec8880b58eaecd2ab3e7760
98 98 Added c
99 99
100 100 diff -r 29becc82797a -r 28ad74487de9 c
101 101 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
102 102 +++ b/c Thu Jan 01 00:00:00 1970 +0000
103 103 @@ -0,0 +1,1 @@
104 104 +foo
105 105
106 106 `hg log`
107 107
108 108 $ hg log -r 28ad74
109 109 changeset: 2:28ad74487de9
110 110 user: test
111 111 date: Thu Jan 01 00:00:00 1970 +0000
112 112 obsolete: rewritten using amend as 3:2443a0e66469
113 113 summary: Added c
114 114
115 115 `hg cat`
116 116
117 117 $ hg cat -r 28ad74 c
118 118 foo
119 119
120 120 `hg diff`
121 121
122 122 $ hg diff -c 28ad74
123 123 diff -r 29becc82797a -r 28ad74487de9 c
124 124 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
125 125 +++ b/c Thu Jan 01 00:00:00 1970 +0000
126 126 @@ -0,0 +1,1 @@
127 127 +foo
128 128
129 129 `hg files`
130 130
131 131 $ hg files -r 28ad74
132 132 a
133 133 b
134 134 c
135 135
136 136 `hg identify`
137 137
138 138 $ hg identify -r 28ad74
139 139 28ad74487de9
140 140
141 141 `hg annotate`
142 142
143 143 $ hg annotate -r 28ad74 a
144 144 0: foo
145 145
146 146 `hg status`
147 147
148 148 $ hg status --change 28ad74
149 149 A c
150 150
151 151 `hg archive`
152 152
153 153 This should not throw error
154 154 $ hg archive -r 28ad74 foo
155 155
156 156 `hg update`
157 157
158 158 $ hg up 28ad74
159 159 updating to a hidden changeset 28ad74487de9
160 160 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
161 161 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
162 162
163 163 $ hg up 3
164 164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
165 165
166 166 $ hg up
167 167 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 168
169 169 `hg revert`
170 170
171 171 $ hg revert -r 28ad74 --all
172 172 reverting c
173 173
174 174 $ hg diff
175 175 diff -r 2443a0e66469 c
176 176 --- a/c Thu Jan 01 00:00:00 1970 +0000
177 177 +++ b/c Thu Jan 01 00:00:00 1970 +0000
178 178 @@ -1,2 +1,1 @@
179 179 foo
180 180 -bar
181 181
182 Test special hash/rev
183
184 $ hg log -qr 'null:wdir() & 000000000000'
185 -1:000000000000
186 $ hg log -qr 'null:wdir() & ffffffffffff'
187 2147483647:ffffffffffff
188 $ hg log -qr 'null:wdir() & rev(-1)'
189 -1:000000000000
190 $ hg log -qr 'null:wdir() & rev(2147483647)'
191 2147483647:ffffffffffff
192 $ hg log -qr 'null:wdir() & 2147483647'
193 2147483647:ffffffffffff
194
182 195 Commands with undefined cmdtype should not work right now
183 196
184 197 $ hg phase -r 28ad74
185 198 abort: hidden revision '28ad74' was rewritten as: 2443a0e66469!
186 199 (use --hidden to access hidden revisions)
187 200 [255]
188 201
189 202 $ hg phase -r 2
190 203 abort: hidden revision '2' was rewritten as: 2443a0e66469!
191 204 (use --hidden to access hidden revisions)
192 205 [255]
193 206
194 207 Setting a bookmark will make that changeset unhidden, so this should come in end
195 208
196 209 $ hg bookmarks -r 28ad74 book
197 210 bookmarking hidden changeset 28ad74487de9
198 211 (hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
199 212
200 213 $ hg bookmarks
201 214 book 2:28ad74487de9
General Comments 0
You need to be logged in to leave comments. Login now