errors: catch urllib errors specifically instead of using safehasattr()...
Martin von Zweigbergk
r46442:ae00e170 default
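The hunk below replaces attribute sniffing (safehasattr(inst, b"code") / b"reason") inside the broad (IOError, OSError) handler with dedicated except clauses for urllib's error types. A minimal standalone sketch of the resulting pattern, written against the stdlib names that Mercurial reaches through its util.urlerr compatibility aliases:

    import urllib.error
    import urllib.request

    def fetch(url):
        try:
            return urllib.request.urlopen(url).read()
        except urllib.error.HTTPError as inst:
            # HTTPError subclasses URLError, so the narrower clause must
            # come first; the hunk below keeps the same ordering.
            print("abort: %s" % inst)
        except urllib.error.URLError as inst:
            try:  # inst.reason is usually (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                reason = inst.reason  # may be anything, e.g. a string
            print("abort: error: %s" % reason)
        except (IOError, OSError):
            raise  # everything else keeps the generic handling

Catching the concrete types also means a stray OSError that happens to carry a code or reason attribute can no longer be misreported as an HTTP or URL failure.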
@@ -1,2301 +1,2301 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import os
13 13 import posixpath
14 14 import re
15 15 import subprocess
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 bin,
21 21 hex,
22 22 nullid,
23 23 nullrev,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28 from .pycompat import getattr
29 29 from .thirdparty import attr
30 30 from . import (
31 31 copies as copiesmod,
32 32 encoding,
33 33 error,
34 34 match as matchmod,
35 35 obsolete,
36 36 obsutil,
37 37 pathutil,
38 38 phases,
39 39 policy,
40 40 pycompat,
41 41 requirements as requirementsmod,
42 42 revsetlang,
43 43 similar,
44 44 smartset,
45 45 url,
46 46 util,
47 47 vfs,
48 48 )
49 49
50 50 from .utils import (
51 51 hashutil,
52 52 procutil,
53 53 stringutil,
54 54 )
55 55
56 56 if pycompat.iswindows:
57 57 from . import scmwindows as scmplatform
58 58 else:
59 59 from . import scmposix as scmplatform
60 60
61 61 parsers = policy.importmod('parsers')
62 62 rustrevlog = policy.importrust('revlog')
63 63
64 64 termsize = scmplatform.termsize
65 65
66 66
67 67 @attr.s(slots=True, repr=False)
68 68 class status(object):
69 69 '''Struct with a list of files per status.
70 70
71 71 The 'deleted', 'unknown' and 'ignored' properties are only
72 72 relevant to the working copy.
73 73 '''
74 74
75 75 modified = attr.ib(default=attr.Factory(list))
76 76 added = attr.ib(default=attr.Factory(list))
77 77 removed = attr.ib(default=attr.Factory(list))
78 78 deleted = attr.ib(default=attr.Factory(list))
79 79 unknown = attr.ib(default=attr.Factory(list))
80 80 ignored = attr.ib(default=attr.Factory(list))
81 81 clean = attr.ib(default=attr.Factory(list))
82 82
83 83 def __iter__(self):
84 84 yield self.modified
85 85 yield self.added
86 86 yield self.removed
87 87 yield self.deleted
88 88 yield self.unknown
89 89 yield self.ignored
90 90 yield self.clean
91 91
92 92 def __repr__(self):
93 93 return (
94 94 r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
95 95 r'unknown=%s, ignored=%s, clean=%s>'
96 96 ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
97 97
98 98
99 99 def itersubrepos(ctx1, ctx2):
100 100 """find subrepos in ctx1 or ctx2"""
101 101 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 103 # has been modified (in ctx2) but not yet committed (in ctx1).
104 104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106 106
107 107 missing = set()
108 108
109 109 for subpath in ctx2.substate:
110 110 if subpath not in ctx1.substate:
111 111 del subpaths[subpath]
112 112 missing.add(subpath)
113 113
114 114 for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
115 115 yield subpath, ctx.sub(subpath)
116 116
117 117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 118 # status and diff will have an accurate result when it does
119 119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 120 # against itself.
121 121 for subpath in missing:
122 122 yield subpath, ctx2.nullsub(subpath, ctx1)
123 123
124 124
125 125 def nochangesfound(ui, repo, excluded=None):
126 126 '''Report no changes for push/pull; excluded is None or a list of
127 127 nodes excluded from the push/pull.
128 128 '''
129 129 secretlist = []
130 130 if excluded:
131 131 for n in excluded:
132 132 ctx = repo[n]
133 133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 134 secretlist.append(n)
135 135
136 136 if secretlist:
137 137 ui.status(
138 138 _(b"no changes found (ignored %d secret changesets)\n")
139 139 % len(secretlist)
140 140 )
141 141 else:
142 142 ui.status(_(b"no changes found\n"))
143 143
144 144
145 145 def callcatch(ui, func):
146 146 """call func() with global exception handling
147 147
148 148 return func() if no exception happens. otherwise do some error handling
149 149 and return an exit code accordingly. does not handle all exceptions.
150 150 """
151 151 coarse_exit_code = -1
152 152 detailed_exit_code = -1
153 153 try:
154 154 try:
155 155 return func()
156 156 except: # re-raises
157 157 ui.traceback()
158 158 raise
159 159 # Global exception handling, alphabetically
160 160 # Mercurial-specific first, followed by built-in and library exceptions
161 161 except error.LockHeld as inst:
162 162 detailed_exit_code = 20
163 163 if inst.errno == errno.ETIMEDOUT:
164 164 reason = _(b'timed out waiting for lock held by %r') % (
165 165 pycompat.bytestr(inst.locker)
166 166 )
167 167 else:
168 168 reason = _(b'lock held by %r') % inst.locker
169 169 ui.error(
170 170 _(b"abort: %s: %s\n")
171 171 % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
172 172 )
173 173 if not inst.locker:
174 174 ui.error(_(b"(lock might be very busy)\n"))
175 175 except error.LockUnavailable as inst:
176 176 detailed_exit_code = 20
177 177 ui.error(
178 178 _(b"abort: could not lock %s: %s\n")
179 179 % (
180 180 inst.desc or stringutil.forcebytestr(inst.filename),
181 181 encoding.strtolocal(inst.strerror),
182 182 )
183 183 )
184 184 except error.OutOfBandError as inst:
185 185 if inst.args:
186 186 msg = _(b"abort: remote error:\n")
187 187 else:
188 188 msg = _(b"abort: remote error\n")
189 189 ui.error(msg)
190 190 if inst.args:
191 191 ui.error(b''.join(inst.args))
192 192 if inst.hint:
193 193 ui.error(b'(%s)\n' % inst.hint)
194 194 except error.RepoError as inst:
195 195 ui.error(_(b"abort: %s!\n") % inst)
196 196 if inst.hint:
197 197 ui.error(_(b"(%s)\n") % inst.hint)
198 198 except error.ResponseError as inst:
199 199 ui.error(_(b"abort: %s") % inst.args[0])
200 200 msg = inst.args[1]
201 201 if isinstance(msg, type(u'')):
202 202 msg = pycompat.sysbytes(msg)
203 203 if not isinstance(msg, bytes):
204 204 ui.error(b" %r\n" % (msg,))
205 205 elif not msg:
206 206 ui.error(_(b" empty string\n"))
207 207 else:
208 208 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
209 209 except error.CensoredNodeError as inst:
210 210 ui.error(_(b"abort: file censored %s!\n") % inst)
211 211 except error.StorageError as inst:
212 212 ui.error(_(b"abort: %s!\n") % inst)
213 213 if inst.hint:
214 214 ui.error(_(b"(%s)\n") % inst.hint)
215 215 except error.InterventionRequired as inst:
216 216 ui.error(b"%s\n" % inst)
217 217 if inst.hint:
218 218 ui.error(_(b"(%s)\n") % inst.hint)
219 219 detailed_exit_code = 240
220 220 coarse_exit_code = 1
221 221 except error.WdirUnsupported:
222 222 ui.error(_(b"abort: working directory revision cannot be specified\n"))
223 223 except error.Abort as inst:
224 224 if isinstance(inst, error.InputError):
225 225 detailed_exit_code = 10
226 226 ui.error(_(b"abort: %s\n") % inst.message)
227 227 if inst.hint:
228 228 ui.error(_(b"(%s)\n") % inst.hint)
229 229 except error.WorkerError as inst:
230 230 # Don't print a message -- the worker already should have
231 231 return inst.status_code
232 232 except ImportError as inst:
233 233 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
234 234 m = stringutil.forcebytestr(inst).split()[-1]
235 235 if m in b"mpatch bdiff".split():
236 236 ui.error(_(b"(did you forget to compile extensions?)\n"))
237 237 elif m in b"zlib".split():
238 238 ui.error(_(b"(is your Python install correct?)\n"))
239 except util.urlerr.httperror as inst:
240 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
241 except util.urlerr.urlerror as inst:
242 try: # usually it is in the form (errno, strerror)
243 reason = inst.reason.args[1]
244 except (AttributeError, IndexError):
245 # it might be anything, for example a string
246 reason = inst.reason
247 if isinstance(reason, pycompat.unicode):
248 # SSLError of Python 2.7.9 contains a unicode
249 reason = encoding.unitolocal(reason)
250 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
239 251 except (IOError, OSError) as inst:
240 if util.safehasattr(inst, b"code"): # HTTPError
241 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
242 elif util.safehasattr(inst, b"reason"): # URLError or SSLError
243 try: # usually it is in the form (errno, strerror)
244 reason = inst.reason.args[1]
245 except (AttributeError, IndexError):
246 # it might be anything, for example a string
247 reason = inst.reason
248 if isinstance(reason, pycompat.unicode):
249 # SSLError of Python 2.7.9 contains a unicode
250 reason = encoding.unitolocal(reason)
251 ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
252 elif (
252 if (
253 253 util.safehasattr(inst, b"args")
254 254 and inst.args
255 255 and inst.args[0] == errno.EPIPE
256 256 ):
257 257 pass
258 258 elif getattr(inst, "strerror", None): # common IOError or OSError
259 259 if getattr(inst, "filename", None) is not None:
260 260 ui.error(
261 261 _(b"abort: %s: '%s'\n")
262 262 % (
263 263 encoding.strtolocal(inst.strerror),
264 264 stringutil.forcebytestr(inst.filename),
265 265 )
266 266 )
267 267 else:
268 268 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
269 269 else: # suspicious IOError
270 270 raise
271 271 except MemoryError:
272 272 ui.error(_(b"abort: out of memory\n"))
273 273 except SystemExit as inst:
274 274 # Commands shouldn't sys.exit directly, but give a return code.
275 275 # Just in case, catch this and pass the exit code to the caller.
276 276 detailed_exit_code = 254
277 277 coarse_exit_code = inst.code
278 278
279 279 if ui.configbool(b'ui', b'detailed-exit-code'):
280 280 return detailed_exit_code
281 281 else:
282 282 return coarse_exit_code
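A minimal sketch of how a dispatcher might drive callcatch(); the real entry point lives in mercurial.dispatch, and body here is a stand-in for an actual command invocation:

    def body():
        raise error.Abort(b'demo failure')  # any command body works here

    ret = callcatch(ui, body)  # prints "abort: demo failure", returns an exit code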
283 283
284 284
285 285 def checknewlabel(repo, lbl, kind):
286 286 # Do not use the "kind" parameter in ui output.
287 287 # It makes strings difficult to translate.
288 288 if lbl in [b'tip', b'.', b'null']:
289 289 raise error.Abort(_(b"the name '%s' is reserved") % lbl)
290 290 for c in (b':', b'\0', b'\n', b'\r'):
291 291 if c in lbl:
292 292 raise error.Abort(
293 293 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
294 294 )
295 295 try:
296 296 int(lbl)
297 297 raise error.Abort(_(b"cannot use an integer as a name"))
298 298 except ValueError:
299 299 pass
300 300 if lbl.strip() != lbl:
301 301 raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
302 302
303 303
304 304 def checkfilename(f):
305 305 '''Check that the filename f is an acceptable filename for a tracked file'''
306 306 if b'\r' in f or b'\n' in f:
307 307 raise error.Abort(
308 308 _(b"'\\n' and '\\r' disallowed in filenames: %r")
309 309 % pycompat.bytestr(f)
310 310 )
311 311
312 312
313 313 def checkportable(ui, f):
314 314 '''Check if filename f is portable and warn or abort depending on config'''
315 315 checkfilename(f)
316 316 abort, warn = checkportabilityalert(ui)
317 317 if abort or warn:
318 318 msg = util.checkwinfilename(f)
319 319 if msg:
320 320 msg = b"%s: %s" % (msg, procutil.shellquote(f))
321 321 if abort:
322 322 raise error.Abort(msg)
323 323 ui.warn(_(b"warning: %s\n") % msg)
324 324
325 325
326 326 def checkportabilityalert(ui):
327 327 '''check if the user's config requests nothing, a warning, or abort for
328 328 non-portable filenames'''
329 329 val = ui.config(b'ui', b'portablefilenames')
330 330 lval = val.lower()
331 331 bval = stringutil.parsebool(val)
332 332 abort = pycompat.iswindows or lval == b'abort'
333 333 warn = bval or lval == b'warn'
334 334 if bval is None and not (warn or abort or lval == b'ignore'):
335 335 raise error.ConfigError(
336 336 _(b"ui.portablefilenames value is invalid ('%s')") % val
337 337 )
338 338 return abort, warn
339 339
340 340
341 341 class casecollisionauditor(object):
342 342 def __init__(self, ui, abort, dirstate):
343 343 self._ui = ui
344 344 self._abort = abort
345 345 allfiles = b'\0'.join(dirstate)
346 346 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
347 347 self._dirstate = dirstate
348 348 # The purpose of _newfiles is so that we don't complain about
349 349 # case collisions if someone were to call this object with the
350 350 # same filename twice.
351 351 self._newfiles = set()
352 352
353 353 def __call__(self, f):
354 354 if f in self._newfiles:
355 355 return
356 356 fl = encoding.lower(f)
357 357 if fl in self._loweredfiles and f not in self._dirstate:
358 358 msg = _(b'possible case-folding collision for %s') % f
359 359 if self._abort:
360 360 raise error.Abort(msg)
361 361 self._ui.warn(_(b"warning: %s\n") % msg)
362 362 self._loweredfiles.add(fl)
363 363 self._newfiles.add(f)
364 364
365 365
366 366 def filteredhash(repo, maxrev):
367 367 """build hash of filtered revisions in the current repoview.
368 368
369 369 Multiple caches perform up-to-date validation by checking that the
370 370 tiprev and tipnode stored in the cache file match the current repository.
371 371 However, this is not sufficient for validating repoviews because the set
372 372 of revisions in the view may change without the repository tiprev and
373 373 tipnode changing.
374 374
375 375 This function hashes all the revs filtered from the view and returns
376 376 that SHA-1 digest.
377 377 """
378 378 cl = repo.changelog
379 379 if not cl.filteredrevs:
380 380 return None
381 381 key = cl._filteredrevs_hashcache.get(maxrev)
382 382 if not key:
383 383 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
384 384 if revs:
385 385 s = hashutil.sha1()
386 386 for rev in revs:
387 387 s.update(b'%d;' % rev)
388 388 key = s.digest()
389 389 cl._filteredrevs_hashcache[maxrev] = key
390 390 return key
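An assumed consumer pattern for filteredhash(), following its docstring: a cache writes the digest next to tiprev/tipnode and compares it on read (storedkey below is hypothetical):

    key = filteredhash(repo, maxrev=len(repo) - 1)
    cachevalid = key == storedkey  # both are None when nothing is filtered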
391 391
392 392
393 393 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
394 394 '''yield every hg repository under path, always recursively.
395 395 The recurse flag will only control recursion into repo working dirs'''
396 396
397 397 def errhandler(err):
398 398 if err.filename == path:
399 399 raise err
400 400
401 401 samestat = getattr(os.path, 'samestat', None)
402 402 if followsym and samestat is not None:
403 403
404 404 def adddir(dirlst, dirname):
405 405 dirstat = os.stat(dirname)
406 406 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
407 407 if not match:
408 408 dirlst.append(dirstat)
409 409 return not match
410 410
411 411 else:
412 412 followsym = False
413 413
414 414 if (seen_dirs is None) and followsym:
415 415 seen_dirs = []
416 416 adddir(seen_dirs, path)
417 417 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
418 418 dirs.sort()
419 419 if b'.hg' in dirs:
420 420 yield root # found a repository
421 421 qroot = os.path.join(root, b'.hg', b'patches')
422 422 if os.path.isdir(os.path.join(qroot, b'.hg')):
423 423 yield qroot # we have a patch queue repo here
424 424 if recurse:
425 425 # avoid recursing inside the .hg directory
426 426 dirs.remove(b'.hg')
427 427 else:
428 428 dirs[:] = [] # don't descend further
429 429 elif followsym:
430 430 newdirs = []
431 431 for d in dirs:
432 432 fname = os.path.join(root, d)
433 433 if adddir(seen_dirs, fname):
434 434 if os.path.islink(fname):
435 435 for hgname in walkrepos(fname, True, seen_dirs):
436 436 yield hgname
437 437 else:
438 438 newdirs.append(d)
439 439 dirs[:] = newdirs
440 440
441 441
442 442 def binnode(ctx):
443 443 """Return binary node id for a given basectx"""
444 444 node = ctx.node()
445 445 if node is None:
446 446 return wdirid
447 447 return node
448 448
449 449
450 450 def intrev(ctx):
451 451 """Return integer for a given basectx that can be used in comparison or
452 452 arithmetic operation"""
453 453 rev = ctx.rev()
454 454 if rev is None:
455 455 return wdirrev
456 456 return rev
457 457
458 458
459 459 def formatchangeid(ctx):
460 460 """Format changectx as '{rev}:{node|formatnode}', which is the default
461 461 template provided by logcmdutil.changesettemplater"""
462 462 repo = ctx.repo()
463 463 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
464 464
465 465
466 466 def formatrevnode(ui, rev, node):
467 467 """Format given revision and node depending on the current verbosity"""
468 468 if ui.debugflag:
469 469 hexfunc = hex
470 470 else:
471 471 hexfunc = short
472 472 return b'%d:%s' % (rev, hexfunc(node))
473 473
474 474
475 475 def resolvehexnodeidprefix(repo, prefix):
476 476 if prefix.startswith(b'x'):
477 477 prefix = prefix[1:]
478 478 try:
479 479 # Uses unfiltered repo because it's faster when prefix is ambiguous.
480 480 # This matches the shortesthexnodeidprefix() function below.
481 481 node = repo.unfiltered().changelog._partialmatch(prefix)
482 482 except error.AmbiguousPrefixLookupError:
483 483 revset = repo.ui.config(
484 484 b'experimental', b'revisions.disambiguatewithin'
485 485 )
486 486 if revset:
487 487 # Clear config to avoid infinite recursion
488 488 configoverrides = {
489 489 (b'experimental', b'revisions.disambiguatewithin'): None
490 490 }
491 491 with repo.ui.configoverride(configoverrides):
492 492 revs = repo.anyrevs([revset], user=True)
493 493 matches = []
494 494 for rev in revs:
495 495 node = repo.changelog.node(rev)
496 496 if hex(node).startswith(prefix):
497 497 matches.append(node)
498 498 if len(matches) == 1:
499 499 return matches[0]
500 500 raise
501 501 if node is None:
502 502 return
503 503 repo.changelog.rev(node) # make sure node isn't filtered
504 504 return node
505 505
506 506
507 507 def mayberevnum(repo, prefix):
508 508 """Checks if the given prefix may be mistaken for a revision number"""
509 509 try:
510 510 i = int(prefix)
511 511 # if we are a pure int, then starting with zero will not be
512 512 # confused as a rev; or, obviously, if the int is larger
513 513 # than the value of the tip rev. We still need to disambiguate if
514 514 # prefix == '0', since that *is* a valid revnum.
515 515 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
516 516 return False
517 517 return True
518 518 except ValueError:
519 519 return False
520 520
521 521
522 522 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
523 523 """Find the shortest unambiguous prefix that matches hexnode.
524 524
525 525 If "cache" is not None, it must be a dictionary that can be used for
526 526 caching between calls to this method.
527 527 """
528 528 # _partialmatch() of filtered changelog could take O(len(repo)) time,
529 529 # which would be unacceptably slow. So we look for hash collisions in
530 530 # unfiltered space, which means some hashes may be slightly longer.
531 531
532 532 minlength = max(minlength, 1)
533 533
534 534 def disambiguate(prefix):
535 535 """Disambiguate against revnums."""
536 536 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
537 537 if mayberevnum(repo, prefix):
538 538 return b'x' + prefix
539 539 else:
540 540 return prefix
541 541
542 542 hexnode = hex(node)
543 543 for length in range(len(prefix), len(hexnode) + 1):
544 544 prefix = hexnode[:length]
545 545 if not mayberevnum(repo, prefix):
546 546 return prefix
547 547
548 548 cl = repo.unfiltered().changelog
549 549 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
550 550 if revset:
551 551 revs = None
552 552 if cache is not None:
553 553 revs = cache.get(b'disambiguationrevset')
554 554 if revs is None:
555 555 revs = repo.anyrevs([revset], user=True)
556 556 if cache is not None:
557 557 cache[b'disambiguationrevset'] = revs
558 558 if cl.rev(node) in revs:
559 559 hexnode = hex(node)
560 560 nodetree = None
561 561 if cache is not None:
562 562 nodetree = cache.get(b'disambiguationnodetree')
563 563 if not nodetree:
564 564 if util.safehasattr(parsers, 'nodetree'):
565 565 # The CExt is the only implementation to provide a nodetree
566 566 # class so far.
567 567 index = cl.index
568 568 if util.safehasattr(index, 'get_cindex'):
569 569 # the rust wrapper needs to give access to its internal index
570 570 index = index.get_cindex()
571 571 nodetree = parsers.nodetree(index, len(revs))
572 572 for r in revs:
573 573 nodetree.insert(r)
574 574 if cache is not None:
575 575 cache[b'disambiguationnodetree'] = nodetree
576 576 if nodetree is not None:
577 577 length = max(nodetree.shortest(node), minlength)
578 578 prefix = hexnode[:length]
579 579 return disambiguate(prefix)
580 580 for length in range(minlength, len(hexnode) + 1):
581 581 matches = []
582 582 prefix = hexnode[:length]
583 583 for rev in revs:
584 584 otherhexnode = repo[rev].hex()
585 585 if prefix == otherhexnode[:length]:
586 586 matches.append(otherhexnode)
587 587 if len(matches) == 1:
588 588 return disambiguate(prefix)
589 589
590 590 try:
591 591 return disambiguate(cl.shortest(node, minlength))
592 592 except error.LookupError:
593 593 raise error.RepoLookupError()
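A sketch of the round trip between the two helpers above, assuming the experimental.revisions.prefixhexnode option is enabled and node is a changelog node: a prefix such as "1234" could be read as a revision number, so it is emitted as "x1234", and the lookup side strips the "x" again.

    prefix = shortesthexnodeidprefix(repo, node, minlength=4)
    assert resolvehexnodeidprefix(repo, prefix) == node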
594 594
595 595
596 596 def isrevsymbol(repo, symbol):
597 597 """Checks if a symbol exists in the repo.
598 598
599 599 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
600 600 symbol is an ambiguous nodeid prefix.
601 601 """
602 602 try:
603 603 revsymbol(repo, symbol)
604 604 return True
605 605 except error.RepoLookupError:
606 606 return False
607 607
608 608
609 609 def revsymbol(repo, symbol):
610 610 """Returns a context given a single revision symbol (as string).
611 611
612 612 This is similar to revsingle(), but accepts only a single revision symbol,
613 613 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
614 614 not "max(public())".
615 615 """
616 616 if not isinstance(symbol, bytes):
617 617 msg = (
618 618 b"symbol (%s of type %s) was not a string, did you mean "
619 619 b"repo[symbol]?" % (symbol, type(symbol))
620 620 )
621 621 raise error.ProgrammingError(msg)
622 622 try:
623 623 if symbol in (b'.', b'tip', b'null'):
624 624 return repo[symbol]
625 625
626 626 try:
627 627 r = int(symbol)
628 628 if b'%d' % r != symbol:
629 629 raise ValueError
630 630 l = len(repo.changelog)
631 631 if r < 0:
632 632 r += l
633 633 if r < 0 or r >= l and r != wdirrev:
634 634 raise ValueError
635 635 return repo[r]
636 636 except error.FilteredIndexError:
637 637 raise
638 638 except (ValueError, OverflowError, IndexError):
639 639 pass
640 640
641 641 if len(symbol) == 40:
642 642 try:
643 643 node = bin(symbol)
644 644 rev = repo.changelog.rev(node)
645 645 return repo[rev]
646 646 except error.FilteredLookupError:
647 647 raise
648 648 except (TypeError, LookupError):
649 649 pass
650 650
651 651 # look up bookmarks through the name interface
652 652 try:
653 653 node = repo.names.singlenode(repo, symbol)
654 654 rev = repo.changelog.rev(node)
655 655 return repo[rev]
656 656 except KeyError:
657 657 pass
658 658
659 659 node = resolvehexnodeidprefix(repo, symbol)
660 660 if node is not None:
661 661 rev = repo.changelog.rev(node)
662 662 return repo[rev]
663 663
664 664 raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
665 665
666 666 except error.WdirUnsupported:
667 667 return repo[None]
668 668 except (
669 669 error.FilteredIndexError,
670 670 error.FilteredLookupError,
671 671 error.FilteredRepoLookupError,
672 672 ):
673 673 raise _filterederror(repo, symbol)
674 674
675 675
676 676 def _filterederror(repo, changeid):
677 677 """build an exception to be raised about a filtered changeid
678 678
679 679 This is extracted in a function to help extensions (eg: evolve) to
680 680 experiment with various message variants."""
681 681 if repo.filtername.startswith(b'visible'):
682 682
683 683 # Check if the changeset is obsolete
684 684 unfilteredrepo = repo.unfiltered()
685 685 ctx = revsymbol(unfilteredrepo, changeid)
686 686
687 687 # If the changeset is obsolete, enrich the message with the reason
688 688 # that made this changeset not visible
689 689 if ctx.obsolete():
690 690 msg = obsutil._getfilteredreason(repo, changeid, ctx)
691 691 else:
692 692 msg = _(b"hidden revision '%s'") % changeid
693 693
694 694 hint = _(b'use --hidden to access hidden revisions')
695 695
696 696 return error.FilteredRepoLookupError(msg, hint=hint)
697 697 msg = _(b"filtered revision '%s' (not in '%s' subset)")
698 698 msg %= (changeid, repo.filtername)
699 699 return error.FilteredRepoLookupError(msg)
700 700
701 701
702 702 def revsingle(repo, revspec, default=b'.', localalias=None):
703 703 if not revspec and revspec != 0:
704 704 return repo[default]
705 705
706 706 l = revrange(repo, [revspec], localalias=localalias)
707 707 if not l:
708 708 raise error.Abort(_(b'empty revision set'))
709 709 return repo[l.last()]
710 710
711 711
712 712 def _pairspec(revspec):
713 713 tree = revsetlang.parse(revspec)
714 714 return tree and tree[0] in (
715 715 b'range',
716 716 b'rangepre',
717 717 b'rangepost',
718 718 b'rangeall',
719 719 )
720 720
721 721
722 722 def revpair(repo, revs):
723 723 if not revs:
724 724 return repo[b'.'], repo[None]
725 725
726 726 l = revrange(repo, revs)
727 727
728 728 if not l:
729 729 raise error.Abort(_(b'empty revision range'))
730 730
731 731 first = l.first()
732 732 second = l.last()
733 733
734 734 if (
735 735 first == second
736 736 and len(revs) >= 2
737 737 and not all(revrange(repo, [r]) for r in revs)
738 738 ):
739 739 raise error.Abort(_(b'empty revision on one side of range'))
740 740
741 741 # if top-level is range expression, the result must always be a pair
742 742 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
743 743 return repo[first], repo[None]
744 744
745 745 return repo[first], repo[second]
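Assumed usage of revpair(), e.g. from a diff-like command: two revision arguments (or a single range expression) become a (from, to) pair of contexts.

    ctx1, ctx2 = revpair(repo, [b'.^', b'.'])  # first parent of '.' vs. '.'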
746 746
747 747
748 748 def revrange(repo, specs, localalias=None):
749 749 """Execute 1 to many revsets and return the union.
750 750
751 751 This is the preferred mechanism for executing revsets using user-specified
752 752 config options, such as revset aliases.
753 753
754 754 The revsets specified by ``specs`` will be executed via a chained ``OR``
755 755 expression. If ``specs`` is empty, an empty result is returned.
756 756
757 757 ``specs`` can contain integers, in which case they are assumed to be
758 758 revision numbers.
759 759
760 760 It is assumed the revsets are already formatted. If you have arguments
761 761 that need to be expanded in the revset, call ``revsetlang.formatspec()``
762 762 and pass the result as an element of ``specs``.
763 763
764 764 Specifying a single revset is allowed.
765 765
766 766 Returns a ``smartset.abstractsmartset`` which is a list-like interface over
767 767 integer revisions.
768 768 """
769 769 allspecs = []
770 770 for spec in specs:
771 771 if isinstance(spec, int):
772 772 spec = revsetlang.formatspec(b'%d', spec)
773 773 allspecs.append(spec)
774 774 return repo.anyrevs(allspecs, user=True, localalias=localalias)
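Assumed usage of revrange(): the union of several user-level revsets, with bare integers accepted as revision numbers per the docstring above.

    revs = revrange(repo, [b'draft()', 0])
    for rev in revs:  # smartset of integer revisions
        ui.write(b'%d\n' % rev)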
775 775
776 776
777 777 def increasingwindows(windowsize=8, sizelimit=512):
778 778 while True:
779 779 yield windowsize
780 780 if windowsize < sizelimit:
781 781 windowsize *= 2
782 782
783 783
784 784 def walkchangerevs(repo, revs, makefilematcher, prepare):
785 785 '''Iterate over files and the revs in a "windowed" way.
786 786
787 787 Callers most commonly need to iterate backwards over the history
788 788 in which they are interested. Doing so has awful (quadratic-looking)
789 789 performance, so we use iterators in a "windowed" way.
790 790
791 791 We walk a window of revisions in the desired order. Within the
792 792 window, we first walk forwards to gather data, then in the desired
793 793 order (usually backwards) to display it.
794 794
795 795 This function returns an iterator yielding contexts. Before
796 796 yielding each context, the iterator will first call the prepare
797 797 function on each context in the window in forward order.'''
798 798
799 799 if not revs:
800 800 return []
801 801 change = repo.__getitem__
802 802
803 803 def iterate():
804 804 it = iter(revs)
805 805 stopiteration = False
806 806 for windowsize in increasingwindows():
807 807 nrevs = []
808 808 for i in pycompat.xrange(windowsize):
809 809 rev = next(it, None)
810 810 if rev is None:
811 811 stopiteration = True
812 812 break
813 813 nrevs.append(rev)
814 814 for rev in sorted(nrevs):
815 815 ctx = change(rev)
816 816 prepare(ctx, makefilematcher(ctx))
817 817 for rev in nrevs:
818 818 yield change(rev)
819 819
820 820 if stopiteration:
821 821 break
822 822
823 823 return iterate()
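A sketch of the walkchangerevs() contract with stand-in callbacks: prepare() runs forward over each window before the contexts are yielded back in the order of revs.

    def prepare(ctx, filematcher):
        pass  # e.g. warm caches for ctx

    revs = repo.revs(b'reverse(all())')
    for ctx in walkchangerevs(repo, revs, lambda ctx: matchall(repo), prepare):
        ui.write(b'%d\n' % ctx.rev())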
824 824
825 825
826 826 def meaningfulparents(repo, ctx):
827 827 """Return list of meaningful (or all if debug) parentrevs for rev.
828 828
829 829 For merges (two non-nullrev revisions) both parents are meaningful.
830 830 Otherwise the first parent revision is considered meaningful if it
831 831 is not the preceding revision.
832 832 """
833 833 parents = ctx.parents()
834 834 if len(parents) > 1:
835 835 return parents
836 836 if repo.ui.debugflag:
837 837 return [parents[0], repo[nullrev]]
838 838 if parents[0].rev() >= intrev(ctx) - 1:
839 839 return []
840 840 return parents
841 841
842 842
843 843 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
844 844 """Return a function that produced paths for presenting to the user.
845 845
846 846 The returned function takes a repo-relative path and produces a path
847 847 that can be presented in the UI.
848 848
849 849 Depending on the value of ui.relative-paths, either a repo-relative or
850 850 cwd-relative path will be produced.
851 851
852 852 legacyrelativevalue is the value to use if ui.relative-paths=legacy
853 853
854 854 If forcerelativevalue is not None, then that value will be used regardless
855 855 of what ui.relative-paths is set to.
856 856 """
857 857 if forcerelativevalue is not None:
858 858 relative = forcerelativevalue
859 859 else:
860 860 config = repo.ui.config(b'ui', b'relative-paths')
861 861 if config == b'legacy':
862 862 relative = legacyrelativevalue
863 863 else:
864 864 relative = stringutil.parsebool(config)
865 865 if relative is None:
866 866 raise error.ConfigError(
867 867 _(b"ui.relative-paths is not a boolean ('%s')") % config
868 868 )
869 869
870 870 if relative:
871 871 cwd = repo.getcwd()
872 872 if cwd != b'':
873 873 # this branch would work even if cwd == b'' (ie cwd = repo
874 874 # root), but its generality makes the returned function slower
875 875 pathto = repo.pathto
876 876 return lambda f: pathto(f, cwd)
877 877 if repo.ui.configbool(b'ui', b'slash'):
878 878 return lambda f: f
879 879 else:
880 880 return util.localpath
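Typical consumer of getuipathfn(), mirroring addremove() later in this file: translate repo-relative paths just before printing them.

    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    f = b'subdir/a.txt'  # repo-relative path
    ui.status(b'adding %s\n' % uipathfn(f))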
881 881
882 882
883 883 def subdiruipathfn(subpath, uipathfn):
884 884 '''Create a new uipathfn that treats the file as relative to subpath.'''
885 885 return lambda f: uipathfn(posixpath.join(subpath, f))
886 886
887 887
888 888 def anypats(pats, opts):
889 889 '''Checks if any patterns, including --include and --exclude, were given.
890 890
891 891 Some commands (e.g. addremove) use this condition for deciding whether to
892 892 print absolute or relative paths.
893 893 '''
894 894 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
895 895
896 896
897 897 def expandpats(pats):
898 898 '''Expand bare globs when running on windows.
899 899 On posix we assume it has already been done by sh.'''
900 900 if not util.expandglobs:
901 901 return list(pats)
902 902 ret = []
903 903 for kindpat in pats:
904 904 kind, pat = matchmod._patsplit(kindpat, None)
905 905 if kind is None:
906 906 try:
907 907 globbed = glob.glob(pat)
908 908 except re.error:
909 909 globbed = [pat]
910 910 if globbed:
911 911 ret.extend(globbed)
912 912 continue
913 913 ret.append(kindpat)
914 914 return ret
915 915
916 916
917 917 def matchandpats(
918 918 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
919 919 ):
920 920 '''Return a matcher and the patterns that were used.
921 921 The matcher will warn about bad matches, unless an alternate badfn callback
922 922 is provided.'''
923 923 if opts is None:
924 924 opts = {}
925 925 if not globbed and default == b'relpath':
926 926 pats = expandpats(pats or [])
927 927
928 928 uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
929 929
930 930 def bad(f, msg):
931 931 ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
932 932
933 933 if badfn is None:
934 934 badfn = bad
935 935
936 936 m = ctx.match(
937 937 pats,
938 938 opts.get(b'include'),
939 939 opts.get(b'exclude'),
940 940 default,
941 941 listsubrepos=opts.get(b'subrepos'),
942 942 badfn=badfn,
943 943 )
944 944
945 945 if m.always():
946 946 pats = []
947 947 return m, pats
948 948
949 949
950 950 def match(
951 951 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
952 952 ):
953 953 '''Return a matcher that will warn about bad matches.'''
954 954 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
955 955
956 956
957 957 def matchall(repo):
958 958 '''Return a matcher that will efficiently match everything.'''
959 959 return matchmod.always()
960 960
961 961
962 962 def matchfiles(repo, files, badfn=None):
963 963 '''Return a matcher that will efficiently match exactly these files.'''
964 964 return matchmod.exact(files, badfn=badfn)
965 965
966 966
967 967 def parsefollowlinespattern(repo, rev, pat, msg):
968 968 """Return a file name from `pat` pattern suitable for usage in followlines
969 969 logic.
970 970 """
971 971 if not matchmod.patkind(pat):
972 972 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
973 973 else:
974 974 ctx = repo[rev]
975 975 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
976 976 files = [f for f in ctx if m(f)]
977 977 if len(files) != 1:
978 978 raise error.ParseError(msg)
979 979 return files[0]
980 980
981 981
982 982 def getorigvfs(ui, repo):
983 983 """return a vfs suitable to save 'orig' file
984 984
985 985 return None if no special directory is configured"""
986 986 origbackuppath = ui.config(b'ui', b'origbackuppath')
987 987 if not origbackuppath:
988 988 return None
989 989 return vfs.vfs(repo.wvfs.join(origbackuppath))
990 990
991 991
992 992 def backuppath(ui, repo, filepath):
993 993 '''customize where working copy backup files (.orig files) are created
994 994
995 995 Fetch user defined path from config file: [ui] origbackuppath = <path>
996 996 Fall back to default (filepath with .orig suffix) if not specified
997 997
998 998 filepath is repo-relative
999 999
1000 1000 Returns an absolute path
1001 1001 '''
1002 1002 origvfs = getorigvfs(ui, repo)
1003 1003 if origvfs is None:
1004 1004 return repo.wjoin(filepath + b".orig")
1005 1005
1006 1006 origbackupdir = origvfs.dirname(filepath)
1007 1007 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
1008 1008 ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
1009 1009
1010 1010 # Remove any files that conflict with the backup file's path
1011 1011 for f in reversed(list(pathutil.finddirs(filepath))):
1012 1012 if origvfs.isfileorlink(f):
1013 1013 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
1014 1014 origvfs.unlink(f)
1015 1015 break
1016 1016
1017 1017 origvfs.makedirs(origbackupdir)
1018 1018
1019 1019 if origvfs.isdir(filepath) and not origvfs.islink(filepath):
1020 1020 ui.note(
1021 1021 _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
1022 1022 )
1023 1023 origvfs.rmtree(filepath, forcibly=True)
1024 1024
1025 1025 return origvfs.join(filepath)
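Hypothetical use of backuppath(): with no [ui] origbackuppath configured this returns an absolute <repo>/foo.txt.orig; with one configured, a path under that directory.

    backup = backuppath(ui, repo, b'foo.txt')
    util.rename(repo.wjoin(b'foo.txt'), backup)  # stash the old content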
1026 1026
1027 1027
1028 1028 class _containsnode(object):
1029 1029 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1030 1030
1031 1031 def __init__(self, repo, revcontainer):
1032 1032 self._torev = repo.changelog.rev
1033 1033 self._revcontains = revcontainer.__contains__
1034 1034
1035 1035 def __contains__(self, node):
1036 1036 return self._revcontains(self._torev(node))
1037 1037
1038 1038
1039 1039 def cleanupnodes(
1040 1040 repo,
1041 1041 replacements,
1042 1042 operation,
1043 1043 moves=None,
1044 1044 metadata=None,
1045 1045 fixphase=False,
1046 1046 targetphase=None,
1047 1047 backup=True,
1048 1048 ):
1049 1049 """do common cleanups when old nodes are replaced by new nodes
1050 1050
1051 1051 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
1052 1052 (we might also want to move working directory parent in the future)
1053 1053
1054 1054 By default, bookmark moves are calculated automatically from 'replacements',
1055 1055 but 'moves' can be used to override that. Also, 'moves' may include
1056 1056 additional bookmark moves that should not have associated obsmarkers.
1057 1057
1058 1058 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
1059 1059 have replacements. operation is a string, like "rebase".
1060 1060
1061 1061 metadata is a dictionary containing metadata to be stored in obsmarker if
1062 1062 obsolescence is enabled.
1063 1063 """
1064 1064 assert fixphase or targetphase is None
1065 1065 if not replacements and not moves:
1066 1066 return
1067 1067
1068 1068 # translate mapping's other forms
1069 1069 if not util.safehasattr(replacements, b'items'):
1070 1070 replacements = {(n,): () for n in replacements}
1071 1071 else:
1072 1072 # upgrading non-tuple "source" to tuple ones for BC
1073 1073 repls = {}
1074 1074 for key, value in replacements.items():
1075 1075 if not isinstance(key, tuple):
1076 1076 key = (key,)
1077 1077 repls[key] = value
1078 1078 replacements = repls
1079 1079
1080 1080 # Unfiltered repo is needed since nodes in replacements might be hidden.
1081 1081 unfi = repo.unfiltered()
1082 1082
1083 1083 # Calculate bookmark movements
1084 1084 if moves is None:
1085 1085 moves = {}
1086 1086 for oldnodes, newnodes in replacements.items():
1087 1087 for oldnode in oldnodes:
1088 1088 if oldnode in moves:
1089 1089 continue
1090 1090 if len(newnodes) > 1:
1091 1091 # usually a split, take the one with biggest rev number
1092 1092 newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1093 1093 elif len(newnodes) == 0:
1094 1094 # move bookmark backwards
1095 1095 allreplaced = []
1096 1096 for rep in replacements:
1097 1097 allreplaced.extend(rep)
1098 1098 roots = list(
1099 1099 unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1100 1100 )
1101 1101 if roots:
1102 1102 newnode = roots[0].node()
1103 1103 else:
1104 1104 newnode = nullid
1105 1105 else:
1106 1106 newnode = newnodes[0]
1107 1107 moves[oldnode] = newnode
1108 1108
1109 1109 allnewnodes = [n for ns in replacements.values() for n in ns]
1110 1110 toretract = {}
1111 1111 toadvance = {}
1112 1112 if fixphase:
1113 1113 precursors = {}
1114 1114 for oldnodes, newnodes in replacements.items():
1115 1115 for oldnode in oldnodes:
1116 1116 for newnode in newnodes:
1117 1117 precursors.setdefault(newnode, []).append(oldnode)
1118 1118
1119 1119 allnewnodes.sort(key=lambda n: unfi[n].rev())
1120 1120 newphases = {}
1121 1121
1122 1122 def phase(ctx):
1123 1123 return newphases.get(ctx.node(), ctx.phase())
1124 1124
1125 1125 for newnode in allnewnodes:
1126 1126 ctx = unfi[newnode]
1127 1127 parentphase = max(phase(p) for p in ctx.parents())
1128 1128 if targetphase is None:
1129 1129 oldphase = max(
1130 1130 unfi[oldnode].phase() for oldnode in precursors[newnode]
1131 1131 )
1132 1132 newphase = max(oldphase, parentphase)
1133 1133 else:
1134 1134 newphase = max(targetphase, parentphase)
1135 1135 newphases[newnode] = newphase
1136 1136 if newphase > ctx.phase():
1137 1137 toretract.setdefault(newphase, []).append(newnode)
1138 1138 elif newphase < ctx.phase():
1139 1139 toadvance.setdefault(newphase, []).append(newnode)
1140 1140
1141 1141 with repo.transaction(b'cleanup') as tr:
1142 1142 # Move bookmarks
1143 1143 bmarks = repo._bookmarks
1144 1144 bmarkchanges = []
1145 1145 for oldnode, newnode in moves.items():
1146 1146 oldbmarks = repo.nodebookmarks(oldnode)
1147 1147 if not oldbmarks:
1148 1148 continue
1149 1149 from . import bookmarks # avoid import cycle
1150 1150
1151 1151 repo.ui.debug(
1152 1152 b'moving bookmarks %r from %s to %s\n'
1153 1153 % (
1154 1154 pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1155 1155 hex(oldnode),
1156 1156 hex(newnode),
1157 1157 )
1158 1158 )
1159 1159 # Delete divergent bookmarks being parents of related newnodes
1160 1160 deleterevs = repo.revs(
1161 1161 b'parents(roots(%ln & (::%n))) - parents(%n)',
1162 1162 allnewnodes,
1163 1163 newnode,
1164 1164 oldnode,
1165 1165 )
1166 1166 deletenodes = _containsnode(repo, deleterevs)
1167 1167 for name in oldbmarks:
1168 1168 bmarkchanges.append((name, newnode))
1169 1169 for b in bookmarks.divergent2delete(repo, deletenodes, name):
1170 1170 bmarkchanges.append((b, None))
1171 1171
1172 1172 if bmarkchanges:
1173 1173 bmarks.applychanges(repo, tr, bmarkchanges)
1174 1174
1175 1175 for phase, nodes in toretract.items():
1176 1176 phases.retractboundary(repo, tr, phase, nodes)
1177 1177 for phase, nodes in toadvance.items():
1178 1178 phases.advanceboundary(repo, tr, phase, nodes)
1179 1179
1180 1180 mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1181 1181 # Obsolete or strip nodes
1182 1182 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1183 1183 # If a node is already obsoleted, and we want to obsolete it
1184 1184 # without a successor, skip that obsolete request since it's
1185 1185 # unnecessary. That's the "if s or not isobs(n)" check below.
1186 1186 # Also sort the node in topology order, that might be useful for
1187 1187 # some obsstore logic.
1188 1188 # NOTE: the sorting might belong to createmarkers.
1189 1189 torev = unfi.changelog.rev
1190 1190 sortfunc = lambda ns: torev(ns[0][0])
1191 1191 rels = []
1192 1192 for ns, s in sorted(replacements.items(), key=sortfunc):
1193 1193 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1194 1194 rels.append(rel)
1195 1195 if rels:
1196 1196 obsolete.createmarkers(
1197 1197 repo, rels, operation=operation, metadata=metadata
1198 1198 )
1199 1199 elif phases.supportinternal(repo) and mayusearchived:
1200 1200 # this assumes we do not have "unstable" nodes above the cleaned ones
1201 1201 allreplaced = set()
1202 1202 for ns in replacements.keys():
1203 1203 allreplaced.update(ns)
1204 1204 if backup:
1205 1205 from . import repair # avoid import cycle
1206 1206
1207 1207 node = min(allreplaced, key=repo.changelog.rev)
1208 1208 repair.backupbundle(
1209 1209 repo, allreplaced, allreplaced, node, operation
1210 1210 )
1211 1211 phases.retractboundary(repo, tr, phases.archived, allreplaced)
1212 1212 else:
1213 1213 from . import repair # avoid import cycle
1214 1214
1215 1215 tostrip = list(n for ns in replacements for n in ns)
1216 1216 if tostrip:
1217 1217 repair.delayedstrip(
1218 1218 repo.ui, repo, tostrip, operation, backup=backup
1219 1219 )
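An assumed caller of cleanupnodes(), e.g. an extension that has rewritten one changeset (oldnode and newnode are binary node ids): bookmarks pointing at the old node are moved, and the old node is obsoleted or stripped as the repository's configuration allows.

    cleanupnodes(repo, {oldnode: [newnode]}, operation=b'myrewrite')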
1220 1220
1221 1221
1222 1222 def addremove(repo, matcher, prefix, uipathfn, opts=None):
1223 1223 if opts is None:
1224 1224 opts = {}
1225 1225 m = matcher
1226 1226 dry_run = opts.get(b'dry_run')
1227 1227 try:
1228 1228 similarity = float(opts.get(b'similarity') or 0)
1229 1229 except ValueError:
1230 1230 raise error.Abort(_(b'similarity must be a number'))
1231 1231 if similarity < 0 or similarity > 100:
1232 1232 raise error.Abort(_(b'similarity must be between 0 and 100'))
1233 1233 similarity /= 100.0
1234 1234
1235 1235 ret = 0
1236 1236
1237 1237 wctx = repo[None]
1238 1238 for subpath in sorted(wctx.substate):
1239 1239 submatch = matchmod.subdirmatcher(subpath, m)
1240 1240 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1241 1241 sub = wctx.sub(subpath)
1242 1242 subprefix = repo.wvfs.reljoin(prefix, subpath)
1243 1243 subuipathfn = subdiruipathfn(subpath, uipathfn)
1244 1244 try:
1245 1245 if sub.addremove(submatch, subprefix, subuipathfn, opts):
1246 1246 ret = 1
1247 1247 except error.LookupError:
1248 1248 repo.ui.status(
1249 1249 _(b"skipping missing subrepository: %s\n")
1250 1250 % uipathfn(subpath)
1251 1251 )
1252 1252
1253 1253 rejected = []
1254 1254
1255 1255 def badfn(f, msg):
1256 1256 if f in m.files():
1257 1257 m.bad(f, msg)
1258 1258 rejected.append(f)
1259 1259
1260 1260 badmatch = matchmod.badmatch(m, badfn)
1261 1261 added, unknown, deleted, removed, forgotten = _interestingfiles(
1262 1262 repo, badmatch
1263 1263 )
1264 1264
1265 1265 unknownset = set(unknown + forgotten)
1266 1266 toprint = unknownset.copy()
1267 1267 toprint.update(deleted)
1268 1268 for abs in sorted(toprint):
1269 1269 if repo.ui.verbose or not m.exact(abs):
1270 1270 if abs in unknownset:
1271 1271 status = _(b'adding %s\n') % uipathfn(abs)
1272 1272 label = b'ui.addremove.added'
1273 1273 else:
1274 1274 status = _(b'removing %s\n') % uipathfn(abs)
1275 1275 label = b'ui.addremove.removed'
1276 1276 repo.ui.status(status, label=label)
1277 1277
1278 1278 renames = _findrenames(
1279 1279 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1280 1280 )
1281 1281
1282 1282 if not dry_run:
1283 1283 _markchanges(repo, unknown + forgotten, deleted, renames)
1284 1284
1285 1285 for f in rejected:
1286 1286 if f in m.files():
1287 1287 return 1
1288 1288 return ret
1289 1289
1290 1290
1291 1291 def marktouched(repo, files, similarity=0.0):
1292 1292 '''Assert that files have somehow been operated upon. files are relative to
1293 1293 the repo root.'''
1294 1294 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1295 1295 rejected = []
1296 1296
1297 1297 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1298 1298
1299 1299 if repo.ui.verbose:
1300 1300 unknownset = set(unknown + forgotten)
1301 1301 toprint = unknownset.copy()
1302 1302 toprint.update(deleted)
1303 1303 for abs in sorted(toprint):
1304 1304 if abs in unknownset:
1305 1305 status = _(b'adding %s\n') % abs
1306 1306 else:
1307 1307 status = _(b'removing %s\n') % abs
1308 1308 repo.ui.status(status)
1309 1309
1310 1310 # TODO: We should probably have the caller pass in uipathfn and apply it to
1311 1311 # the messages above too. legacyrelativevalue=True is consistent with how
1312 1312 # it used to work.
1313 1313 uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1314 1314 renames = _findrenames(
1315 1315 repo, m, added + unknown, removed + deleted, similarity, uipathfn
1316 1316 )
1317 1317
1318 1318 _markchanges(repo, unknown + forgotten, deleted, renames)
1319 1319
1320 1320 for f in rejected:
1321 1321 if f in m.files():
1322 1322 return 1
1323 1323 return 0
1324 1324
1325 1325
1326 1326 def _interestingfiles(repo, matcher):
1327 1327 '''Walk dirstate with matcher, looking for files that addremove would care
1328 1328 about.
1329 1329
1330 1330 This is different from dirstate.status because it doesn't care about
1331 1331 whether files are modified or clean.'''
1332 1332 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1333 1333 audit_path = pathutil.pathauditor(repo.root, cached=True)
1334 1334
1335 1335 ctx = repo[None]
1336 1336 dirstate = repo.dirstate
1337 1337 matcher = repo.narrowmatch(matcher, includeexact=True)
1338 1338 walkresults = dirstate.walk(
1339 1339 matcher,
1340 1340 subrepos=sorted(ctx.substate),
1341 1341 unknown=True,
1342 1342 ignored=False,
1343 1343 full=False,
1344 1344 )
1345 1345 for abs, st in pycompat.iteritems(walkresults):
1346 1346 dstate = dirstate[abs]
1347 1347 if dstate == b'?' and audit_path.check(abs):
1348 1348 unknown.append(abs)
1349 1349 elif dstate != b'r' and not st:
1350 1350 deleted.append(abs)
1351 1351 elif dstate == b'r' and st:
1352 1352 forgotten.append(abs)
1353 1353 # for finding renames
1354 1354 elif dstate == b'r' and not st:
1355 1355 removed.append(abs)
1356 1356 elif dstate == b'a':
1357 1357 added.append(abs)
1358 1358
1359 1359 return added, unknown, deleted, removed, forgotten
1360 1360
1361 1361
1362 1362 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1363 1363 '''Find renames from removed files to added ones.'''
1364 1364 renames = {}
1365 1365 if similarity > 0:
1366 1366 for old, new, score in similar.findrenames(
1367 1367 repo, added, removed, similarity
1368 1368 ):
1369 1369 if (
1370 1370 repo.ui.verbose
1371 1371 or not matcher.exact(old)
1372 1372 or not matcher.exact(new)
1373 1373 ):
1374 1374 repo.ui.status(
1375 1375 _(
1376 1376 b'recording removal of %s as rename to %s '
1377 1377 b'(%d%% similar)\n'
1378 1378 )
1379 1379 % (uipathfn(old), uipathfn(new), score * 100)
1380 1380 )
1381 1381 renames[new] = old
1382 1382 return renames
1383 1383
1384 1384
1385 1385 def _markchanges(repo, unknown, deleted, renames):
1386 1386 '''Marks the files in unknown as added, the files in deleted as removed,
1387 1387 and the files in renames as copied.'''
1388 1388 wctx = repo[None]
1389 1389 with repo.wlock():
1390 1390 wctx.forget(deleted)
1391 1391 wctx.add(unknown)
1392 1392 for new, old in pycompat.iteritems(renames):
1393 1393 wctx.copy(old, new)
1394 1394
1395 1395
1396 1396 def getrenamedfn(repo, endrev=None):
1397 1397 if copiesmod.usechangesetcentricalgo(repo):
1398 1398
1399 1399 def getrenamed(fn, rev):
1400 1400 ctx = repo[rev]
1401 1401 p1copies = ctx.p1copies()
1402 1402 if fn in p1copies:
1403 1403 return p1copies[fn]
1404 1404 p2copies = ctx.p2copies()
1405 1405 if fn in p2copies:
1406 1406 return p2copies[fn]
1407 1407 return None
1408 1408
1409 1409 return getrenamed
1410 1410
1411 1411 rcache = {}
1412 1412 if endrev is None:
1413 1413 endrev = len(repo)
1414 1414
1415 1415 def getrenamed(fn, rev):
1416 1416 '''looks up all renames for a file (up to endrev) the first
1417 1417 time the file is given. It indexes on the changerev and only
1418 1418 parses the manifest if linkrev != changerev.
1419 1419 Returns rename info for fn at changerev rev.'''
1420 1420 if fn not in rcache:
1421 1421 rcache[fn] = {}
1422 1422 fl = repo.file(fn)
1423 1423 for i in fl:
1424 1424 lr = fl.linkrev(i)
1425 1425 renamed = fl.renamed(fl.node(i))
1426 1426 rcache[fn][lr] = renamed and renamed[0]
1427 1427 if lr >= endrev:
1428 1428 break
1429 1429 if rev in rcache[fn]:
1430 1430 return rcache[fn][rev]
1431 1431
1432 1432 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1433 1433 # filectx logic.
1434 1434 try:
1435 1435 return repo[rev][fn].copysource()
1436 1436 except error.LookupError:
1437 1437 return None
1438 1438
1439 1439 return getrenamed
1440 1440
1441 1441
1442 1442 def getcopiesfn(repo, endrev=None):
1443 1443 if copiesmod.usechangesetcentricalgo(repo):
1444 1444
1445 1445 def copiesfn(ctx):
1446 1446 if ctx.p2copies():
1447 1447 allcopies = ctx.p1copies().copy()
1448 1448 # There should be no overlap
1449 1449 allcopies.update(ctx.p2copies())
1450 1450 return sorted(allcopies.items())
1451 1451 else:
1452 1452 return sorted(ctx.p1copies().items())
1453 1453
1454 1454 else:
1455 1455 getrenamed = getrenamedfn(repo, endrev)
1456 1456
1457 1457 def copiesfn(ctx):
1458 1458 copies = []
1459 1459 for fn in ctx.files():
1460 1460 rename = getrenamed(fn, ctx.rev())
1461 1461 if rename:
1462 1462 copies.append((fn, rename))
1463 1463 return copies
1464 1464
1465 1465 return copiesfn
1466 1466
1467 1467
1468 1468 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1469 1469 """Update the dirstate to reflect the intent of copying src to dst. For
1470 1470 different reasons it might not end up with dst being marked as copied from src.
1471 1471 """
1472 1472 origsrc = repo.dirstate.copied(src) or src
1473 1473 if dst == origsrc: # copying back a copy?
1474 1474 if repo.dirstate[dst] not in b'mn' and not dryrun:
1475 1475 repo.dirstate.normallookup(dst)
1476 1476 else:
1477 1477 if repo.dirstate[origsrc] == b'a' and origsrc == src:
1478 1478 if not ui.quiet:
1479 1479 ui.warn(
1480 1480 _(
1481 1481 b"%s has not been committed yet, so no copy "
1482 1482 b"data will be stored for %s.\n"
1483 1483 )
1484 1484 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
1485 1485 )
1486 1486 if repo.dirstate[dst] in b'?r' and not dryrun:
1487 1487 wctx.add([dst])
1488 1488 elif not dryrun:
1489 1489 wctx.copy(origsrc, dst)
1490 1490
1491 1491
1492 1492 def movedirstate(repo, newctx, match=None):
1493 1493 """Move the dirstate to newctx and adjust it as necessary.
1494 1494
1495 1495 A matcher can be provided as an optimization. It is probably a bug to pass
1496 1496 a matcher that doesn't match all the differences between the parent of the
1497 1497 working copy and newctx.
1498 1498 """
1499 1499 oldctx = repo[b'.']
1500 1500 ds = repo.dirstate
1501 1501 copies = dict(ds.copies())
1502 1502 ds.setparents(newctx.node(), nullid)
1503 1503 s = newctx.status(oldctx, match=match)
1504 1504 for f in s.modified:
1505 1505 if ds[f] == b'r':
1506 1506 # modified + removed -> removed
1507 1507 continue
1508 1508 ds.normallookup(f)
1509 1509
1510 1510 for f in s.added:
1511 1511 if ds[f] == b'r':
1512 1512 # added + removed -> unknown
1513 1513 ds.drop(f)
1514 1514 elif ds[f] != b'a':
1515 1515 ds.add(f)
1516 1516
1517 1517 for f in s.removed:
1518 1518 if ds[f] == b'a':
1519 1519 # removed + added -> normal
1520 1520 ds.normallookup(f)
1521 1521 elif ds[f] != b'r':
1522 1522 ds.remove(f)
1523 1523
1524 1524 # Merge old parent and old working dir copies
1525 1525 oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1526 1526 oldcopies.update(copies)
1527 1527 copies = {
1528 1528 dst: oldcopies.get(src, src)
1529 1529 for dst, src in pycompat.iteritems(oldcopies)
1530 1530 }
1531 1531 # Adjust the dirstate copies
1532 1532 for dst, src in pycompat.iteritems(copies):
1533 1533 if src not in newctx or dst in newctx or ds[dst] != b'a':
1534 1534 src = None
1535 1535 ds.copy(src, dst)
1536 1536 repo._quick_access_changeid_invalidate()
1537 1537
1538 1538
1539 1539 def filterrequirements(requirements):
1540 1540 """ filters the requirements into two sets:
1541 1541
1542 1542 wcreq: requirements which should be written in .hg/requires
1543 1543 storereq: which should be written in .hg/store/requires
1544 1544
1545 1545 Returns (wcreq, storereq)
1546 1546 """
1547 1547 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1548 1548 wc, store = set(), set()
1549 1549 for r in requirements:
1550 1550 if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1551 1551 wc.add(r)
1552 1552 else:
1553 1553 store.add(r)
1554 1554 return wc, store
1555 1555 return requirements, None
1556 1556
1557 1557
1558 1558 def istreemanifest(repo):
1559 1559 """ returns whether the repository is using treemanifest or not """
1560 1560 return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1561 1561
1562 1562
1563 1563 def writereporequirements(repo, requirements=None):
1564 1564 """ writes requirements for the repo to .hg/requires """
1565 1565 if requirements:
1566 1566 repo.requirements = requirements
1567 1567 wcreq, storereq = filterrequirements(repo.requirements)
1568 1568 if wcreq is not None:
1569 1569 writerequires(repo.vfs, wcreq)
1570 1570 if storereq is not None:
1571 1571 writerequires(repo.svfs, storereq)
1572 1572
1573 1573
1574 1574 def writerequires(opener, requirements):
1575 1575 with opener(b'requires', b'w', atomictemp=True) as fp:
1576 1576 for r in sorted(requirements):
1577 1577 fp.write(b"%s\n" % r)
1578 1578

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()


class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned directly from the
    instance dictionary, where the first access stored it.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict, e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # a function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x


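# An illustrative sketch of how filecache is meant to be wired up; the
# subclass and repo-like class below are hypothetical, while vfs.join()
# and vfs.tryread() are real vfs APIs:
class _examplefilecache(filecache):
    def join(self, obj, fname):
        return obj.vfs.join(fname)


class _examplerepoobject(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}

    @_examplefilecache(b'bookmarks')
    def bookmarks(self):
        # re-read only when the stat information of the tracked file
        # reports a change
        return self.vfs.tryread(b'bookmarks')

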
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data

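
# An illustrative sketch of consuming an extdata source; the source name
# "bugzilla" and its hgrc configuration are hypothetical:
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugzilla-ids
#
def _extdatasource_example(repo):
    # each line printed by the command is "<revspec> <value>"; records
    # for revisions unknown locally are silently skipped
    return extdatasource(repo, b'bugzilla')
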

class progress(object):
    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

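
# An illustrative sketch of driving the progress helper; callers
# normally obtain an instance through ui.makeprogress(), which supplies
# the ``updatebar`` callback (the loop and names below are hypothetical):
def _progress_example(ui, items):
    with ui.makeprogress(b'processing', unit=b'items', total=len(items)) as p:
        for i, item in enumerate(items):
            p.update(i + 1, item=item)
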

def gdinitconfig(ui):
    """helper function to know if a repo should be created using general delta
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )


def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')


class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _(b"empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' and are therefore not skipped
            # by 'if line'
            updatedict = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
            if self.firstlinekey in updatedict:
                e = _(b"%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append(b'%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = b"key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = b"keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = b"invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if b'\n' in v:
                e = b"invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append(b"%s=%s\n" % (k, v))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(lines))

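
# An illustrative round trip; the vfs, file name and values below are
# hypothetical:
def _simplekeyvaluefile_example(vfs):
    f = simplekeyvaluefile(vfs, b'mystatefile')
    f.write({b'node': b'0' * 40, b'source': b'default'}, firstline=b'v1')
    # returns {b'__firstline': b'v1', b'node': ..., b'source': b'default'}
    return f.read(firstlinenonkeyval=True)
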

_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

_reportnewcssource = [
    b'pull',
    b'unbundle',
]


def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        if m:
            assert isinstance(m, matchmod.basematcher)
            # The command itself will complain about files that don't exist, so
            # don't duplicate the message.
            return matchmod.badmatch(m, lambda fn, msg: None)
        else:
            return matchall(repo)

    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]

    fileprefetchhooks(repo, revbadmatches)


# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

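
# An illustrative sketch of how an extension could hook in; the hook
# name and body below are hypothetical, while util.hooks.add() is the
# real registration API:
def _exampleprefetchhook(repo, revmatches):
    for rev, match in revmatches:
        # an extension would fetch the files selected by ``match`` at
        # ``rev`` here, before the command that needs them runs
        pass


# kept commented out so the no-op example above is never registered:
# fileprefetchhooks.add(b'_example', _exampleprefetchhook)
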
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True


def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
    """register a callback to issue a summary after the transaction is closed

    If as_validator is true, then the callbacks are registered as transaction
    validators instead.
    """

    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        newcat = b'%02i-txnreport' % len(categories)
        if as_validator:
            otr.addvalidator(newcat, wrapped)
        else:
            otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            if as_validator:
                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                msg = _(b'obsoleted %i changesets\n')
                if as_validator:
                    msg = _(b'obsoleting %i changesets\n')
                repo.ui.status(msg % len(obsoleted))

    if obsolete.isenabled(
        repo, obsolete.createmarkersopt
    ) and repo.ui.configbool(
        b'experimental', b'evolution.report-instabilities'
    ):
        instabilitytypes = [
            (b'orphan', b'orphan'),
            (b'phase-divergent', b'phasedivergent'),
            (b'content-divergent', b'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(
                    set(obsolete.getrevs(repo, revset)) - filtered
                )
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)

        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (
                    newinstabilitycounts[instability]
                    - oldinstabilitycounts[instability]
                )
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally but the term has not
                # been exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            published = []
            for revs, (old, new) in tr.changes.get(b'phases', []):
                if new != phases.public:
                    continue
                published.extend(rev for rev in revs if rev < origrepolen)
            if not published:
                return
            msg = _(b'%d local changesets published\n')
            if as_validator:
                msg = _(b'%d local changesets will be published\n')
            repo.ui.status(msg % len(published))

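
# An illustrative sketch of the call shape; in practice the transaction
# machinery registers these callbacks itself, so the wiring below is
# hypothetical:
def _summarycallback_example(repo, tr):
    # issue the summary once the pull-like transaction closes
    registersummarycallback(repo, tr, txnname=b'pull')
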

def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)


def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)


def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branches
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)


def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink


def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool(
        b'experimental', b'directaccess'
    ):
        return repo

    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)


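# An illustrative sketch of the intended call pattern; the spec below is
# hypothetical and revrange() is defined earlier in this module:
def _directaccess_example(repo):
    specs = [b'ffffffffffff']  # hash-like spec, possibly naming a hidden cs
    repo = unhidehashlikerevs(repo, specs, b'warn')
    return revrange(repo, specs)

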
def _getrevsfromsymbols(repo, symbols):
    """parses the list of symbols and returns the set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs


def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs(
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))",
        mark,
        mark,
        mark,
    )
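

# An illustrative reading of the revset above (the bookmark name is
# hypothetical): bookmarkrevs(repo, b'feature') selects the ancestors of
# bookmark 'feature' that are reachable neither from heads lacking the
# bookmark nor from other bookmarks, i.e. the changesets "owned" by that
# bookmark.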