addremove: add labels for messages about added and removed files...
Boris Feld
r39123:ad88726d default
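The new output labels (addremove.added, addremove.removed) let the color
extension and templates style addremove's messages. A hedged hgrc sketch
(the label names come from the diff below; the color values are
illustrative):

    [color]
    addremove.added = green
    addremove.removed = red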
@@ -1,1753 +1,1755 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 pycompat,
38 38 revsetlang,
39 39 similar,
40 40 url,
41 41 util,
42 42 vfs,
43 43 )
44 44
45 45 from .utils import (
46 46 procutil,
47 47 stringutil,
48 48 )
49 49
50 50 if pycompat.iswindows:
51 51 from . import scmwindows as scmplatform
52 52 else:
53 53 from . import scmposix as scmplatform
54 54
55 55 termsize = scmplatform.termsize
56 56
57 57 class status(tuple):
58 58 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
59 59 and 'ignored' properties are only relevant to the working copy.
60 60 '''
61 61
62 62 __slots__ = ()
63 63
64 64 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
65 65 clean):
66 66 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
67 67 ignored, clean))
68 68
69 69 @property
70 70 def modified(self):
71 71 '''files that have been modified'''
72 72 return self[0]
73 73
74 74 @property
75 75 def added(self):
76 76 '''files that have been added'''
77 77 return self[1]
78 78
79 79 @property
80 80 def removed(self):
81 81 '''files that have been removed'''
82 82 return self[2]
83 83
84 84 @property
85 85 def deleted(self):
86 86 '''files that are in the dirstate, but have been deleted from the
87 87 working copy (aka "missing")
88 88 '''
89 89 return self[3]
90 90
91 91 @property
92 92 def unknown(self):
93 93 '''files not in the dirstate that are not ignored'''
94 94 return self[4]
95 95
96 96 @property
97 97 def ignored(self):
98 98 '''files not in the dirstate that are ignored (by _dirignore())'''
99 99 return self[5]
100 100
101 101 @property
102 102 def clean(self):
103 103 '''files that have not been modified'''
104 104 return self[6]
105 105
106 106 def __repr__(self, *args, **kwargs):
107 107 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
108 108 r'unknown=%s, ignored=%s, clean=%s>') %
109 109 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
110 110
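# Usage sketch for the status tuple above (file names illustrative): the
# positional layout and the named properties stay in sync.
st = status(modified=[b'a.txt'], added=[b'b.txt'], removed=[], deleted=[],
            unknown=[], ignored=[], clean=[])
assert st.added == [b'b.txt']
assert st[1] is st.added    # index 1 is 'added', matching __new__ above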
111 111 def itersubrepos(ctx1, ctx2):
112 112 """find subrepos in ctx1 or ctx2"""
113 113 # Create a (subpath, ctx) mapping where we prefer subpaths from
114 114 # ctx1. The subpaths from ctx2 are important when the .hgsub file
115 115 # has been modified (in ctx2) but not yet committed (in ctx1).
116 116 subpaths = dict.fromkeys(ctx2.substate, ctx2)
117 117 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
118 118
119 119 missing = set()
120 120
121 121 for subpath in ctx2.substate:
122 122 if subpath not in ctx1.substate:
123 123 del subpaths[subpath]
124 124 missing.add(subpath)
125 125
126 126 for subpath, ctx in sorted(subpaths.iteritems()):
127 127 yield subpath, ctx.sub(subpath)
128 128
129 129 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
130 130 # status and diff will have an accurate result when it does
131 131 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
132 132 # against itself.
133 133 for subpath in missing:
134 134 yield subpath, ctx2.nullsub(subpath, ctx1)
135 135
136 136 def nochangesfound(ui, repo, excluded=None):
137 137 '''Report no changes for push/pull. excluded is None or a list of
138 138 nodes excluded from the push/pull.
139 139 '''
140 140 secretlist = []
141 141 if excluded:
142 142 for n in excluded:
143 143 ctx = repo[n]
144 144 if ctx.phase() >= phases.secret and not ctx.extinct():
145 145 secretlist.append(n)
146 146
147 147 if secretlist:
148 148 ui.status(_("no changes found (ignored %d secret changesets)\n")
149 149 % len(secretlist))
150 150 else:
151 151 ui.status(_("no changes found\n"))
152 152
153 153 def callcatch(ui, func):
154 154 """call func() with global exception handling
155 155
156 156 return func() if no exception happens. otherwise do some error handling
157 157 and return an exit code accordingly. does not handle all exceptions.
158 158 """
159 159 try:
160 160 try:
161 161 return func()
162 162 except: # re-raises
163 163 ui.traceback()
164 164 raise
165 165 # Global exception handling, alphabetically
166 166 # Mercurial-specific first, followed by built-in and library exceptions
167 167 except error.LockHeld as inst:
168 168 if inst.errno == errno.ETIMEDOUT:
169 169 reason = _('timed out waiting for lock held by %r') % inst.locker
170 170 else:
171 171 reason = _('lock held by %r') % inst.locker
172 172 ui.error(_("abort: %s: %s\n") % (
173 173 inst.desc or stringutil.forcebytestr(inst.filename), reason))
174 174 if not inst.locker:
175 175 ui.error(_("(lock might be very busy)\n"))
176 176 except error.LockUnavailable as inst:
177 177 ui.error(_("abort: could not lock %s: %s\n") %
178 178 (inst.desc or stringutil.forcebytestr(inst.filename),
179 179 encoding.strtolocal(inst.strerror)))
180 180 except error.OutOfBandError as inst:
181 181 if inst.args:
182 182 msg = _("abort: remote error:\n")
183 183 else:
184 184 msg = _("abort: remote error\n")
185 185 ui.error(msg)
186 186 if inst.args:
187 187 ui.error(''.join(inst.args))
188 188 if inst.hint:
189 189 ui.error('(%s)\n' % inst.hint)
190 190 except error.RepoError as inst:
191 191 ui.error(_("abort: %s!\n") % inst)
192 192 if inst.hint:
193 193 ui.error(_("(%s)\n") % inst.hint)
194 194 except error.ResponseError as inst:
195 195 ui.error(_("abort: %s") % inst.args[0])
196 196 msg = inst.args[1]
197 197 if isinstance(msg, type(u'')):
198 198 msg = pycompat.sysbytes(msg)
199 199 if not isinstance(msg, bytes):
200 200 ui.error(" %r\n" % (msg,))
201 201 elif not msg:
202 202 ui.error(_(" empty string\n"))
203 203 else:
204 204 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
205 205 except error.CensoredNodeError as inst:
206 206 ui.error(_("abort: file censored %s!\n") % inst)
207 207 except error.RevlogError as inst:
208 208 ui.error(_("abort: %s!\n") % inst)
209 209 except error.InterventionRequired as inst:
210 210 ui.error("%s\n" % inst)
211 211 if inst.hint:
212 212 ui.error(_("(%s)\n") % inst.hint)
213 213 return 1
214 214 except error.WdirUnsupported:
215 215 ui.error(_("abort: working directory revision cannot be specified\n"))
216 216 except error.Abort as inst:
217 217 ui.error(_("abort: %s\n") % inst)
218 218 if inst.hint:
219 219 ui.error(_("(%s)\n") % inst.hint)
220 220 except ImportError as inst:
221 221 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
222 222 m = stringutil.forcebytestr(inst).split()[-1]
223 223 if m in "mpatch bdiff".split():
224 224 ui.error(_("(did you forget to compile extensions?)\n"))
225 225 elif m in "zlib".split():
226 226 ui.error(_("(is your Python install correct?)\n"))
227 227 except IOError as inst:
228 228 if util.safehasattr(inst, "code"):
229 229 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
230 230 elif util.safehasattr(inst, "reason"):
231 231 try: # usually it is in the form (errno, strerror)
232 232 reason = inst.reason.args[1]
233 233 except (AttributeError, IndexError):
234 234 # it might be anything, for example a string
235 235 reason = inst.reason
236 236 if isinstance(reason, pycompat.unicode):
237 237 # SSLError of Python 2.7.9 contains a unicode
238 238 reason = encoding.unitolocal(reason)
239 239 ui.error(_("abort: error: %s\n") % reason)
240 240 elif (util.safehasattr(inst, "args")
241 241 and inst.args and inst.args[0] == errno.EPIPE):
242 242 pass
243 243 elif getattr(inst, "strerror", None):
244 244 if getattr(inst, "filename", None):
245 245 ui.error(_("abort: %s: %s\n") % (
246 246 encoding.strtolocal(inst.strerror),
247 247 stringutil.forcebytestr(inst.filename)))
248 248 else:
249 249 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
250 250 else:
251 251 raise
252 252 except OSError as inst:
253 253 if getattr(inst, "filename", None) is not None:
254 254 ui.error(_("abort: %s: '%s'\n") % (
255 255 encoding.strtolocal(inst.strerror),
256 256 stringutil.forcebytestr(inst.filename)))
257 257 else:
258 258 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
259 259 except MemoryError:
260 260 ui.error(_("abort: out of memory\n"))
261 261 except SystemExit as inst:
262 262 # Commands shouldn't sys.exit directly, but give a return code.
263 263 # Just in case, catch this and pass the exit code to the caller.
264 264 return inst.code
265 265 except socket.error as inst:
266 266 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
267 267
268 268 return -1
269 269
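# Hedged usage sketch: a dispatcher typically wraps a command body in
# callcatch() so handled errors become exit codes ('ui' and the body are
# illustrative).
def _runcommand():
    return 0
ret = callcatch(ui, _runcommand)    # 0 on success, e.g. -1 on a handled abort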
270 270 def checknewlabel(repo, lbl, kind):
271 271 # Do not use the "kind" parameter in ui output.
272 272 # It makes strings difficult to translate.
273 273 if lbl in ['tip', '.', 'null']:
274 274 raise error.Abort(_("the name '%s' is reserved") % lbl)
275 275 for c in (':', '\0', '\n', '\r'):
276 276 if c in lbl:
277 277 raise error.Abort(
278 278 _("%r cannot be used in a name") % pycompat.bytestr(c))
279 279 try:
280 280 int(lbl)
281 281 raise error.Abort(_("cannot use an integer as a name"))
282 282 except ValueError:
283 283 pass
284 284 if lbl.strip() != lbl:
285 285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
286 286
287 287 def checkfilename(f):
288 288 '''Check that the filename f is an acceptable filename for a tracked file'''
289 289 if '\r' in f or '\n' in f:
290 290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
291 291 % pycompat.bytestr(f))
292 292
293 293 def checkportable(ui, f):
294 294 '''Check if filename f is portable and warn or abort depending on config'''
295 295 checkfilename(f)
296 296 abort, warn = checkportabilityalert(ui)
297 297 if abort or warn:
298 298 msg = util.checkwinfilename(f)
299 299 if msg:
300 300 msg = "%s: %s" % (msg, procutil.shellquote(f))
301 301 if abort:
302 302 raise error.Abort(msg)
303 303 ui.warn(_("warning: %s\n") % msg)
304 304
305 305 def checkportabilityalert(ui):
306 306 '''check if the user's config requests nothing, a warning, or abort for
307 307 non-portable filenames'''
308 308 val = ui.config('ui', 'portablefilenames')
309 309 lval = val.lower()
310 310 bval = stringutil.parsebool(val)
311 311 abort = pycompat.iswindows or lval == 'abort'
312 312 warn = bval or lval == 'warn'
313 313 if bval is None and not (warn or abort or lval == 'ignore'):
314 314 raise error.ConfigError(
315 315 _("ui.portablefilenames value is invalid ('%s')") % val)
316 316 return abort, warn
317 317
318 318 class casecollisionauditor(object):
319 319 def __init__(self, ui, abort, dirstate):
320 320 self._ui = ui
321 321 self._abort = abort
322 322 allfiles = '\0'.join(dirstate._map)
323 323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
324 324 self._dirstate = dirstate
325 325 # The purpose of _newfiles is so that we don't complain about
326 326 # case collisions if someone were to call this object with the
327 327 # same filename twice.
328 328 self._newfiles = set()
329 329
330 330 def __call__(self, f):
331 331 if f in self._newfiles:
332 332 return
333 333 fl = encoding.lower(f)
334 334 if fl in self._loweredfiles and f not in self._dirstate:
335 335 msg = _('possible case-folding collision for %s') % f
336 336 if self._abort:
337 337 raise error.Abort(msg)
338 338 self._ui.warn(_("warning: %s\n") % msg)
339 339 self._loweredfiles.add(fl)
340 340 self._newfiles.add(f)
341 341
342 342 def filteredhash(repo, maxrev):
343 343 """build hash of filtered revisions in the current repoview.
344 344
345 345 Multiple caches perform up-to-date validation by checking that the
346 346 tiprev and tipnode stored in the cache file match the current repository.
347 347 However, this is not sufficient for validating repoviews because the set
348 348 of revisions in the view may change without the repository tiprev and
349 349 tipnode changing.
350 350
351 351 This function hashes all the revs filtered from the view and returns
352 352 that SHA-1 digest.
353 353 """
354 354 cl = repo.changelog
355 355 if not cl.filteredrevs:
356 356 return None
357 357 key = None
358 358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
359 359 if revs:
360 360 s = hashlib.sha1()
361 361 for rev in revs:
362 362 s.update('%d;' % rev)
363 363 key = s.digest()
364 364 return key
365 365
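# Hedged sketch of the validation scheme described in the docstring: a
# cache stores (tiprev, tipnode, filteredhash) and considers itself valid
# only while all three still match (cache layout illustrative).
tiprev = len(repo) - 1
cachekey = (tiprev, repo.changelog.node(tiprev), filteredhash(repo, tiprev))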
366 366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
367 367 '''yield every hg repository under path, always recursively.
368 368 The recurse flag will only control recursion into repo working dirs'''
369 369 def errhandler(err):
370 370 if err.filename == path:
371 371 raise err
372 372 samestat = getattr(os.path, 'samestat', None)
373 373 if followsym and samestat is not None:
374 374 def adddir(dirlst, dirname):
375 375 dirstat = os.stat(dirname)
376 376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
377 377 if not match:
378 378 dirlst.append(dirstat)
379 379 return not match
380 380 else:
381 381 followsym = False
382 382
383 383 if (seen_dirs is None) and followsym:
384 384 seen_dirs = []
385 385 adddir(seen_dirs, path)
386 386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
387 387 dirs.sort()
388 388 if '.hg' in dirs:
389 389 yield root # found a repository
390 390 qroot = os.path.join(root, '.hg', 'patches')
391 391 if os.path.isdir(os.path.join(qroot, '.hg')):
392 392 yield qroot # we have a patch queue repo here
393 393 if recurse:
394 394 # avoid recursing inside the .hg directory
395 395 dirs.remove('.hg')
396 396 else:
397 397 dirs[:] = [] # don't descend further
398 398 elif followsym:
399 399 newdirs = []
400 400 for d in dirs:
401 401 fname = os.path.join(root, d)
402 402 if adddir(seen_dirs, fname):
403 403 if os.path.islink(fname):
404 404 for hgname in walkrepos(fname, True, seen_dirs):
405 405 yield hgname
406 406 else:
407 407 newdirs.append(d)
408 408 dirs[:] = newdirs
409 409
410 410 def binnode(ctx):
411 411 """Return binary node id for a given basectx"""
412 412 node = ctx.node()
413 413 if node is None:
414 414 return wdirid
415 415 return node
416 416
417 417 def intrev(ctx):
418 418 """Return integer for a given basectx that can be used in comparison or
419 419 arithmetic operation"""
420 420 rev = ctx.rev()
421 421 if rev is None:
422 422 return wdirrev
423 423 return rev
424 424
425 425 def formatchangeid(ctx):
426 426 """Format changectx as '{rev}:{node|formatnode}', which is the default
427 427 template provided by logcmdutil.changesettemplater"""
428 428 repo = ctx.repo()
429 429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
430 430
431 431 def formatrevnode(ui, rev, node):
432 432 """Format given revision and node depending on the current verbosity"""
433 433 if ui.debugflag:
434 434 hexfunc = hex
435 435 else:
436 436 hexfunc = short
437 437 return '%d:%s' % (rev, hexfunc(node))
438 438
439 439 def resolvehexnodeidprefix(repo, prefix):
440 440 if (prefix.startswith('x') and
441 441 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
442 442 prefix = prefix[1:]
443 443 try:
444 444 # Uses unfiltered repo because it's faster when prefix is ambiguous.
445 445 # This matches the shortesthexnodeidprefix() function below.
446 446 node = repo.unfiltered().changelog._partialmatch(prefix)
447 447 except error.AmbiguousPrefixLookupError:
448 448 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
449 449 if revset:
450 450 # Clear config to avoid infinite recursion
451 451 configoverrides = {('experimental',
452 452 'revisions.disambiguatewithin'): None}
453 453 with repo.ui.configoverride(configoverrides):
454 454 revs = repo.anyrevs([revset], user=True)
455 455 matches = []
456 456 for rev in revs:
457 457 node = repo.changelog.node(rev)
458 458 if hex(node).startswith(prefix):
459 459 matches.append(node)
460 460 if len(matches) == 1:
461 461 return matches[0]
462 462 raise
463 463 if node is None:
464 464 return
465 465 repo.changelog.rev(node) # make sure node isn't filtered
466 466 return node
467 467
468 468 def mayberevnum(repo, prefix):
469 469 """Checks if the given prefix may be mistaken for a revision number"""
470 470 try:
471 471 i = int(prefix)
472 472 # if we are a pure int, then starting with zero will not be
473 473 # confused as a rev; or, obviously, if the int is larger
474 474 # than the value of the tip rev
475 475 if prefix[0:1] == b'0' or i > len(repo):
476 476 return False
477 477 return True
478 478 except ValueError:
479 479 return False
480 480
481 481 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
482 482 """Find the shortest unambiguous prefix that matches hexnode.
483 483
484 484 If "cache" is not None, it must be a dictionary that can be used for
485 485 caching between calls to this method.
486 486 """
487 487 # _partialmatch() of filtered changelog could take O(len(repo)) time,
488 488 # which would be unacceptably slow. so we look for hash collision in
489 489 # unfiltered space, which means some hashes may be slightly longer.
490 490
491 491 def disambiguate(prefix):
492 492 """Disambiguate against revnums."""
493 493 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
494 494 if mayberevnum(repo, prefix):
495 495 return 'x' + prefix
496 496 else:
497 497 return prefix
498 498
499 499 hexnode = hex(node)
500 500 for length in range(len(prefix), len(hexnode) + 1):
501 501 prefix = hexnode[:length]
502 502 if not mayberevnum(repo, prefix):
503 503 return prefix
504 504
505 505 cl = repo.unfiltered().changelog
506 506 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
507 507 if revset:
508 508 revs = None
509 509 if cache is not None:
510 510 revs = cache.get('disambiguationrevset')
511 511 if revs is None:
512 512 revs = repo.anyrevs([revset], user=True)
513 513 if cache is not None:
514 514 cache['disambiguationrevset'] = revs
515 515 if cl.rev(node) in revs:
516 516 hexnode = hex(node)
517 517 for length in range(minlength, len(hexnode) + 1):
518 518 matches = []
519 519 prefix = hexnode[:length]
520 520 for rev in revs:
521 521 otherhexnode = repo[rev].hex()
522 522 if prefix == otherhexnode[:length]:
523 523 matches.append(otherhexnode)
524 524 if len(matches) == 1:
525 525 return disambiguate(prefix)
526 526
527 527 try:
528 528 return disambiguate(cl.shortest(node, minlength))
529 529 except error.LookupError:
530 530 raise error.RepoLookupError()
531 531
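# Hedged usage sketch (revision illustrative): the shortest prefix of a
# full node that stays unambiguous in the unfiltered repo, at least
# minlength hex digits long.
node = repo[b'tip'].node()
prefix = shortesthexnodeidprefix(repo, node, minlength=4)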
532 532 def isrevsymbol(repo, symbol):
533 533 """Checks if a symbol exists in the repo.
534 534
535 535 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
536 536 symbol is an ambiguous nodeid prefix.
537 537 """
538 538 try:
539 539 revsymbol(repo, symbol)
540 540 return True
541 541 except error.RepoLookupError:
542 542 return False
543 543
544 544 def revsymbol(repo, symbol):
545 545 """Returns a context given a single revision symbol (as string).
546 546
547 547 This is similar to revsingle(), but accepts only a single revision symbol,
548 548 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
549 549 not "max(public())".
550 550 """
551 551 if not isinstance(symbol, bytes):
552 552 msg = ("symbol (%s of type %s) was not a string, did you mean "
553 553 "repo[symbol]?" % (symbol, type(symbol)))
554 554 raise error.ProgrammingError(msg)
555 555 try:
556 556 if symbol in ('.', 'tip', 'null'):
557 557 return repo[symbol]
558 558
559 559 try:
560 560 r = int(symbol)
561 561 if '%d' % r != symbol:
562 562 raise ValueError
563 563 l = len(repo.changelog)
564 564 if r < 0:
565 565 r += l
566 566 if r < 0 or r >= l and r != wdirrev:
567 567 raise ValueError
568 568 return repo[r]
569 569 except error.FilteredIndexError:
570 570 raise
571 571 except (ValueError, OverflowError, IndexError):
572 572 pass
573 573
574 574 if len(symbol) == 40:
575 575 try:
576 576 node = bin(symbol)
577 577 rev = repo.changelog.rev(node)
578 578 return repo[rev]
579 579 except error.FilteredLookupError:
580 580 raise
581 581 except (TypeError, LookupError):
582 582 pass
583 583
584 584 # look up bookmarks through the name interface
585 585 try:
586 586 node = repo.names.singlenode(repo, symbol)
587 587 rev = repo.changelog.rev(node)
588 588 return repo[rev]
589 589 except KeyError:
590 590 pass
591 591
592 592 node = resolvehexnodeidprefix(repo, symbol)
593 593 if node is not None:
594 594 rev = repo.changelog.rev(node)
595 595 return repo[rev]
596 596
597 597 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
598 598
599 599 except error.WdirUnsupported:
600 600 return repo[None]
601 601 except (error.FilteredIndexError, error.FilteredLookupError,
602 602 error.FilteredRepoLookupError):
603 603 raise _filterederror(repo, symbol)
604 604
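# Hedged examples, per the revsymbol() docstring above (bookmark name
# illustrative); full revset expressions belong in revrange()/revsingle().
ctx = revsymbol(repo, b'.')            # working directory parent
ctx = revsymbol(repo, b'tip')
ctx = revsymbol(repo, b'my-bookmark')  # resolved via the name interface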
605 605 def _filterederror(repo, changeid):
606 606 """build an exception to be raised about a filtered changeid
607 607
608 608 This is extracted in a function to help extensions (eg: evolve) to
609 609 experiment with various message variants."""
610 610 if repo.filtername.startswith('visible'):
611 611
612 612 # Check if the changeset is obsolete
613 613 unfilteredrepo = repo.unfiltered()
614 614 ctx = revsymbol(unfilteredrepo, changeid)
615 615
616 616 # If the changeset is obsolete, enrich the message with the reason
617 617 # that made this changeset not visible
618 618 if ctx.obsolete():
619 619 msg = obsutil._getfilteredreason(repo, changeid, ctx)
620 620 else:
621 621 msg = _("hidden revision '%s'") % changeid
622 622
623 623 hint = _('use --hidden to access hidden revisions')
624 624
625 625 return error.FilteredRepoLookupError(msg, hint=hint)
626 626 msg = _("filtered revision '%s' (not in '%s' subset)")
627 627 msg %= (changeid, repo.filtername)
628 628 return error.FilteredRepoLookupError(msg)
629 629
630 630 def revsingle(repo, revspec, default='.', localalias=None):
631 631 if not revspec and revspec != 0:
632 632 return repo[default]
633 633
634 634 l = revrange(repo, [revspec], localalias=localalias)
635 635 if not l:
636 636 raise error.Abort(_('empty revision set'))
637 637 return repo[l.last()]
638 638
639 639 def _pairspec(revspec):
640 640 tree = revsetlang.parse(revspec)
641 641 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
642 642
643 643 def revpair(repo, revs):
644 644 if not revs:
645 645 return repo['.'], repo[None]
646 646
647 647 l = revrange(repo, revs)
648 648
649 649 if not l:
650 650 first = second = None
651 651 elif l.isascending():
652 652 first = l.min()
653 653 second = l.max()
654 654 elif l.isdescending():
655 655 first = l.max()
656 656 second = l.min()
657 657 else:
658 658 first = l.first()
659 659 second = l.last()
660 660
661 661 if first is None:
662 662 raise error.Abort(_('empty revision range'))
663 663 if (first == second and len(revs) >= 2
664 664 and not all(revrange(repo, [r]) for r in revs)):
665 665 raise error.Abort(_('empty revision on one side of range'))
666 666
667 667 # if top-level is range expression, the result must always be a pair
668 668 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
669 669 return repo[first], repo[None]
670 670
671 671 return repo[first], repo[second]
672 672
673 673 def revrange(repo, specs, localalias=None):
674 674 """Execute 1 to many revsets and return the union.
675 675
676 676 This is the preferred mechanism for executing revsets using user-specified
677 677 config options, such as revset aliases.
678 678
679 679 The revsets specified by ``specs`` will be executed via a chained ``OR``
680 680 expression. If ``specs`` is empty, an empty result is returned.
681 681
682 682 ``specs`` can contain integers, in which case they are assumed to be
683 683 revision numbers.
684 684
685 685 It is assumed the revsets are already formatted. If you have arguments
686 686 that need to be expanded in the revset, call ``revsetlang.formatspec()``
687 687 and pass the result as an element of ``specs``.
688 688
689 689 Specifying a single revset is allowed.
690 690
691 691 Returns a ``revset.abstractsmartset`` which is a list-like interface over
692 692 integer revisions.
693 693 """
694 694 allspecs = []
695 695 for spec in specs:
696 696 if isinstance(spec, int):
697 697 spec = revsetlang.formatspec('rev(%d)', spec)
698 698 allspecs.append(spec)
699 699 return repo.anyrevs(allspecs, user=True, localalias=localalias)
700 700
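# Hedged sketch: two user-supplied revsets executed as one OR'ed query,
# yielding an abstractsmartset of integer revisions (specs illustrative).
revs = revrange(repo, [b'heads(default)', b'.^::.'])
if revs:
    first, last = revs.min(), revs.max()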
701 701 def meaningfulparents(repo, ctx):
702 702 """Return list of meaningful (or all if debug) parentrevs for rev.
703 703
704 704 For merges (two non-nullrev revisions) both parents are meaningful.
705 705 Otherwise the first parent revision is considered meaningful if it
706 706 is not the preceding revision.
707 707 """
708 708 parents = ctx.parents()
709 709 if len(parents) > 1:
710 710 return parents
711 711 if repo.ui.debugflag:
712 712 return [parents[0], repo['null']]
713 713 if parents[0].rev() >= intrev(ctx) - 1:
714 714 return []
715 715 return parents
716 716
717 717 def expandpats(pats):
718 718 '''Expand bare globs when running on windows.
719 719 On posix we assume it has already been done by sh.'''
720 720 if not util.expandglobs:
721 721 return list(pats)
722 722 ret = []
723 723 for kindpat in pats:
724 724 kind, pat = matchmod._patsplit(kindpat, None)
725 725 if kind is None:
726 726 try:
727 727 globbed = glob.glob(pat)
728 728 except re.error:
729 729 globbed = [pat]
730 730 if globbed:
731 731 ret.extend(globbed)
732 732 continue
733 733 ret.append(kindpat)
734 734 return ret
735 735
736 736 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
737 737 badfn=None):
738 738 '''Return a matcher and the patterns that were used.
739 739 The matcher will warn about bad matches, unless an alternate badfn callback
740 740 is provided.'''
741 741 if pats == ("",):
742 742 pats = []
743 743 if opts is None:
744 744 opts = {}
745 745 if not globbed and default == 'relpath':
746 746 pats = expandpats(pats or [])
747 747
748 748 def bad(f, msg):
749 749 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
750 750
751 751 if badfn is None:
752 752 badfn = bad
753 753
754 754 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
755 755 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
756 756
757 757 if m.always():
758 758 pats = []
759 759 return m, pats
760 760
761 761 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
762 762 badfn=None):
763 763 '''Return a matcher that will warn about bad matches.'''
764 764 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
765 765
766 766 def matchall(repo):
767 767 '''Return a matcher that will efficiently match everything.'''
768 768 return matchmod.always(repo.root, repo.getcwd())
769 769
770 770 def matchfiles(repo, files, badfn=None):
771 771 '''Return a matcher that will efficiently match exactly these files.'''
772 772 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
773 773
774 774 def parsefollowlinespattern(repo, rev, pat, msg):
775 775 """Return a file name from `pat` pattern suitable for usage in followlines
776 776 logic.
777 777 """
778 778 if not matchmod.patkind(pat):
779 779 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
780 780 else:
781 781 ctx = repo[rev]
782 782 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
783 783 files = [f for f in ctx if m(f)]
784 784 if len(files) != 1:
785 785 raise error.ParseError(msg)
786 786 return files[0]
787 787
788 788 def origpath(ui, repo, filepath):
789 789 '''customize where .orig files are created
790 790
791 791 Fetch user defined path from config file: [ui] origbackuppath = <path>
792 792 Fall back to default (filepath with .orig suffix) if not specified
793 793 '''
794 794 origbackuppath = ui.config('ui', 'origbackuppath')
795 795 if not origbackuppath:
796 796 return filepath + ".orig"
797 797
798 798 # Convert filepath from an absolute path into a path inside the repo.
799 799 filepathfromroot = util.normpath(os.path.relpath(filepath,
800 800 start=repo.root))
801 801
802 802 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
803 803 origbackupdir = origvfs.dirname(filepathfromroot)
804 804 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
805 805 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
806 806
807 807 # Remove any files that conflict with the backup file's path
808 808 for f in reversed(list(util.finddirs(filepathfromroot))):
809 809 if origvfs.isfileorlink(f):
810 810 ui.note(_('removing conflicting file: %s\n')
811 811 % origvfs.join(f))
812 812 origvfs.unlink(f)
813 813 break
814 814
815 815 origvfs.makedirs(origbackupdir)
816 816
817 817 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
818 818 ui.note(_('removing conflicting directory: %s\n')
819 819 % origvfs.join(filepathfromroot))
820 820 origvfs.rmtree(filepathfromroot, forcibly=True)
821 821
822 822 return origvfs.join(filepathfromroot)
823 823
824 824 class _containsnode(object):
825 825 """proxy __contains__(node) to container.__contains__ which accepts revs"""
826 826
827 827 def __init__(self, repo, revcontainer):
828 828 self._torev = repo.changelog.rev
829 829 self._revcontains = revcontainer.__contains__
830 830
831 831 def __contains__(self, node):
832 832 return self._revcontains(self._torev(node))
833 833
834 834 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
835 835 fixphase=False, targetphase=None, backup=True):
836 836 """do common cleanups when old nodes are replaced by new nodes
837 837
838 838 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
839 839 (we might also want to move working directory parent in the future)
840 840
841 841 By default, bookmark moves are calculated automatically from 'replacements',
842 842 but 'moves' can be used to override that. Also, 'moves' may include
843 843 additional bookmark moves that should not have associated obsmarkers.
844 844
845 845 replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
846 846 have replacements. operation is a string, like "rebase".
847 847
848 848 metadata is a dictionary containing metadata to be stored in obsmarkers if
849 849 obsolescence is enabled.
850 850 """
851 851 assert fixphase or targetphase is None
852 852 if not replacements and not moves:
853 853 return
854 854
855 855 # translate mapping's other forms
856 856 if not util.safehasattr(replacements, 'items'):
857 857 replacements = {n: () for n in replacements}
858 858
859 859 # Calculate bookmark movements
860 860 if moves is None:
861 861 moves = {}
862 862 # Unfiltered repo is needed since nodes in replacements might be hidden.
863 863 unfi = repo.unfiltered()
864 864 for oldnode, newnodes in replacements.items():
865 865 if oldnode in moves:
866 866 continue
867 867 if len(newnodes) > 1:
868 868 # usually a split, take the one with biggest rev number
869 869 newnode = next(unfi.set('max(%ln)', newnodes)).node()
870 870 elif len(newnodes) == 0:
871 871 # move bookmark backwards
872 872 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
873 873 list(replacements)))
874 874 if roots:
875 875 newnode = roots[0].node()
876 876 else:
877 877 newnode = nullid
878 878 else:
879 879 newnode = newnodes[0]
880 880 moves[oldnode] = newnode
881 881
882 882 allnewnodes = [n for ns in replacements.values() for n in ns]
883 883 toretract = {}
884 884 toadvance = {}
885 885 if fixphase:
886 886 precursors = {}
887 887 for oldnode, newnodes in replacements.items():
888 888 for newnode in newnodes:
889 889 precursors.setdefault(newnode, []).append(oldnode)
890 890
891 891 allnewnodes.sort(key=lambda n: unfi[n].rev())
892 892 newphases = {}
893 893 def phase(ctx):
894 894 return newphases.get(ctx.node(), ctx.phase())
895 895 for newnode in allnewnodes:
896 896 ctx = unfi[newnode]
897 897 parentphase = max(phase(p) for p in ctx.parents())
898 898 if targetphase is None:
899 899 oldphase = max(unfi[oldnode].phase()
900 900 for oldnode in precursors[newnode])
901 901 newphase = max(oldphase, parentphase)
902 902 else:
903 903 newphase = max(targetphase, parentphase)
904 904 newphases[newnode] = newphase
905 905 if newphase > ctx.phase():
906 906 toretract.setdefault(newphase, []).append(newnode)
907 907 elif newphase < ctx.phase():
908 908 toadvance.setdefault(newphase, []).append(newnode)
909 909
910 910 with repo.transaction('cleanup') as tr:
911 911 # Move bookmarks
912 912 bmarks = repo._bookmarks
913 913 bmarkchanges = []
914 914 for oldnode, newnode in moves.items():
915 915 oldbmarks = repo.nodebookmarks(oldnode)
916 916 if not oldbmarks:
917 917 continue
918 918 from . import bookmarks # avoid import cycle
919 919 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
920 920 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
921 921 hex(oldnode), hex(newnode)))
922 922 # Delete divergent bookmarks being parents of related newnodes
923 923 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
924 924 allnewnodes, newnode, oldnode)
925 925 deletenodes = _containsnode(repo, deleterevs)
926 926 for name in oldbmarks:
927 927 bmarkchanges.append((name, newnode))
928 928 for b in bookmarks.divergent2delete(repo, deletenodes, name):
929 929 bmarkchanges.append((b, None))
930 930
931 931 if bmarkchanges:
932 932 bmarks.applychanges(repo, tr, bmarkchanges)
933 933
934 934 for phase, nodes in toretract.items():
935 935 phases.retractboundary(repo, tr, phase, nodes)
936 936 for phase, nodes in toadvance.items():
937 937 phases.advanceboundary(repo, tr, phase, nodes)
938 938
939 939 # Obsolete or strip nodes
940 940 if obsolete.isenabled(repo, obsolete.createmarkersopt):
941 941 # If a node is already obsoleted, and we want to obsolete it
942 942 # without a successor, skip that obsolete request since it's
943 943 # unnecessary. That's the "if s or not isobs(n)" check below.
944 944 # Also sort the nodes in topological order; that might be useful for
945 945 # some obsstore logic.
946 946 # NOTE: the filtering and sorting might belong to createmarkers.
947 947 isobs = unfi.obsstore.successors.__contains__
948 948 torev = unfi.changelog.rev
949 949 sortfunc = lambda ns: torev(ns[0])
950 950 rels = [(unfi[n], tuple(unfi[m] for m in s))
951 951 for n, s in sorted(replacements.items(), key=sortfunc)
952 952 if s or not isobs(n)]
953 953 if rels:
954 954 obsolete.createmarkers(repo, rels, operation=operation,
955 955 metadata=metadata)
956 956 else:
957 957 from . import repair # avoid import cycle
958 958 tostrip = list(replacements)
959 959 if tostrip:
960 960 repair.delayedstrip(repo.ui, repo, tostrip, operation,
961 961 backup=backup)
962 962
963 963 def addremove(repo, matcher, prefix, opts=None):
964 964 if opts is None:
965 965 opts = {}
966 966 m = matcher
967 967 dry_run = opts.get('dry_run')
968 968 try:
969 969 similarity = float(opts.get('similarity') or 0)
970 970 except ValueError:
971 971 raise error.Abort(_('similarity must be a number'))
972 972 if similarity < 0 or similarity > 100:
973 973 raise error.Abort(_('similarity must be between 0 and 100'))
974 974 similarity /= 100.0
975 975
976 976 ret = 0
977 977 join = lambda f: os.path.join(prefix, f)
978 978
979 979 wctx = repo[None]
980 980 for subpath in sorted(wctx.substate):
981 981 submatch = matchmod.subdirmatcher(subpath, m)
982 982 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
983 983 sub = wctx.sub(subpath)
984 984 try:
985 985 if sub.addremove(submatch, prefix, opts):
986 986 ret = 1
987 987 except error.LookupError:
988 988 repo.ui.status(_("skipping missing subrepository: %s\n")
989 989 % join(subpath))
990 990
991 991 rejected = []
992 992 def badfn(f, msg):
993 993 if f in m.files():
994 994 m.bad(f, msg)
995 995 rejected.append(f)
996 996
997 997 badmatch = matchmod.badmatch(m, badfn)
998 998 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
999 999 badmatch)
1000 1000
1001 1001 unknownset = set(unknown + forgotten)
1002 1002 toprint = unknownset.copy()
1003 1003 toprint.update(deleted)
1004 1004 for abs in sorted(toprint):
1005 1005 if repo.ui.verbose or not m.exact(abs):
1006 1006 if abs in unknownset:
1007 1007 status = _('adding %s\n') % m.uipath(abs)
1008 label = 'addremove.added'
1008 1009 else:
1009 1010 status = _('removing %s\n') % m.uipath(abs)
1010 repo.ui.status(status)
1011 label = 'addremove.removed'
1012 repo.ui.status(status, label=label)
1011 1013
1012 1014 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1013 1015 similarity)
1014 1016
1015 1017 if not dry_run:
1016 1018 _markchanges(repo, unknown + forgotten, deleted, renames)
1017 1019
1018 1020 for f in rejected:
1019 1021 if f in m.files():
1020 1022 return 1
1021 1023 return ret
1022 1024
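# Hedged caller sketch matching the signature above (opts illustrative):
# with a dry run nothing is marked, but the newly labelled
# 'adding'/'removing' messages are still printed.
ret = addremove(repo, matchall(repo), prefix=b'',
                opts={'dry_run': True, 'similarity': '90'})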
1023 1025 def marktouched(repo, files, similarity=0.0):
1024 1026 '''Assert that files have somehow been operated upon. files are relative to
1025 1027 the repo root.'''
1026 1028 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1027 1029 rejected = []
1028 1030
1029 1031 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1030 1032
1031 1033 if repo.ui.verbose:
1032 1034 unknownset = set(unknown + forgotten)
1033 1035 toprint = unknownset.copy()
1034 1036 toprint.update(deleted)
1035 1037 for abs in sorted(toprint):
1036 1038 if abs in unknownset:
1037 1039 status = _('adding %s\n') % abs
1038 1040 else:
1039 1041 status = _('removing %s\n') % abs
1040 1042 repo.ui.status(status)
1041 1043
1042 1044 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1043 1045 similarity)
1044 1046
1045 1047 _markchanges(repo, unknown + forgotten, deleted, renames)
1046 1048
1047 1049 for f in rejected:
1048 1050 if f in m.files():
1049 1051 return 1
1050 1052 return 0
1051 1053
1052 1054 def _interestingfiles(repo, matcher):
1053 1055 '''Walk dirstate with matcher, looking for files that addremove would care
1054 1056 about.
1055 1057
1056 1058 This is different from dirstate.status because it doesn't care about
1057 1059 whether files are modified or clean.'''
1058 1060 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1059 1061 audit_path = pathutil.pathauditor(repo.root, cached=True)
1060 1062
1061 1063 ctx = repo[None]
1062 1064 dirstate = repo.dirstate
1063 1065 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1064 1066 unknown=True, ignored=False, full=False)
1065 1067 for abs, st in walkresults.iteritems():
1066 1068 dstate = dirstate[abs]
1067 1069 if dstate == '?' and audit_path.check(abs):
1068 1070 unknown.append(abs)
1069 1071 elif dstate != 'r' and not st:
1070 1072 deleted.append(abs)
1071 1073 elif dstate == 'r' and st:
1072 1074 forgotten.append(abs)
1073 1075 # for finding renames
1074 1076 elif dstate == 'r' and not st:
1075 1077 removed.append(abs)
1076 1078 elif dstate == 'a':
1077 1079 added.append(abs)
1078 1080
1079 1081 return added, unknown, deleted, removed, forgotten
1080 1082
1081 1083 def _findrenames(repo, matcher, added, removed, similarity):
1082 1084 '''Find renames from removed files to added ones.'''
1083 1085 renames = {}
1084 1086 if similarity > 0:
1085 1087 for old, new, score in similar.findrenames(repo, added, removed,
1086 1088 similarity):
1087 1089 if (repo.ui.verbose or not matcher.exact(old)
1088 1090 or not matcher.exact(new)):
1089 1091 repo.ui.status(_('recording removal of %s as rename to %s '
1090 1092 '(%d%% similar)\n') %
1091 1093 (matcher.rel(old), matcher.rel(new),
1092 1094 score * 100))
1093 1095 renames[new] = old
1094 1096 return renames
1095 1097
1096 1098 def _markchanges(repo, unknown, deleted, renames):
1097 1099 '''Marks the files in unknown as added, the files in deleted as removed,
1098 1100 and the files in renames as copied.'''
1099 1101 wctx = repo[None]
1100 1102 with repo.wlock():
1101 1103 wctx.forget(deleted)
1102 1104 wctx.add(unknown)
1103 1105 for new, old in renames.iteritems():
1104 1106 wctx.copy(old, new)
1105 1107
1106 1108 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1107 1109 """Update the dirstate to reflect the intent of copying src to dst. For
1108 1110 different reasons it might not end with dst being marked as copied from src.
1109 1111 """
1110 1112 origsrc = repo.dirstate.copied(src) or src
1111 1113 if dst == origsrc: # copying back a copy?
1112 1114 if repo.dirstate[dst] not in 'mn' and not dryrun:
1113 1115 repo.dirstate.normallookup(dst)
1114 1116 else:
1115 1117 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1116 1118 if not ui.quiet:
1117 1119 ui.warn(_("%s has not been committed yet, so no copy "
1118 1120 "data will be stored for %s.\n")
1119 1121 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1120 1122 if repo.dirstate[dst] in '?r' and not dryrun:
1121 1123 wctx.add([dst])
1122 1124 elif not dryrun:
1123 1125 wctx.copy(origsrc, dst)
1124 1126
1125 1127 def readrequires(opener, supported):
1126 1128 '''Reads and parses .hg/requires and checks if all entries found
1127 1129 are in the list of supported features.'''
1128 1130 requirements = set(opener.read("requires").splitlines())
1129 1131 missings = []
1130 1132 for r in requirements:
1131 1133 if r not in supported:
1132 1134 if not r or not r[0:1].isalnum():
1133 1135 raise error.RequirementError(_(".hg/requires file is corrupt"))
1134 1136 missings.append(r)
1135 1137 missings.sort()
1136 1138 if missings:
1137 1139 raise error.RequirementError(
1138 1140 _("repository requires features unknown to this Mercurial: %s")
1139 1141 % " ".join(missings),
1140 1142 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1141 1143 " for more information"))
1142 1144 return requirements
1143 1145
1144 1146 def writerequires(opener, requirements):
1145 1147 with opener('requires', 'w') as fp:
1146 1148 for r in sorted(requirements):
1147 1149 fp.write("%s\n" % r)
1148 1150
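# Hedged round-trip sketch, assuming 'repo.vfs' is the vfs rooted at .hg
# (feature names illustrative):
writerequires(repo.vfs, {b'revlogv1', b'store'})
reqs = readrequires(repo.vfs, supported={b'revlogv1', b'store', b'fncache'})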
1149 1151 class filecachesubentry(object):
1150 1152 def __init__(self, path, stat):
1151 1153 self.path = path
1152 1154 self.cachestat = None
1153 1155 self._cacheable = None
1154 1156
1155 1157 if stat:
1156 1158 self.cachestat = filecachesubentry.stat(self.path)
1157 1159
1158 1160 if self.cachestat:
1159 1161 self._cacheable = self.cachestat.cacheable()
1160 1162 else:
1161 1163 # None means we don't know yet
1162 1164 self._cacheable = None
1163 1165
1164 1166 def refresh(self):
1165 1167 if self.cacheable():
1166 1168 self.cachestat = filecachesubentry.stat(self.path)
1167 1169
1168 1170 def cacheable(self):
1169 1171 if self._cacheable is not None:
1170 1172 return self._cacheable
1171 1173
1172 1174 # we don't know yet, assume it is for now
1173 1175 return True
1174 1176
1175 1177 def changed(self):
1176 1178 # no point in going further if we can't cache it
1177 1179 if not self.cacheable():
1178 1180 return True
1179 1181
1180 1182 newstat = filecachesubentry.stat(self.path)
1181 1183
1182 1184 # we may not know if it's cacheable yet, check again now
1183 1185 if newstat and self._cacheable is None:
1184 1186 self._cacheable = newstat.cacheable()
1185 1187
1186 1188 # check again
1187 1189 if not self._cacheable:
1188 1190 return True
1189 1191
1190 1192 if self.cachestat != newstat:
1191 1193 self.cachestat = newstat
1192 1194 return True
1193 1195 else:
1194 1196 return False
1195 1197
1196 1198 @staticmethod
1197 1199 def stat(path):
1198 1200 try:
1199 1201 return util.cachestat(path)
1200 1202 except OSError as e:
1201 1203 if e.errno != errno.ENOENT:
1202 1204 raise
1203 1205
1204 1206 class filecacheentry(object):
1205 1207 def __init__(self, paths, stat=True):
1206 1208 self._entries = []
1207 1209 for path in paths:
1208 1210 self._entries.append(filecachesubentry(path, stat))
1209 1211
1210 1212 def changed(self):
1211 1213 '''true if any entry has changed'''
1212 1214 for entry in self._entries:
1213 1215 if entry.changed():
1214 1216 return True
1215 1217 return False
1216 1218
1217 1219 def refresh(self):
1218 1220 for entry in self._entries:
1219 1221 entry.refresh()
1220 1222
1221 1223 class filecache(object):
1222 1224 """A property like decorator that tracks files under .hg/ for updates.
1223 1225
1224 1226 On first access, the files defined as arguments are stat()ed and the
1225 1227 results cached. The decorated function is called. The results are stashed
1226 1228 away in a ``_filecache`` dict on the object whose method is decorated.
1227 1229
1228 1230 On subsequent access, the cached result is returned.
1229 1231
1230 1232 On external property set operations, stat() calls are performed and the new
1231 1233 value is cached.
1232 1234
1233 1235 On property delete operations, cached data is removed.
1234 1236
1235 1237 When using the property API, cached data is always returned, if available:
1236 1238 no stat() is performed to check if the file has changed and if the function
1237 1239 needs to be called to reflect file changes.
1238 1240
1239 1241 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1240 1242 can populate an entry before the property's getter is called. In this case,
1241 1243 entries in ``_filecache`` will be used during property operations,
1242 1244 if available. If the underlying file changes, it is up to external callers
1243 1245 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1244 1246 method result as well as possibly calling ``del obj._filecache[attr]`` to
1245 1247 remove the ``filecacheentry``.
1246 1248 """
1247 1249
1248 1250 def __init__(self, *paths):
1249 1251 self.paths = paths
1250 1252
1251 1253 def join(self, obj, fname):
1252 1254 """Used to compute the runtime path of a cached file.
1253 1255
1254 1256 Users should subclass filecache and provide their own version of this
1255 1257 function to call the appropriate join function on 'obj' (an instance
1256 1258 of the class whose member function was decorated).
1257 1259 """
1258 1260 raise NotImplementedError
1259 1261
1260 1262 def __call__(self, func):
1261 1263 self.func = func
1262 1264 self.sname = func.__name__
1263 1265 self.name = pycompat.sysbytes(self.sname)
1264 1266 return self
1265 1267
1266 1268 def __get__(self, obj, type=None):
1267 1269 # if accessed on the class, return the descriptor itself.
1268 1270 if obj is None:
1269 1271 return self
1270 1272 # do we need to check if the file changed?
1271 1273 if self.sname in obj.__dict__:
1272 1274 assert self.name in obj._filecache, self.name
1273 1275 return obj.__dict__[self.sname]
1274 1276
1275 1277 entry = obj._filecache.get(self.name)
1276 1278
1277 1279 if entry:
1278 1280 if entry.changed():
1279 1281 entry.obj = self.func(obj)
1280 1282 else:
1281 1283 paths = [self.join(obj, path) for path in self.paths]
1282 1284
1283 1285 # We stat -before- creating the object so our cache doesn't lie if
1284 1286 # a writer modified the file between the time we read and stat
1285 1287 entry = filecacheentry(paths, True)
1286 1288 entry.obj = self.func(obj)
1287 1289
1288 1290 obj._filecache[self.name] = entry
1289 1291
1290 1292 obj.__dict__[self.sname] = entry.obj
1291 1293 return entry.obj
1292 1294
1293 1295 def __set__(self, obj, value):
1294 1296 if self.name not in obj._filecache:
1295 1297 # we add an entry for the missing value because X in __dict__
1296 1298 # implies X in _filecache
1297 1299 paths = [self.join(obj, path) for path in self.paths]
1298 1300 ce = filecacheentry(paths, False)
1299 1301 obj._filecache[self.name] = ce
1300 1302 else:
1301 1303 ce = obj._filecache[self.name]
1302 1304
1303 1305 ce.obj = value # update cached copy
1304 1306 obj.__dict__[self.sname] = value # update copy returned by obj.x
1305 1307
1306 1308 def __delete__(self, obj):
1307 1309 try:
1308 1310 del obj.__dict__[self.sname]
1309 1311 except KeyError:
1310 1312 raise AttributeError(self.sname)
1311 1313
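# Hedged sketch of the subclassing pattern the docstring describes (class
# and attribute names illustrative): join() resolves cached paths relative
# to the decorated object, here via a vfs attribute.
class repofilecache(filecache):
    def join(self, obj, fname):
        return obj.vfs.join(fname)

class fakerepo(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}    # required by the descriptor above

    @repofilecache(b'bookmarks')
    def bookmarks(self):
        return self.vfs.read(b'bookmarks')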
1312 1314 def extdatasource(repo, source):
1313 1315 """Gather a map of rev -> value dict from the specified source
1314 1316
1315 1317 A source spec is treated as a URL, with a special case shell: type
1316 1318 for parsing the output from a shell command.
1317 1319
1318 1320 The data is parsed as a series of newline-separated records where
1319 1321 each record is a revision specifier optionally followed by a space
1320 1322 and a freeform string value. If the revision is known locally, it
1321 1323 is converted to a rev, otherwise the record is skipped.
1322 1324
1323 1325 Note that both key and value are treated as UTF-8 and converted to
1324 1326 the local encoding. This allows uniformity between local and
1325 1327 remote data sources.
1326 1328 """
1327 1329
1328 1330 spec = repo.ui.config("extdata", source)
1329 1331 if not spec:
1330 1332 raise error.Abort(_("unknown extdata source '%s'") % source)
1331 1333
1332 1334 data = {}
1333 1335 src = proc = None
1334 1336 try:
1335 1337 if spec.startswith("shell:"):
1336 1338 # external commands should be run relative to the repo root
1337 1339 cmd = spec[6:]
1338 1340 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1339 1341 close_fds=procutil.closefds,
1340 1342 stdout=subprocess.PIPE, cwd=repo.root)
1341 1343 src = proc.stdout
1342 1344 else:
1343 1345 # treat as a URL or file
1344 1346 src = url.open(repo.ui, spec)
1345 1347 for l in src:
1346 1348 if " " in l:
1347 1349 k, v = l.strip().split(" ", 1)
1348 1350 else:
1349 1351 k, v = l.strip(), ""
1350 1352
1351 1353 k = encoding.tolocal(k)
1352 1354 try:
1353 1355 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1354 1356 except (error.LookupError, error.RepoLookupError):
1355 1357 pass # we ignore data for nodes that don't exist locally
1356 1358 finally:
1357 1359 if proc:
1358 1360 proc.communicate()
1359 1361 if src:
1360 1362 src.close()
1361 1363 if proc and proc.returncode != 0:
1362 1364 raise error.Abort(_("extdata command '%s' failed: %s")
1363 1365 % (cmd, procutil.explainexit(proc.returncode)))
1364 1366
1365 1367 return data
1366 1368
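# Hedged example, per the docstring above (source name and command
# illustrative). With this config:
#   [extdata]
#   bugzilla = shell:cat extdata.txt
# where each output line is '<revspec> <freeform value>', the call returns
# a {rev: value} dict covering the revisions known locally.
data = extdatasource(repo, b'bugzilla')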
1367 1369 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1368 1370 if lock is None:
1369 1371 raise error.LockInheritanceContractViolation(
1370 1372 'lock can only be inherited while held')
1371 1373 if environ is None:
1372 1374 environ = {}
1373 1375 with lock.inherit() as locker:
1374 1376 environ[envvar] = locker
1375 1377 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1376 1378
1377 1379 def wlocksub(repo, cmd, *args, **kwargs):
1378 1380 """run cmd as a subprocess that allows inheriting repo's wlock
1379 1381
1380 1382 This can only be called while the wlock is held. This takes all the
1381 1383 arguments that ui.system does, and returns the exit code of the
1382 1384 subprocess."""
1383 1385 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1384 1386 **kwargs)
1385 1387
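# Hedged usage sketch (command illustrative): must be called with the
# wlock held; the subprocess can reacquire it via HG_WLOCK_LOCKER.
with repo.wlock():
    rc = wlocksub(repo, b'touch .hg/some-file')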
1386 1388 class progress(object):
1387 1389 def __init__(self, ui, topic, unit="", total=None):
1388 1390 self.ui = ui
1389 1391 self.pos = 0
1390 1392 self.topic = topic
1391 1393 self.unit = unit
1392 1394 self.total = total
1393 1395
1394 1396 def __enter__(self):
1395 1397 return self
1396 1398
1397 1399 def __exit__(self, exc_type, exc_value, exc_tb):
1398 1400 self.complete()
1399 1401
1400 1402 def update(self, pos, item="", total=None):
1401 1403 assert pos is not None
1402 1404 if total:
1403 1405 self.total = total
1404 1406 self.pos = pos
1405 1407 self._print(item)
1406 1408
1407 1409 def increment(self, step=1, item="", total=None):
1408 1410 self.update(self.pos + step, item, total)
1409 1411
1410 1412 def complete(self):
1411 1413 self.ui.progress(self.topic, None)
1412 1414
1413 1415 def _print(self, item):
1414 1416 self.ui.progress(self.topic, self.pos, item, self.unit,
1415 1417 self.total)
1416 1418
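# Usage sketch of the progress helper above (topic/unit illustrative); the
# context manager calls complete() on exit.
files = [b'a', b'b', b'c']
with progress(repo.ui, b'scanning', unit=b'files', total=len(files)) as p:
    for i, f in enumerate(files):
        p.update(i, item=f)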
1417 1419 def gdinitconfig(ui):
1418 1420 """helper function to know if a repo should be created as general delta
1419 1421 """
1420 1422 # experimental config: format.generaldelta
1421 1423 return (ui.configbool('format', 'generaldelta')
1422 1424 or ui.configbool('format', 'usegeneraldelta')
1423 1425 or ui.configbool('format', 'sparse-revlog'))
1424 1426
1425 1427 def gddeltaconfig(ui):
1426 1428 """helper function to know if incoming delta should be optimised
1427 1429 """
1428 1430 # experimental config: format.generaldelta
1429 1431 return ui.configbool('format', 'generaldelta')
1430 1432
1431 1433 class simplekeyvaluefile(object):
1432 1434 """A simple file with key=value lines
1433 1435
1434 1436 Keys must be alphanumeric and start with a letter, values must not
1435 1437 contain '\n' characters"""
1436 1438 firstlinekey = '__firstline'
1437 1439
1438 1440 def __init__(self, vfs, path, keys=None):
1439 1441 self.vfs = vfs
1440 1442 self.path = path
1441 1443
1442 1444 def read(self, firstlinenonkeyval=False):
1443 1445 """Read the contents of a simple key-value file
1444 1446
1445 1447 'firstlinenonkeyval' indicates whether the first line of file should
1446 1448 be treated as a key-value pair or returned fully under the
1447 1449 __firstline key."""
1448 1450 lines = self.vfs.readlines(self.path)
1449 1451 d = {}
1450 1452 if firstlinenonkeyval:
1451 1453 if not lines:
1452 1454 e = _("empty simplekeyvalue file")
1453 1455 raise error.CorruptedState(e)
1454 1456 # we don't want to include '\n' in the __firstline
1455 1457 d[self.firstlinekey] = lines[0][:-1]
1456 1458 del lines[0]
1457 1459
1458 1460 try:
1459 1461 # the 'if line.strip()' part prevents us from failing on empty
1460 1462 # lines which only contain '\n' and therefore are not skipped
1461 1463 # by 'if line'
1462 1464 updatedict = dict(line[:-1].split('=', 1) for line in lines
1463 1465 if line.strip())
1464 1466 if self.firstlinekey in updatedict:
1465 1467 e = _("%r can't be used as a key")
1466 1468 raise error.CorruptedState(e % self.firstlinekey)
1467 1469 d.update(updatedict)
1468 1470 except ValueError as e:
1469 1471 raise error.CorruptedState(str(e))
1470 1472 return d
1471 1473
1472 1474 def write(self, data, firstline=None):
1473 1475 """Write key=>value mapping to a file
1474 1476 data is a dict. Keys must be alphanumeric and start with a letter.
1475 1477 Values must not contain newline characters.
1476 1478
1477 1479 If 'firstline' is not None, it is written to file before
1478 1480 everything else, as it is, not in a key=value form"""
1479 1481 lines = []
1480 1482 if firstline is not None:
1481 1483 lines.append('%s\n' % firstline)
1482 1484
1483 1485 for k, v in data.items():
1484 1486 if k == self.firstlinekey:
1485 1487 e = "key name '%s' is reserved" % self.firstlinekey
1486 1488 raise error.ProgrammingError(e)
1487 1489 if not k[0:1].isalpha():
1488 1490 e = "keys must start with a letter in a key-value file"
1489 1491 raise error.ProgrammingError(e)
1490 1492 if not k.isalnum():
1491 1493 e = "invalid key name in a simple key-value file"
1492 1494 raise error.ProgrammingError(e)
1493 1495 if '\n' in v:
1494 1496 e = "invalid value in a simple key-value file"
1495 1497 raise error.ProgrammingError(e)
1496 1498 lines.append("%s=%s\n" % (k, v))
1497 1499 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1498 1500 fp.write(''.join(lines))
1499 1501
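# Hedged round-trip sketch (path and keys illustrative): keys must be
# alphanumeric and start with a letter, values must not contain newlines.
kv = simplekeyvaluefile(repo.vfs, b'last-email.kv')
kv.write({b'user': b'alice', b'node': b'deadbeef'})
assert kv.read() == {b'user': b'alice', b'node': b'deadbeef'}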
1500 1502 _reportobsoletedsource = [
1501 1503 'debugobsolete',
1502 1504 'pull',
1503 1505 'push',
1504 1506 'serve',
1505 1507 'unbundle',
1506 1508 ]
1507 1509
1508 1510 _reportnewcssource = [
1509 1511 'pull',
1510 1512 'unbundle',
1511 1513 ]
1512 1514
1513 1515 def prefetchfiles(repo, revs, match):
1514 1516 """Invokes the registered file prefetch functions, allowing extensions to
1515 1517 ensure the corresponding files are available locally, before the command
1516 1518 uses them."""
1517 1519 if match:
1518 1520 # The command itself will complain about files that don't exist, so
1519 1521 # don't duplicate the message.
1520 1522 match = matchmod.badmatch(match, lambda fn, msg: None)
1521 1523 else:
1522 1524 match = matchall(repo)
1523 1525
1524 1526 fileprefetchhooks(repo, revs, match)
1525 1527
1526 1528 # a list of (repo, revs, match) prefetch functions
1527 1529 fileprefetchhooks = util.hooks()
1528 1530
1529 1531 # A marker that tells the evolve extension to suppress its own reporting
1530 1532 _reportstroubledchangesets = True
1531 1533
1532 1534 def registersummarycallback(repo, otr, txnname=''):
1533 1535 """register a callback to issue a summary after the transaction is closed
1534 1536 """
1535 1537 def txmatch(sources):
1536 1538 return any(txnname.startswith(source) for source in sources)
1537 1539
1538 1540 categories = []
1539 1541
1540 1542 def reportsummary(func):
1541 1543 """decorator for report callbacks."""
1542 1544 # The repoview life cycle is shorter than that of the actual
1543 1545 # underlying repository. The filtered object can therefore die
1544 1546 # before the weakref is used, leading to errors. We keep a
1545 1547 # reference to the unfiltered object and restore the filtering
1546 1548 # when retrieving the repository through the weakref.
1547 1549 filtername = repo.filtername
1548 1550 reporef = weakref.ref(repo.unfiltered())
1549 1551 def wrapped(tr):
1550 1552 repo = reporef()
1551 1553 if filtername:
1552 1554 repo = repo.filtered(filtername)
1553 1555 func(repo, tr)
1554 1556 newcat = '%02i-txnreport' % len(categories)
1555 1557 otr.addpostclose(newcat, wrapped)
1556 1558 categories.append(newcat)
1557 1559 return wrapped
1558 1560
1559 1561 if txmatch(_reportobsoletedsource):
1560 1562 @reportsummary
1561 1563 def reportobsoleted(repo, tr):
1562 1564 obsoleted = obsutil.getobsoleted(repo, tr)
1563 1565 if obsoleted:
1564 1566 repo.ui.status(_('obsoleted %i changesets\n')
1565 1567 % len(obsoleted))
1566 1568
1567 1569 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1568 1570 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1569 1571 instabilitytypes = [
1570 1572 ('orphan', 'orphan'),
1571 1573 ('phase-divergent', 'phasedivergent'),
1572 1574 ('content-divergent', 'contentdivergent'),
1573 1575 ]
1574 1576
1575 1577 def getinstabilitycounts(repo):
1576 1578 filtered = repo.changelog.filteredrevs
1577 1579 counts = {}
1578 1580 for instability, revset in instabilitytypes:
1579 1581 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1580 1582 filtered)
1581 1583 return counts
1582 1584
1583 1585 oldinstabilitycounts = getinstabilitycounts(repo)
1584 1586 @reportsummary
1585 1587 def reportnewinstabilities(repo, tr):
1586 1588 newinstabilitycounts = getinstabilitycounts(repo)
1587 1589 for instability, revset in instabilitytypes:
1588 1590 delta = (newinstabilitycounts[instability] -
1589 1591 oldinstabilitycounts[instability])
1590 1592 msg = getinstabilitymessage(delta, instability)
1591 1593 if msg:
1592 1594 repo.ui.warn(msg)
1593 1595
1594 1596 if txmatch(_reportnewcssource):
1595 1597 @reportsummary
1596 1598 def reportnewcs(repo, tr):
1597 1599 """Report the range of new revisions pulled/unbundled."""
1598 1600 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1599 1601 if not newrevs:
1600 1602 return
1601 1603
1602 1604 # Compute the bounds of new revisions' range, excluding obsoletes.
1603 1605 unfi = repo.unfiltered()
1604 1606 revs = unfi.revs('%ld and not obsolete()', newrevs)
1605 1607 if not revs:
1606 1608 # Got only obsoletes.
1607 1609 return
1608 1610 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1609 1611
1610 1612 if minrev == maxrev:
1611 1613 revrange = minrev
1612 1614 else:
1613 1615 revrange = '%s:%s' % (minrev, maxrev)
1614 1616 repo.ui.status(_('new changesets %s\n') % revrange)
1615 1617
1616 1618 @reportsummary
1617 1619 def reportphasechanges(repo, tr):
1618 1620 """Report statistics on phase changes for changesets that
1619 1621 pre-existed the pull/unbundle.
1620 1622 """
1621 1623 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1622 1624 phasetracking = tr.changes.get('phases', {})
1623 1625 if not phasetracking:
1624 1626 return
1625 1627 published = [
1626 1628 rev for rev, (old, new) in phasetracking.iteritems()
1627 1629 if new == phases.public and rev not in newrevs
1628 1630 ]
1629 1631 if not published:
1630 1632 return
1631 1633 repo.ui.status(_('%d local changesets published\n')
1632 1634 % len(published))
1633 1635
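The weakref dance in reportsummary above is easy to get wrong; here is a standalone sketch of the same pattern, with an added guard for the case where the underlying repo has already been collected (the guard is an assumption, not present in the code above):

    import weakref

    def makecallback(repo, func):
        # hold the unfiltered repo weakly; re-apply the filter on use so
        # a short-lived repoview cannot invalidate the reference
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if repo is None:   # underlying repo was garbage-collected
                return
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        return wrapped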
1634 1636 def getinstabilitymessage(delta, instability):
1635 1637 """return the warning message to show about new instabilities
1636 1638
1637 1639 exists as a separate function so that extensions can wrap it to show
1638 1640 more information, such as how to fix the instabilities"""
1639 1641 if delta > 0:
1640 1642 return _('%i new %s changesets\n') % (delta, instability)
1641 1643
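Because getinstabilitymessage is a module-level function, an extension can wrap it to append guidance; a sketch using extensions.wrapfunction, where the extra hint text is an assumption:

    from mercurial import extensions, scmutil

    def _withhint(orig, delta, instability):
        msg = orig(delta, instability)
        if msg:
            # hypothetical extra guidance appended by the extension
            msg += "(run 'hg evolve' to resolve them)\n"
        return msg

    def extsetup(ui):
        extensions.wrapfunction(scmutil, 'getinstabilitymessage', _withhint)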
1642 1644 def nodesummaries(repo, nodes, maxnumnodes=4):
1643 1645 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1644 1646 return ' '.join(short(h) for h in nodes)
1645 1647 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1646 1648 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1647 1649
1648 1650 def enforcesinglehead(repo, tr, desc):
1649 1651 """check that no named branch has multiple heads"""
1650 1652 if desc in ('strip', 'repair'):
1651 1653 # skip the logic during strip
1652 1654 return
1653 1655 visible = repo.filtered('visible')
1654 1656 # possible improvement: we could restrict the check to the affected branches
1655 1657 for name, heads in visible.branchmap().iteritems():
1656 1658 if len(heads) > 1:
1657 1659 msg = _('rejecting multiple heads on branch "%s"')
1658 1660 msg %= name
1659 1661 hint = _('%d heads: %s')
1660 1662 hint %= (len(heads), nodesummaries(repo, heads))
1661 1663 raise error.Abort(msg, hint=hint)
1662 1664
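enforcesinglehead is meant to run while the transaction is still open, so that the Abort it raises rolls everything back; a sketch of wiring it in as a transaction validator, where the config knob name is an illustrative assumption:

    from mercurial import scmutil

    def setupsinglehead(repo, tr, desc):
        # sketch: run the single-head check before the transaction
        # commits; 'experimental.single-heads' is a hypothetical knob
        def validate(tr2):
            if repo.ui.configbool('experimental', 'single-heads'):
                scmutil.enforcesinglehead(repo, tr2, desc)
        tr.addvalidator('zz-singlehead', validate)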
1663 1665 def wrapconvertsink(sink):
1664 1666 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1665 1667 before it is used, whether or not the convert extension was formally loaded.
1666 1668 """
1667 1669 return sink
1668 1670
1669 1671 def unhidehashlikerevs(repo, specs, hiddentype):
1670 1672 """parse the user specs and unhide changesets whose hash or revision number
1671 1673 is passed.
1672 1674
1673 1675 hiddentype can be: 1) 'warn': warn while unhiding changesets
1674 1676 2) 'nowarn': don't warn while unhiding changesets
1675 1677
1676 1678 returns a repo object with the required changesets unhidden
1677 1679 """
1678 1680 if not repo.filtername or not repo.ui.configbool('experimental',
1679 1681 'directaccess'):
1680 1682 return repo
1681 1683
1682 1684 if repo.filtername not in ('visible', 'visible-hidden'):
1683 1685 return repo
1684 1686
1685 1687 symbols = set()
1686 1688 for spec in specs:
1687 1689 try:
1688 1690 tree = revsetlang.parse(spec)
1689 1691 except error.ParseError: # will be reported by scmutil.revrange()
1690 1692 continue
1691 1693
1692 1694 symbols.update(revsetlang.gethashlikesymbols(tree))
1693 1695
1694 1696 if not symbols:
1695 1697 return repo
1696 1698
1697 1699 revs = _getrevsfromsymbols(repo, symbols)
1698 1700
1699 1701 if not revs:
1700 1702 return repo
1701 1703
1702 1704 if hiddentype == 'warn':
1703 1705 unfi = repo.unfiltered()
1704 1706 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1705 1707 repo.ui.warn(_("warning: accessing hidden changesets for write "
1706 1708 "operation: %s\n") % revstr)
1707 1709
1708 1710 # we have to use a new filtername to separate the branch/tags caches
1709 1711 # until we can disable these caches when revisions are dynamically pinned.
1710 1712 return repo.filtered('visible-hidden', revs)
1711 1713
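A usage sketch for the direct-access path: a command resolving user-supplied revisions first swaps in the unhidden repoview, then resolves as usual. The helper name is illustrative:

    from mercurial import scmutil

    def resolveuserspecs(repo, specs):
        # swap in the unhidden repoview before resolving, warning the
        # user about any hidden changesets that get exposed
        repo = scmutil.unhidehashlikerevs(repo, specs, 'warn')
        return repo, scmutil.revrange(repo, specs)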
1712 1714 def _getrevsfromsymbols(repo, symbols):
1713 1715 """parse the list of symbols and return a set of revision numbers
1714 1716 of the hidden changesets present in symbols"""
1715 1717 revs = set()
1716 1718 unfi = repo.unfiltered()
1717 1719 unficl = unfi.changelog
1718 1720 cl = repo.changelog
1719 1721 tiprev = len(unficl)
1720 1722 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1721 1723 for s in symbols:
1722 1724 try:
1723 1725 n = int(s)
1724 1726 if n <= tiprev:
1725 1727 if not allowrevnums:
1726 1728 continue
1727 1729 else:
1728 1730 if n not in cl:
1729 1731 revs.add(n)
1730 1732 continue
1731 1733 except ValueError:
1732 1734 pass
1733 1735
1734 1736 try:
1735 1737 s = resolvehexnodeidprefix(unfi, s)
1736 1738 except (error.LookupError, error.WdirUnsupported):
1737 1739 s = None
1738 1740
1739 1741 if s is not None:
1740 1742 rev = unficl.rev(s)
1741 1743 if rev not in cl:
1742 1744 revs.add(rev)
1743 1745
1744 1746 return revs
1745 1747
1746 1748 def bookmarkrevs(repo, mark):
1747 1749 """
1748 1750 Select revisions reachable by a given bookmark
1749 1751 """
1750 1752 return repo.revs("ancestors(bookmark(%s)) - "
1751 1753 "ancestors(head() and not bookmark(%s)) - "
1752 1754 "ancestors(bookmark() and not bookmark(%s))",
1753 1755 mark, mark, mark)
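A usage sketch for bookmarkrevs; the bookmark name and helper are assumptions. The revset keeps the ancestors of the bookmark while trimming everything also reachable through other heads or bookmarks:

    from mercurial import scmutil

    def summarizebookmark(repo, mark):
        # count the changesets "owned" by the bookmark per the revset above
        revs = scmutil.bookmarkrevs(repo, mark)
        repo.ui.write('%d changesets reachable from %s\n'
                      % (len(revs), mark))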
@@ -1,94 +1,100 b''
1 1 $ hg init rep
2 2 $ cd rep
3 3 $ mkdir dir
4 4 $ touch foo dir/bar
5 5 $ hg -v addremove
6 6 adding dir/bar
7 7 adding foo
8 8 $ hg -v commit -m "add 1"
9 9 committing files:
10 10 dir/bar
11 11 foo
12 12 committing manifest
13 13 committing changelog
14 14 committed changeset 0:6f7f953567a2
15 15 $ cd dir/
16 16 $ touch ../foo_2 bar_2
17 17 $ hg -v addremove
18 18 adding dir/bar_2
19 19 adding foo_2
20 20 $ hg -v commit -m "add 2"
21 21 committing files:
22 22 dir/bar_2
23 23 foo_2
24 24 committing manifest
25 25 committing changelog
26 26 committed changeset 1:e65414bf35c5
27 27 $ cd ..
28 28 $ hg forget foo
29 29 $ hg -v addremove
30 30 adding foo
31 31 $ hg forget foo
32 32
33 33 $ hg -v addremove nonexistent
34 34 nonexistent: $ENOENT$
35 35 [1]
36 36
37 37 $ cd ..
38 38
39 39 $ hg init subdir
40 40 $ cd subdir
41 41 $ mkdir dir
42 42 $ cd dir
43 43 $ touch a.py
44 44 $ hg addremove 'glob:*.py'
45 45 adding a.py
46 46 $ hg forget a.py
47 47 $ hg addremove -I 'glob:*.py'
48 48 adding a.py
49 49 $ hg forget a.py
50 50 $ hg addremove
51 51 adding dir/a.py
52 52 $ cd ..
53 53
54 54 $ hg init sim
55 55 $ cd sim
56 56 $ echo a > a
57 57 $ echo a >> a
58 58 $ echo a >> a
59 59 $ echo c > c
60 60 $ hg commit -Ama
61 61 adding a
62 62 adding c
63 63 $ mv a b
64 64 $ rm c
65 65 $ echo d > d
66 66 $ hg addremove -n -s 50 # issue 1696
67 67 removing a
68 68 adding b
69 69 removing c
70 70 adding d
71 71 recording removal of a as rename to b (100% similar)
72 $ hg addremove -ns 50 --color debug
73 [addremove.removed ui.status|removing a]
74 [addremove.added ui.status|adding b]
75 [addremove.removed ui.status|removing c]
76 [addremove.added ui.status|adding d]
77 [ ui.status|recording removal of a as rename to b (100% similar)]
72 78 $ hg addremove -s 50
73 79 removing a
74 80 adding b
75 81 removing c
76 82 adding d
77 83 recording removal of a as rename to b (100% similar)
78 84 $ hg commit -mb
79 85 $ cp b c
80 86 $ hg forget b
81 87 $ hg addremove -s 50
82 88 adding b
83 89 adding c
84 90
85 91 $ rm c
86 92
87 93 $ hg ci -A -m "c" nonexistent
88 94 nonexistent: $ENOENT$
89 95 abort: failed to mark all new/missing files as added/removed
90 96 [255]
91 97
92 98 $ hg st
93 99 ! c
94 100 $ cd ..