##// END OF EJS Templates
scmutil: make cleanupnodes handle filtered node...
Jun Wu -
r33330:ba43e5ee default
parent child Browse files
Show More
@@ -1,1076 +1,1079
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 hex,
21 21 nullid,
22 22 wdirid,
23 23 wdirrev,
24 24 )
25 25
26 26 from .i18n import _
27 27 from . import (
28 28 encoding,
29 29 error,
30 30 match as matchmod,
31 31 obsolete,
32 32 obsutil,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 revsetlang,
37 37 similar,
38 38 util,
39 39 )
40 40
41 41 if pycompat.osname == 'nt':
42 42 from . import scmwindows as scmplatform
43 43 else:
44 44 from . import scmposix as scmplatform
45 45
46 46 termsize = scmplatform.termsize
47 47
class status(tuple):
    '''Immutable tuple subclass holding one list of files per dirstate
    status.  The 'deleted', 'unknown' and 'ignored' properties are only
    relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
100 100
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # ctx1.  Subpaths only in ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths that exist only in ctx2 are handled separately below
    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125 125
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # count excluded nodes that are secret and still alive; they explain
    # why "no changes" may surprise the user
    secretlist = []
    for node in excluded or []:
        ctx = repo[node]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(node)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
142 142
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print the traceback (honoring ui settings) before falling
            # through to the typed handlers below
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
               (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # args[1] may be any object; render non-strings with %r
        # (Python 2 'basestring' covers both str and unicode)
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # the user must act; exit status 1 rather than the generic -1
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # the last word of the message is the missing module name
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): the 'code' attribute suggests an HTTP-style error
        # object — confirm against urllib2.HTTPError
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
250 250
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # the name parsed as an integer, which would be ambiguous with a
        # revision number
        raise error.Abort(_("cannot use an integer as a name"))
264 264
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
269 269
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
281 281
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lowered = val.lower()
    parsed = util.parsebool(val)
    # always abort on Windows, where non-portable names cannot exist at all
    abort = pycompat.osname == 'nt' or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    valid = warn or abort or lowered == 'ignore'
    if parsed is None and not valid:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
294 294
class casecollisionauditor(object):
    '''Warn or abort when a new filename differs only by case from a
    tracked one.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            # already vetted during this session
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
318 318
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    digester = hashlib.sha1()
    for rev in revs:
        digester.update('%d;' % rev)
    return digester.digest()
342 342
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the top-level path abort the walk; deeper errors
        # are silently skipped by os.walk
        if err.filename == path:
            raise err
    # samestat is not available on all platforms; without it symlink
    # cycle detection is impossible, so followsym is disabled below
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; return True if it was not
            # already present (i.e. this directory is new to the walk)
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            # keep only directories not seen before; recurse manually into
            # symlinked ones so cycles are broken via seen_dirs
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
390 390
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # the working directory context has no real node; substitute wdirid
    return wdirid if node is None else node
397 397
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # the working directory context has rev None; substitute wdirrev
    return wdirrev if rev is None else rev
405 405
def revsingle(repo, revspec, default='.'):
    # empty spec (but not the integer 0) falls back to the default revision
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
414 414
def _pairspec(revspec):
    # true if the top-level revset construct is a range expression
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
418 418
def revpair(repo, revs):
    """Resolve user revision specs into a (first, second) lookup pair.

    With no specs, returns (first dirstate parent, None).  The second
    element is None when the specs designate a single revision that is not
    a top-level range expression.  Raises Abort on an empty range.
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints according to the smartset's known ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # endpoints collapsing to one rev with multiple specs means one side
    # of the range resolved to nothing
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
448 448
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are wrapped as rev(N) revsets
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
476 476
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        # a merge: both parents matter
        return ps
    if repo.ui.debugflag:
        # debug mode always shows both slots, padding with the null rev
        return [ps[0], repo['null']]
    if ps[0].rev() >= intrev(ctx) - 1:
        # parent immediately precedes ctx; nothing worth showing
        return []
    return ps
492 492
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns are passed through untouched
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # glob matched nothing: keep the original pattern
            ret.append(kindpat)
    return ret
511 511
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is assigned below; the closure resolves it at call time,
        # after the matcher exists
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # a match-everything matcher means no effective patterns
    if m.always():
        pats = []
    return m, pats
536 536
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, unused_pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return m
541 541
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
545 545
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
549 549
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    configured = ui.config('ui', 'origbackuppath', None)
    if configured is None:
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured directory
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(configured, relpath)

    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath + ".orig"
569 569
def cleanupnodes(repo, mapping, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
    replacements. operation is a string, like "rebase".
    """
    # normalize a bare iterable of nodes into a no-replacement mapping
    if not util.safehasattr(mapping, 'items'):
        mapping = {n: () for n in mapping}

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanged = False
        for oldnode, newnodes in mapping.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            bmarkchanged = True
            if len(newnodes) > 1:
                # a bookmark can only point to one node; require a unique
                # head among the replacements
                heads = list(repo.set('heads(%ln)', newnodes))
                if len(heads) != 1:
                    raise error.ProgrammingError(
                        'cannot figure out bookmark movement')
                newnode = heads[0].node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                roots = list(repo.set('max((::%n) - %ln)', oldnode,
                                      list(mapping)))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            for name in oldbmarks:
                bmarks[name] = newnode
        if bmarkchanged:
            bmarks.recordchange(tr)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            # Unfiltered repo is needed since nodes in mapping might be hidden.
            unfi = repo.unfiltered()
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], (unfi[m] for m in s))
                    for n, s in sorted(mapping.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(mapping), operation)
631 634
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and remove missing ones, recursing into subrepos.

    Returns 1 if any path was rejected or a subrepo reported failure,
    otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # descend when --subrepos was given, the subrepo was named
        # explicitly, or the matcher has files inside it
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added/removed (always for explicit names,
    # everything in verbose mode)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
687 690
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
716 719
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # classify by dirstate state char ('?' untracked, 'r' removed,
    # 'a' added) combined with whether the file exists on disk (st)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
745 748
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # report the rename unless both names were given explicitly
        # (and we are not verbose)
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
760 763
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        # forget missing files first, then add the new ones, then record
        # the copy sources for detected renames
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for new, old in renames.iteritems():
            workingctx.copy(old, new)
770 773
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the
    # original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added, not committed: no copy data can
            # be recorded for it
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
789 792
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a blank or non-alphanumeric-leading entry means the file itself
        # is damaged
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
808 811
def writerequires(opener, requirements):
    # one requirement per line, sorted for stable output
    lines = ['%s\n' % r for r in sorted(requirements)]
    with opener('requires', 'w') as fp:
        for line in lines:
            fp.write(line)
813 816
class filecachesubentry(object):
    """Tracks the stat of a single file so filecache can detect changes.

    'path' is the file to watch; when 'stat' is true the file is stat'ed
    immediately so a baseline exists for later comparison.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat to make the current on-disk state the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; other OS errors
        # propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
868 871
class filecacheentry(object):
    '''Aggregates filecachesubentry objects for a set of paths.'''

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        for subentry in self._entries:
            if subentry.changed():
                return True
        return False

    def refresh(self):
        for subentry in self._entries:
            subentry.refresh()
885 888
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg, so to
    ensure the cache is reliable we need the filesystem to be able to tell
    us if a file has been replaced. If it can't, we fallback to recreating
    the object on every call (essentially the same behavior as
    propertycache).
    '''

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of
        this function to call the appropriate join function on 'obj' (an
        instance of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # record the wrapped function; caches are keyed by its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # accessed on the class: hand back the descriptor itself
        if obj is None:
            return self
        # fast path: value already cached on the instance
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only when one of the tracked files changed
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            joined = [self.join(obj, p) for p in self.paths]

            # stat -before- creating the object so the cache cannot lie if
            # a writer modifies the file between our read and our stat
            entry = filecacheentry(joined, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # add an entry for the missing value because X in __dict__
            # must imply X in _filecache
            joined = [self.join(obj, p) for p in self.paths]
            ce = filecacheentry(joined, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.name] = value  # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
964 967
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run *cmd* through repo.ui.system with *envvar* exported so the
    subprocess can inherit *lock*.

    Must be called while *lock* is held; otherwise a
    LockInheritanceContractViolation is raised. Extra arguments are
    forwarded to ui.system, and its exit code is returned.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    # NOTE(review): a caller-supplied environ dict is mutated in place here
    env = environ if environ is not None else {}
    with lock.inherit() as locker:
        env[envvar] = locker
        return repo.ui.system(cmd, environ=env, *args, **kwargs)
974 977
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
983 986
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
990 993
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
996 999
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but unused here
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # record the first line without its trailing '\n'
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # skip lines containing only whitespace/newline; a plain
            # 'if line' test would not skip a line holding just '\n'
            updatedict = dict(line[:-1].split('=', 1)
                              for line in lines if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not k[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1065 1068
def registersummarycallback(repo, otr):
    """register a callback to issue a summary after the transaction is closed
    """
    # hold the repo weakly so the pending callback cannot keep it alive
    reporef = weakref.ref(repo)

    def reportsummary(tr):
        """the actual callback reporting the summary"""
        localrepo = reporef()
        obsoleted = obsutil.getobsoleted(localrepo, tr)
        if obsoleted:
            localrepo.ui.status(
                _('obsoleted %i changesets\n') % len(obsoleted))

    otr.addpostclose('00-txnreport', reportsummary)
@@ -1,619 +1,666
1 1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2 2
3 3 These are the most complex troubles so far, so we isolate them in a dedicated
4 4 file.
5 5
6 6 Enable obsolete
7 7
8 8 $ cat >> $HGRCPATH << EOF
9 9 > [ui]
10 10 > logtemplate = {rev}:{node|short} {desc}\n
11 11 > [experimental]
12 12 > evolution=createmarkers
13 > [extensions]
14 > drawdag=$TESTDIR/drawdag.py
13 15 > [alias]
14 16 > debugobsolete = debugobsolete -d '0 0'
15 17 > [phases]
16 18 > publish=False
17 19 > EOF
18 20
19 21
20 22 $ mkcommit() {
21 23 > echo "$1" > "$1"
22 24 > hg add "$1"
23 25 > hg ci -m "$1"
24 26 > }
25 27 $ getid() {
26 28 > hg log --hidden -r "desc('$1')" -T '{node}\n'
27 29 > }
28 30
29 31 setup repo
30 32
31 33 $ hg init reference
32 34 $ cd reference
33 35 $ mkcommit base
34 36 $ mkcommit A_0
35 37 $ hg up 0
36 38 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
37 39 $ mkcommit A_1
38 40 created new head
39 41 $ hg up 0
40 42 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 43 $ mkcommit A_2
42 44 created new head
43 45 $ hg up 0
44 46 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
45 47 $ cd ..
46 48
47 49
48 50 $ newcase() {
49 51 > hg clone -u 0 -q reference $1
50 52 > cd $1
51 53 > }
52 54
53 55 direct divergence
54 56 -----------------
55 57
56 58 A_0 has two direct and divergent successors A_1 and A_2
57 59
58 60 $ newcase direct
59 61 $ hg debugobsolete `getid A_0` `getid A_1`
60 62 $ hg debugobsolete `getid A_0` `getid A_2`
61 63 $ hg log -G --hidden
62 64 o 3:392fd25390da A_2
63 65 |
64 66 | o 2:82623d38b9ba A_1
65 67 |/
66 68 | x 1:007dc284c1f8 A_0
67 69 |/
68 70 @ 0:d20a80d4def3 base
69 71
70 72 $ hg debugsuccessorssets --hidden 'all()'
71 73 d20a80d4def3
72 74 d20a80d4def3
73 75 007dc284c1f8
74 76 82623d38b9ba
75 77 392fd25390da
76 78 82623d38b9ba
77 79 82623d38b9ba
78 80 392fd25390da
79 81 392fd25390da
80 82 $ hg log -r 'divergent()'
81 83 2:82623d38b9ba A_1
82 84 3:392fd25390da A_2
83 85 $ hg debugsuccessorssets 'all()' --closest
84 86 d20a80d4def3
85 87 d20a80d4def3
86 88 82623d38b9ba
87 89 82623d38b9ba
88 90 392fd25390da
89 91 392fd25390da
90 92 $ hg debugsuccessorssets 'all()' --closest --hidden
91 93 d20a80d4def3
92 94 d20a80d4def3
93 95 007dc284c1f8
94 96 82623d38b9ba
95 97 392fd25390da
96 98 82623d38b9ba
97 99 82623d38b9ba
98 100 392fd25390da
99 101 392fd25390da
100 102
101 103 check that mercurial refuse to push
102 104
103 105 $ hg init ../other
104 106 $ hg push ../other
105 107 pushing to ../other
106 108 searching for changes
107 109 abort: push includes divergent changeset: 392fd25390da!
108 110 [255]
109 111
110 112 $ cd ..
111 113
112 114
113 115 indirect divergence with known changeset
114 116 -------------------------------------------
115 117
116 118 $ newcase indirect_known
117 119 $ hg debugobsolete `getid A_0` `getid A_1`
118 120 $ hg debugobsolete `getid A_0` `getid A_2`
119 121 $ mkcommit A_3
120 122 created new head
121 123 $ hg debugobsolete `getid A_2` `getid A_3`
122 124 $ hg log -G --hidden
123 125 @ 4:01f36c5a8fda A_3
124 126 |
125 127 | x 3:392fd25390da A_2
126 128 |/
127 129 | o 2:82623d38b9ba A_1
128 130 |/
129 131 | x 1:007dc284c1f8 A_0
130 132 |/
131 133 o 0:d20a80d4def3 base
132 134
133 135 $ hg debugsuccessorssets --hidden 'all()'
134 136 d20a80d4def3
135 137 d20a80d4def3
136 138 007dc284c1f8
137 139 82623d38b9ba
138 140 01f36c5a8fda
139 141 82623d38b9ba
140 142 82623d38b9ba
141 143 392fd25390da
142 144 01f36c5a8fda
143 145 01f36c5a8fda
144 146 01f36c5a8fda
145 147 $ hg log -r 'divergent()'
146 148 2:82623d38b9ba A_1
147 149 4:01f36c5a8fda A_3
148 150 $ hg debugsuccessorssets 'all()' --closest
149 151 d20a80d4def3
150 152 d20a80d4def3
151 153 82623d38b9ba
152 154 82623d38b9ba
153 155 01f36c5a8fda
154 156 01f36c5a8fda
155 157 $ hg debugsuccessorssets 'all()' --closest --hidden
156 158 d20a80d4def3
157 159 d20a80d4def3
158 160 007dc284c1f8
159 161 82623d38b9ba
160 162 392fd25390da
161 163 82623d38b9ba
162 164 82623d38b9ba
163 165 392fd25390da
164 166 392fd25390da
165 167 01f36c5a8fda
166 168 01f36c5a8fda
167 169 $ cd ..
168 170
169 171
170 172 indirect divergence with unknown changeset
171 173 -------------------------------------------
172 174
173 175 $ newcase indirect_unknown
174 176 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
175 177 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
176 178 $ hg debugobsolete `getid A_0` `getid A_2`
177 179 $ hg log -G --hidden
178 180 o 3:392fd25390da A_2
179 181 |
180 182 | o 2:82623d38b9ba A_1
181 183 |/
182 184 | x 1:007dc284c1f8 A_0
183 185 |/
184 186 @ 0:d20a80d4def3 base
185 187
186 188 $ hg debugsuccessorssets --hidden 'all()'
187 189 d20a80d4def3
188 190 d20a80d4def3
189 191 007dc284c1f8
190 192 82623d38b9ba
191 193 392fd25390da
192 194 82623d38b9ba
193 195 82623d38b9ba
194 196 392fd25390da
195 197 392fd25390da
196 198 $ hg log -r 'divergent()'
197 199 2:82623d38b9ba A_1
198 200 3:392fd25390da A_2
199 201 $ hg debugsuccessorssets 'all()' --closest
200 202 d20a80d4def3
201 203 d20a80d4def3
202 204 82623d38b9ba
203 205 82623d38b9ba
204 206 392fd25390da
205 207 392fd25390da
206 208 $ hg debugsuccessorssets 'all()' --closest --hidden
207 209 d20a80d4def3
208 210 d20a80d4def3
209 211 007dc284c1f8
210 212 82623d38b9ba
211 213 392fd25390da
212 214 82623d38b9ba
213 215 82623d38b9ba
214 216 392fd25390da
215 217 392fd25390da
216 218 $ cd ..
217 219
218 220 do not take unknown node in account if they are final
219 221 -----------------------------------------------------
220 222
221 223 $ newcase final-unknown
222 224 $ hg debugobsolete `getid A_0` `getid A_1`
223 225 $ hg debugobsolete `getid A_1` `getid A_2`
224 226 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
225 227 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
226 228 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
227 229
228 230 $ hg debugsuccessorssets --hidden 'desc('A_0')'
229 231 007dc284c1f8
230 232 392fd25390da
231 233 $ hg debugsuccessorssets 'desc('A_0')' --closest
232 234 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
233 235 007dc284c1f8
234 236 82623d38b9ba
235 237
236 238 $ cd ..
237 239
238 240 divergence that converges again is not divergence anymore
239 241 -----------------------------------------------------
240 242
241 243 $ newcase converged_divergence
242 244 $ hg debugobsolete `getid A_0` `getid A_1`
243 245 $ hg debugobsolete `getid A_0` `getid A_2`
244 246 $ mkcommit A_3
245 247 created new head
246 248 $ hg debugobsolete `getid A_1` `getid A_3`
247 249 $ hg debugobsolete `getid A_2` `getid A_3`
248 250 $ hg log -G --hidden
249 251 @ 4:01f36c5a8fda A_3
250 252 |
251 253 | x 3:392fd25390da A_2
252 254 |/
253 255 | x 2:82623d38b9ba A_1
254 256 |/
255 257 | x 1:007dc284c1f8 A_0
256 258 |/
257 259 o 0:d20a80d4def3 base
258 260
259 261 $ hg debugsuccessorssets --hidden 'all()'
260 262 d20a80d4def3
261 263 d20a80d4def3
262 264 007dc284c1f8
263 265 01f36c5a8fda
264 266 82623d38b9ba
265 267 01f36c5a8fda
266 268 392fd25390da
267 269 01f36c5a8fda
268 270 01f36c5a8fda
269 271 01f36c5a8fda
270 272 $ hg log -r 'divergent()'
271 273 $ hg debugsuccessorssets 'all()' --closest
272 274 d20a80d4def3
273 275 d20a80d4def3
274 276 01f36c5a8fda
275 277 01f36c5a8fda
276 278 $ hg debugsuccessorssets 'all()' --closest --hidden
277 279 d20a80d4def3
278 280 d20a80d4def3
279 281 007dc284c1f8
280 282 82623d38b9ba
281 283 392fd25390da
282 284 82623d38b9ba
283 285 82623d38b9ba
284 286 392fd25390da
285 287 392fd25390da
286 288 01f36c5a8fda
287 289 01f36c5a8fda
288 290 $ cd ..
289 291
290 292 a split is not a divergence
291 293 -----------------------------
292 294
293 295 $ newcase split
294 296 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
295 297 $ hg log -G --hidden
296 298 o 3:392fd25390da A_2
297 299 |
298 300 | o 2:82623d38b9ba A_1
299 301 |/
300 302 | x 1:007dc284c1f8 A_0
301 303 |/
302 304 @ 0:d20a80d4def3 base
303 305
304 306 $ hg debugsuccessorssets --hidden 'all()'
305 307 d20a80d4def3
306 308 d20a80d4def3
307 309 007dc284c1f8
308 310 82623d38b9ba 392fd25390da
309 311 82623d38b9ba
310 312 82623d38b9ba
311 313 392fd25390da
312 314 392fd25390da
313 315 $ hg log -r 'divergent()'
314 316 $ hg debugsuccessorssets 'all()' --closest
315 317 d20a80d4def3
316 318 d20a80d4def3
317 319 82623d38b9ba
318 320 82623d38b9ba
319 321 392fd25390da
320 322 392fd25390da
321 323 $ hg debugsuccessorssets 'all()' --closest --hidden
322 324 d20a80d4def3
323 325 d20a80d4def3
324 326 007dc284c1f8
325 327 82623d38b9ba 392fd25390da
326 328 82623d38b9ba
327 329 82623d38b9ba
328 330 392fd25390da
329 331 392fd25390da
330 332
331 333 Even when subsequent rewriting happen
332 334
333 335 $ mkcommit A_3
334 336 created new head
335 337 $ hg debugobsolete `getid A_1` `getid A_3`
336 338 $ hg up 0
337 339 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
338 340 $ mkcommit A_4
339 341 created new head
340 342 $ hg debugobsolete `getid A_2` `getid A_4`
341 343 $ hg up 0
342 344 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
343 345 $ mkcommit A_5
344 346 created new head
345 347 $ hg debugobsolete `getid A_4` `getid A_5`
346 348 $ hg log -G --hidden
347 349 @ 6:e442cfc57690 A_5
348 350 |
349 351 | x 5:6a411f0d7a0a A_4
350 352 |/
351 353 | o 4:01f36c5a8fda A_3
352 354 |/
353 355 | x 3:392fd25390da A_2
354 356 |/
355 357 | x 2:82623d38b9ba A_1
356 358 |/
357 359 | x 1:007dc284c1f8 A_0
358 360 |/
359 361 o 0:d20a80d4def3 base
360 362
361 363 $ hg debugsuccessorssets --hidden 'all()'
362 364 d20a80d4def3
363 365 d20a80d4def3
364 366 007dc284c1f8
365 367 01f36c5a8fda e442cfc57690
366 368 82623d38b9ba
367 369 01f36c5a8fda
368 370 392fd25390da
369 371 e442cfc57690
370 372 01f36c5a8fda
371 373 01f36c5a8fda
372 374 6a411f0d7a0a
373 375 e442cfc57690
374 376 e442cfc57690
375 377 e442cfc57690
376 378 $ hg debugsuccessorssets 'all()' --closest
377 379 d20a80d4def3
378 380 d20a80d4def3
379 381 01f36c5a8fda
380 382 01f36c5a8fda
381 383 e442cfc57690
382 384 e442cfc57690
383 385 $ hg debugsuccessorssets 'all()' --closest --hidden
384 386 d20a80d4def3
385 387 d20a80d4def3
386 388 007dc284c1f8
387 389 82623d38b9ba 392fd25390da
388 390 82623d38b9ba
389 391 82623d38b9ba
390 392 392fd25390da
391 393 392fd25390da
392 394 01f36c5a8fda
393 395 01f36c5a8fda
394 396 6a411f0d7a0a
395 397 e442cfc57690
396 398 e442cfc57690
397 399 e442cfc57690
398 400 $ hg log -r 'divergent()'
399 401
400 402 Check more complex obsolescence graft (with divergence)
401 403
402 404 $ mkcommit B_0; hg up 0
403 405 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
404 406 $ hg debugobsolete `getid B_0` `getid A_2`
405 407 $ mkcommit A_7; hg up 0
406 408 created new head
407 409 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
408 410 $ mkcommit A_8; hg up 0
409 411 created new head
410 412 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
411 413 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
412 414 $ mkcommit A_9; hg up 0
413 415 created new head
414 416 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
415 417 $ hg debugobsolete `getid A_5` `getid A_9`
416 418 $ hg log -G --hidden
417 419 o 10:bed64f5d2f5a A_9
418 420 |
419 421 | o 9:14608b260df8 A_8
420 422 |/
421 423 | o 8:7ae126973a96 A_7
422 424 |/
423 425 | x 7:3750ebee865d B_0
424 426 | |
425 427 | x 6:e442cfc57690 A_5
426 428 |/
427 429 | x 5:6a411f0d7a0a A_4
428 430 |/
429 431 | o 4:01f36c5a8fda A_3
430 432 |/
431 433 | x 3:392fd25390da A_2
432 434 |/
433 435 | x 2:82623d38b9ba A_1
434 436 |/
435 437 | x 1:007dc284c1f8 A_0
436 438 |/
437 439 @ 0:d20a80d4def3 base
438 440
439 441 $ hg debugsuccessorssets --hidden 'all()'
440 442 d20a80d4def3
441 443 d20a80d4def3
442 444 007dc284c1f8
443 445 01f36c5a8fda bed64f5d2f5a
444 446 01f36c5a8fda 7ae126973a96 14608b260df8
445 447 82623d38b9ba
446 448 01f36c5a8fda
447 449 392fd25390da
448 450 bed64f5d2f5a
449 451 7ae126973a96 14608b260df8
450 452 01f36c5a8fda
451 453 01f36c5a8fda
452 454 6a411f0d7a0a
453 455 bed64f5d2f5a
454 456 7ae126973a96 14608b260df8
455 457 e442cfc57690
456 458 bed64f5d2f5a
457 459 7ae126973a96 14608b260df8
458 460 3750ebee865d
459 461 bed64f5d2f5a
460 462 7ae126973a96 14608b260df8
461 463 7ae126973a96
462 464 7ae126973a96
463 465 14608b260df8
464 466 14608b260df8
465 467 bed64f5d2f5a
466 468 bed64f5d2f5a
467 469 $ hg debugsuccessorssets 'all()' --closest
468 470 d20a80d4def3
469 471 d20a80d4def3
470 472 01f36c5a8fda
471 473 01f36c5a8fda
472 474 7ae126973a96
473 475 7ae126973a96
474 476 14608b260df8
475 477 14608b260df8
476 478 bed64f5d2f5a
477 479 bed64f5d2f5a
478 480 $ hg debugsuccessorssets 'all()' --closest --hidden
479 481 d20a80d4def3
480 482 d20a80d4def3
481 483 007dc284c1f8
482 484 82623d38b9ba 392fd25390da
483 485 82623d38b9ba
484 486 82623d38b9ba
485 487 392fd25390da
486 488 392fd25390da
487 489 01f36c5a8fda
488 490 01f36c5a8fda
489 491 6a411f0d7a0a
490 492 e442cfc57690
491 493 e442cfc57690
492 494 e442cfc57690
493 495 3750ebee865d
494 496 392fd25390da
495 497 7ae126973a96
496 498 7ae126973a96
497 499 14608b260df8
498 500 14608b260df8
499 501 bed64f5d2f5a
500 502 bed64f5d2f5a
501 503 $ hg log -r 'divergent()'
502 504 4:01f36c5a8fda A_3
503 505 8:7ae126973a96 A_7
504 506 9:14608b260df8 A_8
505 507 10:bed64f5d2f5a A_9
506 508
507 509 fix the divergence
508 510
509 511 $ mkcommit A_A; hg up 0
510 512 created new head
511 513 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
512 514 $ hg debugobsolete `getid A_9` `getid A_A`
513 515 $ hg debugobsolete `getid A_7` `getid A_A`
514 516 $ hg debugobsolete `getid A_8` `getid A_A`
515 517 $ hg log -G --hidden
516 518 o 11:a139f71be9da A_A
517 519 |
518 520 | x 10:bed64f5d2f5a A_9
519 521 |/
520 522 | x 9:14608b260df8 A_8
521 523 |/
522 524 | x 8:7ae126973a96 A_7
523 525 |/
524 526 | x 7:3750ebee865d B_0
525 527 | |
526 528 | x 6:e442cfc57690 A_5
527 529 |/
528 530 | x 5:6a411f0d7a0a A_4
529 531 |/
530 532 | o 4:01f36c5a8fda A_3
531 533 |/
532 534 | x 3:392fd25390da A_2
533 535 |/
534 536 | x 2:82623d38b9ba A_1
535 537 |/
536 538 | x 1:007dc284c1f8 A_0
537 539 |/
538 540 @ 0:d20a80d4def3 base
539 541
540 542 $ hg debugsuccessorssets --hidden 'all()'
541 543 d20a80d4def3
542 544 d20a80d4def3
543 545 007dc284c1f8
544 546 01f36c5a8fda a139f71be9da
545 547 82623d38b9ba
546 548 01f36c5a8fda
547 549 392fd25390da
548 550 a139f71be9da
549 551 01f36c5a8fda
550 552 01f36c5a8fda
551 553 6a411f0d7a0a
552 554 a139f71be9da
553 555 e442cfc57690
554 556 a139f71be9da
555 557 3750ebee865d
556 558 a139f71be9da
557 559 7ae126973a96
558 560 a139f71be9da
559 561 14608b260df8
560 562 a139f71be9da
561 563 bed64f5d2f5a
562 564 a139f71be9da
563 565 a139f71be9da
564 566 a139f71be9da
565 567 $ hg debugsuccessorssets 'all()' --closest
566 568 d20a80d4def3
567 569 d20a80d4def3
568 570 01f36c5a8fda
569 571 01f36c5a8fda
570 572 a139f71be9da
571 573 a139f71be9da
572 574 $ hg debugsuccessorssets 'all()' --closest --hidden
573 575 d20a80d4def3
574 576 d20a80d4def3
575 577 007dc284c1f8
576 578 82623d38b9ba 392fd25390da
577 579 82623d38b9ba
578 580 82623d38b9ba
579 581 392fd25390da
580 582 392fd25390da
581 583 01f36c5a8fda
582 584 01f36c5a8fda
583 585 6a411f0d7a0a
584 586 e442cfc57690
585 587 e442cfc57690
586 588 e442cfc57690
587 589 3750ebee865d
588 590 392fd25390da
589 591 7ae126973a96
590 592 a139f71be9da
591 593 14608b260df8
592 594 a139f71be9da
593 595 bed64f5d2f5a
594 596 a139f71be9da
595 597 a139f71be9da
596 598 a139f71be9da
597 599 $ hg log -r 'divergent()'
598 600
599 601 $ cd ..
600 602
601 603
602 604 Subset does not diverge
603 605 ------------------------------
604 606
605 607 Do not report divergent successors-set if it is a subset of another
606 608 successors-set. (report [A,B] not [A] + [A,B])
607 609
608 610 $ newcase subset
609 611 $ hg debugobsolete `getid A_0` `getid A_2`
610 612 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
611 613 $ hg debugsuccessorssets --hidden 'desc('A_0')'
612 614 007dc284c1f8
613 615 82623d38b9ba 392fd25390da
614 616 $ hg debugsuccessorssets 'desc('A_0')' --closest
615 617 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
616 618 007dc284c1f8
617 619 82623d38b9ba 392fd25390da
618 620
619 621 $ cd ..
622
623 Use scmutil.cleanupnodes API to create divergence
624
625 $ hg init cleanupnodes
626 $ cd cleanupnodes
627 $ hg debugdrawdag <<'EOS'
628 > B1 B3 B4
629 > | \|
630 > A Z
631 > EOS
632
633 $ hg update -q B1
634 $ echo 3 >> B
635 $ hg commit --amend -m B2
636 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
637 > from mercurial import registrar, scmutil
638 > cmdtable = {}
639 > command = registrar.command(cmdtable)
640 > @command('cleanup')
641 > def cleanup(ui, repo):
642 > def node(expr):
643 > unfi = repo.unfiltered()
644 > rev = unfi.revs(expr).first()
645 > return unfi.changelog.node(rev)
646 > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
647 > mapping = {node('desc(B1)'): [node('desc(B3)')],
648 > node('desc(B3)'): [node('desc(B4)')]}
649 > scmutil.cleanupnodes(repo, mapping, 'test')
650 > EOF
651
652 $ rm .hg/localtags
653 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
654 $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
655 @ 5:1a2a9b5b0030 B2 divergent
656 |
657 | o 4:70d5a63ca112 B4 divergent
658 | |
659 | o 1:48b9aae0607f Z
660 |
661 o 0:426bada5c675 A
662
663 $ hg debugobsolete
664 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
665 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
666 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
General Comments 0
You need to be logged in to leave comments. Login now