##// END OF EJS Templates
cleanupnode: do not use generator for node mapping...
Octobus -
r33352:967ac37f default
parent child Browse files
Show More
@@ -1,1093 +1,1093 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 hex,
21 21 nullid,
22 22 wdirid,
23 23 wdirrev,
24 24 )
25 25
26 26 from .i18n import _
27 27 from . import (
28 28 encoding,
29 29 error,
30 30 match as matchmod,
31 31 obsolete,
32 32 obsutil,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 revsetlang,
37 37 similar,
38 38 util,
39 39 )
40 40
41 41 if pycompat.osname == 'nt':
42 42 from . import scmwindows as scmplatform
43 43 else:
44 44 from . import scmposix as scmplatform
45 45
46 46 termsize = scmplatform.termsize
47 47
class status(tuple):
    '''Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # fixed field order; the properties below index into this tuple
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
100 100
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Generator of (subpath, subrepo) pairs.  Pairs known to both contexts
    are yielded first in sorted subpath order; subpaths present only in
    ctx2 are yielded last as null subrepos based on ctx1.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths that exist only in ctx2
    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125 125
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            # only count live secret changesets; extinct ones would not
            # have been exchanged anyway
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
142 142
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Known exceptions are reported on ui and mapped to an exit code:
    -1 for generic aborts, 1 for InterventionRequired, the original code
    for SystemExit.  Unrecognized exceptions are re-raised.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print the traceback (if ui is configured to) before the
            # outer handlers turn the exception into a message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is usually the missing module name
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-style object
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-style object
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
250 250
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label name.

    The "kind" parameter is deliberately not used in the ui output
    because it would make the strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        return
    # int() succeeded, so the label is a bare integer - not allowed
    raise error.Abort(_("cannot use an integer as a name"))
264 264
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
269 269
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    # newlines and carriage returns are rejected outright, regardless of
    # the portability configuration
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
281 281
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.  Raises ConfigError when
    ui.portablefilenames holds an unrecognized value.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    # parsebool returns None when the value is not a recognized boolean
    bval = util.parsebool(val)
    # on Windows non-portable names are always fatal
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
294 294
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto an existing one.

    Call the instance with each candidate filename; it compares the
    lowercased name against the lowercased dirstate contents.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        # abort (True) vs. warn (False) on collision
        self._abort = abort
        # lowercase all tracked names in one encoding.lower() call by
        # joining them with NUL separators first
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # collision only if the folded name is taken by a *different* file
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
318 318
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.  Returns None when nothing at or below maxrev is
    filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    hidden = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not hidden:
        return None
    digest = hashlib.sha1()
    for rev in hidden:
        digest.update('%d;' % rev)
    return digest.digest()
342 342
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate errors about the root path itself
        if err.filename == path:
            raise err
    # samestat is needed for symlink-loop detection; it is missing on some
    # platforms, in which case symlink following is disabled
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and record the stat) only for directories not
            # already seen, so symlink cycles terminate
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the symlink target ourselves; os.walk does
                        # not follow links with followlinks=False
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
390 390
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # the working directory context has no node; report the magic wdir id
    n = ctx.node()
    return wdirid if n is None else n
397 397
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # the working directory context has rev None; substitute the magic
    # wdirrev integer so callers can compare and sort uniformly
    r = ctx.rev()
    return wdirrev if r is None else r
405 405
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single context, falling back to default.

    An empty (but not 0) revspec selects the default revision; a revspec
    matching nothing raises error.Abort.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
414 414
def _pairspec(revspec):
    """Report whether revspec parses to a top-level range expression."""
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
418 418
def revpair(repo, revs):
    """Resolve revset specs to a pair of binary nodes.

    Returns (first, second) where second is None when the specs denote a
    single revision that is not an explicit range expression.  Raises
    error.Abort when the specs resolve to nothing.
    """
    if not revs:
        # default to the working directory's first parent
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick endpoints cheaply when the smartset knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
448 448
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # wrap bare revision numbers as rev(N) expressions; everything else is
    # assumed to be an already-formatted revset string
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
476 476
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents are always meaningful
        return parents
    if repo.ui.debugflag:
        # in debug mode, show the (implicit) null second parent too
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # parent is simply the previous revision: not worth showing
        return []
    return parents
492 492
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        # only patterns without an explicit kind: prefix are globbed
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # glob can raise re.error on malformed patterns; fall back
                # to treating the pattern literally
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # keep the original pattern when it had a kind or matched nothing
        ret.append(kindpat)
    return ret
511 511
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (no-op on posix, see expandpats)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # note: closes over 'm', which is assigned below before any match
        # can call this
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means the patterns were effectively empty
        pats = []
    return m, pats
536 536
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # thin wrapper over matchandpats, discarding the normalized patterns
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
541 541
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
545 545
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
549 549
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        # default: sibling .orig file next to the original
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured directory
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath + ".orig"
569 569
570 570 class _containsnode(object):
571 571 """proxy __contains__(node) to container.__contains__ which accepts revs"""
572 572
573 573 def __init__(self, repo, revcontainer):
574 574 self._torev = repo.changelog.rev
575 575 self._revcontains = revcontainer.__contains__
576 576
577 577 def __contains__(self, node):
578 578 return self._revcontains(self._torev(node))
579 579
def cleanupnodes(repo, mapping, operation):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
    replacements. operation is a string, like "rebase".
    """
    if not util.safehasattr(mapping, 'items'):
        # normalize a plain iterable of nodes into a no-successor mapping
        mapping = {n: () for n in mapping}

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanged = False
        allnewnodes = [n for ns in mapping.values() for n in ns]
        for oldnode, newnodes in mapping.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            bmarkchanged = True
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(repo.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                roots = list(repo.set('max((::%n) - %ln)', oldnode,
                                      list(mapping)))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarks[name] = newnode
                bookmarks.deletedivergent(repo, deletenodes, name)
        if bmarkchanged:
            bmarks.recordchange(tr)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            # Unfiltered repo is needed since nodes in mapping might be hidden.
            unfi = repo.unfiltered()
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            # tuple() materializes the successors eagerly; a generator here
            # could only be consumed once by createmarkers
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(mapping.items(), key=sortfunc)
                    if s or not isobs(n)]
            obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            repair.delayedstrip(repo.ui, repo, list(mapping), operation)
648 648
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and forget missing ones, recursing into subrepos.

    Returns 1 if any path was rejected or a subrepo add/remove failed,
    0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into requested subrepos first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only explicitly-listed files count as rejections
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added/removed
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
704 704
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # note: the badfn closure refers to 'rejected', which is bound on the
    # next line before the matcher can invoke it
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # return 1 if any explicitly-requested file was rejected by the matcher
    for f in rejected:
        if f in m.files():
            return 1
    return 0
733 733
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # dirstate states: '?' untracked, 'r' removed, 'a' added; st is the
    # stat result (falsy when the file is gone from disk)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
762 762
763 763 def _findrenames(repo, matcher, added, removed, similarity):
764 764 '''Find renames from removed files to added ones.'''
765 765 renames = {}
766 766 if similarity > 0:
767 767 for old, new, score in similar.findrenames(repo, added, removed,
768 768 similarity):
769 769 if (repo.ui.verbose or not matcher.exact(old)
770 770 or not matcher.exact(new)):
771 771 repo.ui.status(_('recording removal of %s as rename to %s '
772 772 '(%d%% similar)\n') %
773 773 (matcher.rel(old), matcher.rel(new),
774 774 score * 100))
775 775 renames[new] = old
776 776 return renames
777 777
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under a single wlock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
787 787
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only added, not committed: no copy data to record
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
806 806
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements; raises RequirementError when the
    file is corrupt or lists unsupported features.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric-leading entry means a corrupt file
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
825 825
def writerequires(opener, requirements):
    """Write the requirements, sorted one per line, to .hg/requires."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write("%s\n" % name)
830 830
class filecachesubentry(object):
    """Stat-based change tracker for a single path.

    Records stat data for the path and answers changed()/refresh()
    queries; paths whose stat data cannot validate a cache (per
    util.cachestat.cacheable) always report changed.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat data, but only for cacheable paths
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the path does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
885 885
class filecacheentry(object):
    """Aggregate change tracker over several paths.

    Wraps one filecachesubentry per path and reports changed when any
    of them does.
    """
    def __init__(self, paths, stat=True):
        # stat=True records each path's current stat data immediately
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
902 902
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomically renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative file names; resolved at lookup time through join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        # (encoded to bytes, matching the keys used in obj._filecache)
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # invariant: name in obj.__dict__ implies name in obj._filecache,
        # and the cached value is returned without re-statting the files
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only when the tracked files look changed
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # mirror into __dict__ so later reads hit the fast path above
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # (stat=False: stat info will be filled in by a later refresh)
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # only the __dict__ mirror is dropped; the _filecache entry (and its
        # stat info) is kept so the next __get__ can detect staleness
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
981 981
982 982 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
983 983 if lock is None:
984 984 raise error.LockInheritanceContractViolation(
985 985 'lock can only be inherited while held')
986 986 if environ is None:
987 987 environ = {}
988 988 with lock.inherit() as locker:
989 989 environ[envvar] = locker
990 990 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
991 991
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1000 1000
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1007 1007
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1013 1013
class simplekeyvaluefile(object):
    """Reader/writer for files made of 'key=value' lines.

    Keys must be alphanumeric and begin with a letter; values must not
    contain newline characters."""

    # key under which read() reports a non-key-value first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for API compatibility but currently unused
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file and return its contents as a dict.

        When 'firstlinenonkeyval' is True the first line is not parsed
        as a key-value pair; it is returned (without its trailing
        newline) under the __firstline key instead.

        Raises error.CorruptedState on malformed content."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # drop the trailing '\n' before recording the first line
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]

        try:
            # 'if line.strip()' skips lines holding only whitespace (a
            # bare '\n'); a line without '=' splits into a single item
            # and makes dict() raise the ValueError handled below
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            result.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return result

    def write(self, data, firstline=None):
        """Serialize the 'data' dict as key=value lines.

        If 'firstline' is not None it is written out first, verbatim,
        rather than in key=value form.

        Raises error.ProgrammingError on invalid keys or values."""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            # validation order matters: reserved name, then key shape,
            # then value content
            if k == self.firstlinekey:
                msg = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(msg)
            if not k[0].isalpha():
                msg = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(msg)
            if not k.isalnum():
                msg = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(msg)
            if '\n' in v:
                msg = "invalid value in a simple key-value file"
                raise error.ProgrammingError(msg)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1082 1082
def registersummarycallback(repo, otr):
    """register a callback to issue a summary after the transaction is closed
    """
    # hold the repo weakly so the transaction does not pin it in memory
    repoweak = weakref.ref(repo)

    def reportsummary(tr):
        """the actual callback reporting the summary"""
        localrepo = repoweak()
        obsoleted = obsutil.getobsoleted(localrepo, tr)
        if obsoleted:
            localrepo.ui.status(_('obsoleted %i changesets\n')
                                % len(obsoleted))
    otr.addpostclose('00-txnreport', reportsummary)
General Comments 0
You need to be logged in to leave comments. Login now