##// END OF EJS Templates
extdata: add extdatasource reader...
Matt Mackall -
r34457:7757cc48 default
parent child Browse files
Show More
@@ -1,1142 +1,1193
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 hex,
21 21 nullid,
22 22 short,
23 23 wdirid,
24 24 wdirrev,
25 25 )
26 26
27 27 from . import (
28 28 encoding,
29 29 error,
30 30 match as matchmod,
31 31 obsolete,
32 32 obsutil,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 revsetlang,
37 37 similar,
38 url,
38 39 util,
39 40 )
40 41
41 42 if pycompat.osname == 'nt':
42 43 from . import scmwindows as scmplatform
43 44 else:
44 45 from . import scmposix as scmplatform
45 46
46 47 termsize = scmplatform.termsize
47 48
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Pack the seven per-status file lists into the underlying tuple;
        # the properties below give them readable names.
        items = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, items)

    @property
    def modified(self):
        '''files whose content has been changed'''
        return tuple.__getitem__(self, 0)

    @property
    def added(self):
        '''files newly scheduled for tracking'''
        return tuple.__getitem__(self, 1)

    @property
    def removed(self):
        '''files scheduled for removal'''
        return tuple.__getitem__(self, 2)

    @property
    def deleted(self):
        '''files still in the dirstate but gone from the working copy
        (aka "missing")
        '''
        return tuple.__getitem__(self, 3)

    @property
    def unknown(self):
        '''files absent from the dirstate and not ignored'''
        return tuple.__getitem__(self, 4)

    @property
    def ignored(self):
        '''files absent from the dirstate that are ignored (by _dirignore())'''
        return tuple.__getitem__(self, 5)

    @property
    def clean(self):
        '''files that are tracked and unmodified'''
        return tuple.__getitem__(self, 6)

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
100 101
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # Subpaths present only in ctx2 are pulled out of the mapping and
    # remembered; they get special nullsub treatment below.
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # sorted() gives deterministic iteration order for the callers
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125 126
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            # only live (non-extinct) secret changesets are worth
            # mentioning to the user
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
142 143
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print a traceback (when configured) before the handlers
            # below translate the exception into a user message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # not an internal error: distinct exit code 1 instead of -1
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # the failing module name is the last word of the message
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # presumably an HTTP-style error object (has .code) — TODO confirm
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a closed pager) is expected;
            # stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
253 254
def checknewlabel(repo, lbl, kind):
    """Abort if lbl cannot be used as a new label (bookmark/branch/tag) name.

    The "kind" parameter is deliberately not used in ui output because it
    makes strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        # not an integer: acceptable
        return
    raise error.Abort(_("cannot use an integer as a name"))
267 268
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newline characters would corrupt the dirstate/manifest encodings
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
272 273
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    # newline checks are unconditional, independent of configuration
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        # Windows filename rules are only evaluated when the user asked
        # for warnings or aborts
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, util.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
284 285
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns a pair of booleans (abort, warn).
    '''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows non-portable names are always fatal
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        # value was neither a boolean nor one of the known keywords
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
297 298
class casecollisionauditor(object):
    '''Warn or abort when a file about to be added only differs by case
    from a file already known to the dirstate.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        # when True, a collision raises Abort; otherwise only a warning
        self._abort = abort
        # join/split through '\0' lowercases all tracked names in one
        # encoding.lower() call instead of one call per file
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # f: filename being added; may be called repeatedly
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # already-tracked files are allowed to match themselves
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
321 322
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when the view filters nothing, and also when no filtered
    rev is <= maxrev.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    # only revisions at or below maxrev participate, sorted so the digest
    # is stable regardless of set iteration order
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # '%d;' framing keeps e.g. (1, 23) distinct from (12, 3)
            s.update('%d;' % rev)
        key = s.digest()
    return key
345 346
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal; deeper failures
        # are silently skipped by os.walk
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # returns True (and records the stat) if dirname was not seen
            # before; used to break symlink cycles
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # platform without samestat cannot detect cycles: disable symlinks
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                # mutating dirs in place prunes os.walk's traversal
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse manually through the link, sharing
                        # seen_dirs so cycles terminate
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
393 394
def binnode(ctx):
    """Return the binary node id for a given basectx, mapping the working
    directory (whose node() is None) to wdirid."""
    node = ctx.node()
    return wdirid if node is None else node
400 401
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation.  The working directory (rev() is None) maps to
    wdirrev."""
    rev = ctx.rev()
    return wdirrev if rev is None else rev
408 409
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    # delegate to formatrevnode using the repo's ui for verbosity handling
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
414 415
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity:
    full hex hash in debug mode, short hash otherwise."""
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
422 423
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single changectx, falling back to default when
    revspec is empty (but keeping the integer 0 as a valid spec)."""
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
431 432
def _pairspec(revspec):
    # True when the revset parses to a top-level range expression, i.e. the
    # user explicitly asked for a pair of revisions
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
435 436
def revpair(repo, revs):
    """Resolve user-supplied revs to a (first, second) node pair.

    second is None when the input names a single revision that is not a
    range expression.
    """
    if not revs:
        # no revs given: working dir parent vs working dir
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # take endpoints cheaply when the smartset knows its own ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        # e.g. "-r a -r b" where one of the specs resolved to nothing
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
465 466
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are wrapped into rev(N) revset expressions
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
493 494
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents always matter
        return parents
    if repo.ui.debugflag:
        # debug output always shows two parents, padded with null
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        # sole parent immediately precedes ctx: carries no information
        return []
    return parents
509 510
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind: pass through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # no filesystem match: keep the original pattern
            expanded.append(kindpat)
    return expanded
528 529
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over 'm', which is only bound below; safe because
        # bad() is never invoked before ctx.match() returns
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # match-all: report no patterns so callers can special-case it
        pats = []
    return m, pats
553 554
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
558 559
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
562 563
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
566 567
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if origbackuppath is None:
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured directory
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    # create the destination directory eagerly so callers can write directly
    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath
586 587
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        # bind both lookups once so membership tests need no attribute
        # traversal
        self._torev = repo.changelog.rev
        self._revcontainer = revcontainer

    def __contains__(self, node):
        return self._torev(node) in self._revcontainer
596 597
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # caller-supplied moves take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
685 686
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and remove missing ones, optionally detecting renames.

    Returns 1 if any explicitly-requested file was rejected or a subrepo
    addremove failed, else 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into subrepos first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect explicitly-named files that could not be walked
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # any explicitly requested file that was rejected makes the whole
    # operation fail
    for f in rejected:
        if f in m.files():
            return 1
    return ret
741 742
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the badfn lambda closes over 'rejected', which is bound on the
    # next line; safe because badfn is only called during the walk below
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # fail if any of the named files could not be walked
    for f in rejected:
        if f in m.files():
            return 1
    return 0
770 771
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns five lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # walkresults maps path -> stat result (or falsy when the file is gone
    # from disk); dirstate[abs] is the single-letter dirstate state
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and a legal path: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk: candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
799 800
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping; empty when similarity is 0 or less.
    '''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # stay quiet about files the user named explicitly unless verbose
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
814 815
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the wlock for the whole batch so the dirstate updates appear
    # atomic to concurrent readers
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
824 825
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # states 'm' (merged) / 'n' (normal) are left alone; anything else
        # goes through normallookup so the next status re-checks the file
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed: no copy data
            # can be recorded
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
843 844
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings on success.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty line or a non-alphanumeric first character means the
        # file is not a valid requires file at all
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
862 863
def writerequires(opener, requirements):
    """Write the requirements, one per line in sorted order, so the
    resulting file is deterministic."""
    lines = ["%s\n" % r for r in sorted(requirements)]
    with opener('requires', 'w') as fp:
        for line in lines:
            fp.write(line)
867 868
class filecachesubentry(object):
    # Tracks the cached stat state of one path so filecache can decide
    # whether the backing file changed since the value was computed.

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # record the current on-disk stat as the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # remember the new stat as the baseline for the next check
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (not an error) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
922 923
class filecacheentry(object):
    '''A group of filecachesubentry objects checked as a single unit.'''

    def __init__(self, paths, stat=True):
        # one subentry per backing file
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
939 940
class filecache(object):
    '''A property-like decorator that tracks files under .hg/ for updates.

    The stat info for the backing files is recorded in the owner
    object's _filecache dict the first time the property is computed.
    Later accesses compare fresh stat info against the recorded one and
    recompute the value only when one of the files changed.

    Mercurial either atomically renames or appends to files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fall back to
    recreating the object on every call (essentially the same behavior
    as propertycache).
    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of
        this function to call the appropriate join function on 'obj' (an
        instance of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # accessed on the class itself: hand back the descriptor
        if obj is None:
            return self
        # a value already present in __dict__ means the cache is warm
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        ce = obj._filecache.get(self.name)

        if ce is None:
            joined = [self.join(obj, p) for p in self.paths]
            # We stat -before- creating the object so our cache doesn't
            # lie if a writer modified between the time we read and stat
            ce = filecacheentry(joined, True)
            ce.obj = self.func(obj)
        elif ce.changed():
            ce.obj = self.func(obj)

        obj._filecache[self.name] = ce
        obj.__dict__[self.name] = ce.obj
        return ce.obj

    def __set__(self, obj, value):
        if self.name in obj._filecache:
            ce = obj._filecache[self.name]
        else:
            # an entry must exist because X in __dict__ implies
            # X in _filecache
            joined = [self.join(obj, p) for p in self.paths]
            ce = filecacheentry(joined, False)
            obj._filecache[self.name] = ce

        ce.obj = value                   # update cached copy
        obj.__dict__[self.name] = value  # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1018 1019
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    # the spec comes from the [extdata] section of the configuration
    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    if spec.startswith("shell:"):
        # external commands should be run relative to the repo root
        cmd = spec[6:]
        # NOTE(review): chdir is process-global, so this is not safe if
        # other threads rely on the working directory -- confirm
        cwd = os.getcwd()
        os.chdir(repo.root)
        try:
            src = util.popen(cmd)
        finally:
            # restore the original directory even if popen fails
            os.chdir(cwd)
    else:
        # treat as a URL or file
        src = url.open(repo.ui, spec)

    try:
        for l in src.readlines():
            # split "<revspec> <freeform value>"; a record with no space
            # yields an empty value
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            # NOTE(review): a blank input line produces k == '' here --
            # presumably '' is never in repo; verify
            if k in repo:
                # we ignore data for nodes that don't exist locally
                data[repo[k].rev()] = encoding.tolocal(v)
    finally:
        src.close()

    return data
1069
1019 1070 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1020 1071 if lock is None:
1021 1072 raise error.LockInheritanceContractViolation(
1022 1073 'lock can only be inherited while held')
1023 1074 if environ is None:
1024 1075 environ = {}
1025 1076 with lock.inherit() as locker:
1026 1077 environ[envvar] = locker
1027 1078 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1028 1079
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. Accepts the same
    arguments as ui.system and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1037 1088
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1044 1095
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    optimise = ui.configbool('format', 'generaldelta')
    return optimise
1050 1101
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved key under which read() returns the raw first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a dict of key -> value.

        When 'firstlinenonkeyval' is true, the first line of the file is
        not parsed as a key=value pair; its content (minus the trailing
        newline) is returned under the __firstline key instead."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # lines containing only whitespace (e.g. a bare '\n') are
            # skipped by the 'if line.strip()' filter
            parsed = dict(line[:-1].split('=', 1)
                          for line in lines if line.strip())
            if self.firstlinekey in parsed:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            d.update(parsed)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = [] if firstline is None else ['%s\n' % firstline]

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))

        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1119 1170
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    for source in _reportobsoletedsource:
        if not txnname.startswith(source):
            continue
        # hold the repo weakly so the pending callback does not keep the
        # repository object alive
        reporef = weakref.ref(repo)
        def reportsummary(tr):
            """the actual callback reporting the summary"""
            repo = reporef()
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))
        otr.addpostclose('00-txnreport', reportsummary)
        break
General Comments 0
You need to be logged in to leave comments. Login now