##// END OF EJS Templates
subrepo: adjust subrepo prefix before calling subrepo.addremove() (API)...
Martin von Zweigbergk -
r41778:5ee3c49f default
parent child Browse files
Show More
@@ -1,1841 +1,1842 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import subprocess
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 bin,
21 21 hex,
22 22 nullid,
23 23 nullrev,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28
29 29 from . import (
30 30 encoding,
31 31 error,
32 32 match as matchmod,
33 33 obsolete,
34 34 obsutil,
35 35 pathutil,
36 36 phases,
37 37 policy,
38 38 pycompat,
39 39 revsetlang,
40 40 similar,
41 41 smartset,
42 42 url,
43 43 util,
44 44 vfs,
45 45 )
46 46
47 47 from .utils import (
48 48 procutil,
49 49 stringutil,
50 50 )
51 51
52 52 if pycompat.iswindows:
53 53 from . import scmwindows as scmplatform
54 54 else:
55 55 from . import scmposix as scmplatform
56 56
57 57 parsers = policy.importmod(r'parsers')
58 58
59 59 termsize = scmplatform.termsize
60 60
class status(tuple):
    '''Tuple subclass with one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' entries are only meaningful
    when describing the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = (r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                    r'unknown=%s, ignored=%s, clean=%s>')
        values = tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
        return template % values
114 114
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring entries from ctx1. The
    # subpaths coming from ctx2 matter when .hgsub has been modified in
    # ctx2 but the change is not yet committed in ctx1.
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set(s for s in ctx2.substate if s not in ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For anything only present in ctx2, yield an empty subrepo based on
    # ctx1. That way status and diff give an accurate result when doing
    # 'sub.{status|diff}(rev2)' instead of comparing the ctx2 subrepo
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
139 139
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # only count changesets that are still alive and secret
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
156 156
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Returns -1 for handled errors, 1 for InterventionRequired, and the
    original exit code for SystemExit; unhandled exceptions propagate.
    """
    try:
        try:
            return func()
        except: # re-raises
            # record the traceback (shown when ui.traceback is set) before
            # the handlers below turn the exception into a message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            # not a string at all; show its repr
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        # intervention required is not an error; use a distinct exit code
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # the last word of the message is the module that failed to import
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe is expected when e.g. the pager exits; stay quiet
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code

    return -1
267 267
def checknewlabel(repo, lbl, kind):
    # The "kind" parameter is deliberately kept out of ui output; including
    # it would make the messages difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for ch in (':', '\0', '\n', '\r'):
        if ch not in lbl:
            continue
        raise error.Abort(
            _("%r cannot be used in a name") % pycompat.bytestr(ch))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284 284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
290 290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302 302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames')
    lowered = raw.lower()
    parsed = stringutil.parsebool(raw)
    # on Windows non-portable names are always fatal
    abort = pycompat.iswindows or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if parsed is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
315 315
class casecollisionauditor(object):
    '''Warn or abort when a newly added filename collides case-insensitively
    with a tracked file.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # Track filenames already audited so calling this object twice with
        # the same name does not report a bogus self-collision.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
339 339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest, or None when nothing is filtered at or below maxrev.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
363 363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the starting path itself; errors
        # deeper in the tree are ignored
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; returns True if it was not
            # already known (used to break symlink cycles)
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without os.path.samestat we cannot detect symlink cycles, so
        # refuse to follow symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse through the link target manually; os.walk
                        # itself will not follow it
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # in-place assignment so os.walk (topdown) honors the pruning
            dirs[:] = newdirs
407 407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # a None node means the working directory; use its pseudo-id
    return wdirid if node is None else node
414 414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # a None rev means the working directory; use its pseudo-rev
    return wdirrev if rev is None else rev
422 422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
428 428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full hash in debug mode, short form otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436 436
def resolvehexnodeidprefix(repo, prefix):
    # Resolve a (possibly partial) hex nodeid prefix to a binary node, or
    # None if nothing matches. Raises AmbiguousPrefixLookupError when the
    # prefix matches several nodes and cannot be disambiguated.
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        # an explicit 'x' marker says "this is a hex nodeid, not a revnum"
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        # try to disambiguate within a user-configured revset before
        # giving up
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
465 465
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
        # a leading zero is never printed for a revnum ('0' itself
        # excepted), and an integer past tip cannot name a revision
        if prefix != b'0' and prefix[0:1] == b'0':
            return False
        return i < len(repo)
    except ValueError:
        return False
479 479
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength=max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # in prefixhexnode mode, mark revnum-looking prefixes with 'x'
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # otherwise lengthen the prefix until it cannot be a revnum
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # we want the shortest prefix unique within the configured revset,
        # not within the whole repo
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                # fast path: native nodetree computes the shortest prefix
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # slow pure-Python fallback: grow the prefix until only this
            # node in the revset matches it
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
550 550
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
562 562
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Lookup is attempted in order: special names, revision number, full hex
    nodeid, registered names (bookmarks, tags, ...), then nodeid prefix.
    Raises error.RepoLookupError when nothing matches.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            # try to interpret the symbol as a revision number; negative
            # numbers count from the tip
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            # looks like a full hex nodeid
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try a hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
623 623
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden; check whether it is obsolete so the message
    # can explain why it is no longer visible.
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
648 648
def revsingle(repo, revspec, default='.', localalias=None):
    # an empty spec (but not the integer 0) falls back to the default
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
657 657
def _pairspec(revspec):
    # True when the top-level revset operator is a range of some kind
    tree = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return tree and tree[0] in rangeops
661 661
def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    resolved = revrange(repo, revs)
    if not resolved:
        raise error.Abort(_('empty revision range'))

    first = resolved.first()
    second = resolved.last()

    degenerate = first == second
    if (degenerate and len(revs) >= 2
            and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if degenerate and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
683 683
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are revision numbers; quote them into revset syntax
    allspecs = [revsetlang.formatspec('%d', spec) if isinstance(spec, int)
                else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
711 711
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents are always meaningful
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    prev = parents[0].rev()
    if prev >= intrev(ctx) - 1:
        # the parent is simply the preceding revision; elide it
        return []
    return parents
727 727
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if not relative:
        # repo-relative: present paths unchanged
        return lambda f: f
    cwd = repo.getcwd()
    pathto = repo.pathto
    return lambda f: pathto(f, cwd)
760 760
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kinds are passed through untouched
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # glob matched nothing; keep the original pattern
            ret.append(kindpat)
    return ret
779 779
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.

    The matcher will warn about bad matches, unless an alternate badfn
    callback is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        def badfn(f, msg):
            # 'm' is bound below, before the matcher can invoke this callback
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
804 804
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
809 809
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
813 813
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
817 817
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # a plain path: just canonicalize it
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    # a real pattern: it must match exactly one file in the revision
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        raise error.ParseError(msg)
    return files[0]
831 831
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    backupdir = ui.config('ui', 'origbackuppath')
    if backupdir:
        return vfs.vfs(repo.wvfs.join(backupdir))
    return None
840 840
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no dedicated backup directory configured: put the backup next to
        # the file itself
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        # a directory occupies the backup path itself; clear it out
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
875 875
876 876 class _containsnode(object):
877 877 """proxy __contains__(node) to container.__contains__ which accepts revs"""
878 878
879 879 def __init__(self, repo, revcontainer):
880 880 self._torev = repo.changelog.rev
881 881 self._revcontains = revcontainer.__contains__
882 882
883 883 def __contains__(self, node):
884 884 return self._revcontains(self._torev(node))
885 885
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.

    If fixphase is true, new nodes' phases are adjusted to be at least their
    parents' phase (or at least targetphase, when given). targetphase
    requires fixphase.

    backup is forwarded to repair.delayedstrip when obsolescence markers are
    not enabled and the old nodes are stripped instead.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        # bare iterable of nodes: each old node simply has no successor
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                # explicit caller-provided move wins
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process new nodes in rev order so a parent's (possibly updated)
        # phase is known before its children are examined
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
1028 1028
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and forget missing ones, optionally detecting renames.

    prefix is the path of this repo relative to the outermost repo; it is
    used for user-facing paths and extended before recursing into subrepos.
    opts may contain 'dry_run', 'similarity' (0-100), and 'subrepos'.

    Returns 1 if anything was rejected or a subrepo reported a problem,
    otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when explicitly requested, when the subrepo itself is
        # named, or when the matcher selects files inside it
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            # extend the prefix so the subrepo reports paths relative to
            # the outermost repo
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            try:
                if sub.addremove(submatch, subprefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
1090 1091
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the badfn closure records paths the matcher refuses
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report what will be added (unknown/forgotten) or removed (deleted)
        newset = set(unknown) | set(forgotten)
        for abs in sorted(newset | set(deleted)):
            if abs in newset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
1119 1120
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # dirstate states used below: '?' untracked, 'r' removed, 'a' added;
    # st is the on-disk stat result (falsy when the path is gone)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # only report untracked paths that pass the path auditor
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1149 1150
1150 1151 def _findrenames(repo, matcher, added, removed, similarity):
1151 1152 '''Find renames from removed files to added ones.'''
1152 1153 renames = {}
1153 1154 if similarity > 0:
1154 1155 for old, new, score in similar.findrenames(repo, added, removed,
1155 1156 similarity):
1156 1157 if (repo.ui.verbose or not matcher.exact(old)
1157 1158 or not matcher.exact(new)):
1158 1159 repo.ui.status(_('recording removal of %s as rename to %s '
1159 1160 '(%d%% similar)\n') %
1160 1161 (matcher.rel(old), matcher.rel(new),
1161 1162 score * 100))
1162 1163 renames[new] = old
1163 1164 return renames
1164 1165
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for target, source in renames.iteritems():
            workingctx.copy(source, target)
1174 1175
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # NOTE(review): normallookup appears to reset dst's dirstate
            # entry here — confirm against dirstate.normallookup
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed: no copy data can
            # reference it, so just add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1193 1194
def writerequires(opener, requirements):
    """Atomically write the sorted requirements, one per line, to 'requires'."""
    with opener('requires', 'w', atomictemp=True) as fp:
        fp.write(''.join("%s\n" % req for req in sorted(requirements)))
1198 1199
class filecachesubentry(object):
    """Stat-based change detector for a single file path.

    When 'stat' is true the path is stat()ed immediately; otherwise the
    stat state (and whether the file is cacheable at all) is determined
    lazily on the first changed() call.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat the path so the next changed() compares against now
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True when the file may have changed since the last stat."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns a util.cachestat, or None when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1253 1254
class filecacheentry(object):
    """Aggregate change detector over several paths.

    Wraps one filecachesubentry per path; the entry as a whole is
    considered changed as soon as any wrapped path changed.
    """
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1270 1271
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator application: remember the wrapped function and both the
        # native-str and bytes forms of its name
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # once the value is in obj.__dict__ the descriptor is bypassed, so
        # reaching this point means the value is not cached on the instance
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # tracked files changed on disk: recompute
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1355 1356
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the subprocess and close the stream, then surface a
        # nonzero exit status as an error
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1412 1413
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd via repo.ui.system with an inheritable lock token in envvar.

    'lock' must currently be held; its inherited token is exported to the
    subprocess through the environment variable 'envvar'.  Note that a
    caller-provided 'environ' dict is mutated in place.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1422 1423
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1431 1432
class progress(object):
    """Helper to drive a progress bar and optional debug output.

    'updatebar' is called as (topic, pos, item, unit, total) on every
    change.  Instances work as context managers; leaving the context calls
    complete(), which signals the end of the topic with pos=None.
    """
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        # when progress.debug is set, mirror every update to ui.debug()
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move the bar to 'pos', optionally replacing 'total'."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        """Advance the bar by 'step' units."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Mark this topic finished; pos=None tells the bar to clear it."""
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        """Write the current progress state to ui.debug()."""
        # initialize 'unit' unconditionally: it was previously left unbound
        # when self.unit was empty, raising UnboundLocalError in the
        # formatting below whenever progress.debug was enabled
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1478 1479
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1485 1486
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised

    Returns the boolean value of the format.generaldelta config knob.
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1491 1492
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # note: 'keys' is accepted but not used by the visible implementation
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # dict() raises ValueError for a line without a '=' separator
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1560 1561
# Transaction-name prefixes (matched with startswith, see
# registersummarycallback's txmatch) for which obsoleted changesets are
# summarized after the transaction closes.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction-name prefixes for which newly added changesets and phase
# changes are reported.
_reportnewcssource = [
    'pull',
    'unbundle',
]
1573 1574
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1586 1587
# a list of (repo, revs, match) prefetch functions, invoked by
# prefetchfiles() above
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1592 1593
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Which summaries are registered depends on 'txnname' matching the
    _report*source prefix lists above and on repo configuration.
    """
    def txmatch(sources):
        # a transaction matches when its name starts with any source prefix
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1721 1722
1722 1723 def getinstabilitymessage(delta, instability):
1723 1724 """function to return the message to show warning about new instabilities
1724 1725
1725 1726 exists as a separate function so that extension can wrap to show more
1726 1727 information like how to fix instabilities"""
1727 1728 if delta > 0:
1728 1729 return _('%i new %s changesets\n') % (delta, instability)
1729 1730
1730 1731 def nodesummaries(repo, nodes, maxnumnodes=4):
1731 1732 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1732 1733 return ' '.join(short(h) for h in nodes)
1733 1734 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1734 1735 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1735 1736
1736 1737 def enforcesinglehead(repo, tr, desc):
1737 1738 """check that no named branch has multiple heads"""
1738 1739 if desc in ('strip', 'repair'):
1739 1740 # skip the logic during strip
1740 1741 return
1741 1742 visible = repo.filtered('visible')
1742 1743 # possible improvement: we could restrict the check to affected branch
1743 1744 for name, heads in visible.branchmap().iteritems():
1744 1745 if len(heads) > 1:
1745 1746 msg = _('rejecting multiple heads on branch "%s"')
1746 1747 msg %= name
1747 1748 hint = _('%d heads: %s')
1748 1749 hint %= (len(heads), nodesummaries(repo, heads))
1749 1750 raise error.Abort(msg, hint=hint)
1750 1751
1751 1752 def wrapconvertsink(sink):
1752 1753 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1753 1754 before it is used, whether or not the convert extension was formally loaded.
1754 1755 """
1755 1756 return sink
1756 1757
1757 1758 def unhidehashlikerevs(repo, specs, hiddentype):
1758 1759 """parse the user specs and unhide changesets whose hash or revision number
1759 1760 is passed.
1760 1761
1761 1762 hiddentype can be: 1) 'warn': warn while unhiding changesets
1762 1763 2) 'nowarn': don't warn while unhiding changesets
1763 1764
1764 1765 returns a repo object with the required changesets unhidden
1765 1766 """
1766 1767 if not repo.filtername or not repo.ui.configbool('experimental',
1767 1768 'directaccess'):
1768 1769 return repo
1769 1770
1770 1771 if repo.filtername not in ('visible', 'visible-hidden'):
1771 1772 return repo
1772 1773
1773 1774 symbols = set()
1774 1775 for spec in specs:
1775 1776 try:
1776 1777 tree = revsetlang.parse(spec)
1777 1778 except error.ParseError: # will be reported by scmutil.revrange()
1778 1779 continue
1779 1780
1780 1781 symbols.update(revsetlang.gethashlikesymbols(tree))
1781 1782
1782 1783 if not symbols:
1783 1784 return repo
1784 1785
1785 1786 revs = _getrevsfromsymbols(repo, symbols)
1786 1787
1787 1788 if not revs:
1788 1789 return repo
1789 1790
1790 1791 if hiddentype == 'warn':
1791 1792 unfi = repo.unfiltered()
1792 1793 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1793 1794 repo.ui.warn(_("warning: accessing hidden changesets for write "
1794 1795 "operation: %s\n") % revstr)
1795 1796
1796 1797 # we have to use new filtername to separate branch/tags cache until we can
 1797 1798 # disable these caches when revisions are dynamically pinned.
1798 1799 return repo.filtered('visible-hidden', revs)
1799 1800
1800 1801 def _getrevsfromsymbols(repo, symbols):
1801 1802 """parse the list of symbols and returns a set of revision numbers of hidden
1802 1803 changesets present in symbols"""
1803 1804 revs = set()
1804 1805 unfi = repo.unfiltered()
1805 1806 unficl = unfi.changelog
1806 1807 cl = repo.changelog
1807 1808 tiprev = len(unficl)
1808 1809 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1809 1810 for s in symbols:
1810 1811 try:
1811 1812 n = int(s)
1812 1813 if n <= tiprev:
1813 1814 if not allowrevnums:
1814 1815 continue
1815 1816 else:
1816 1817 if n not in cl:
1817 1818 revs.add(n)
1818 1819 continue
1819 1820 except ValueError:
1820 1821 pass
1821 1822
1822 1823 try:
1823 1824 s = resolvehexnodeidprefix(unfi, s)
1824 1825 except (error.LookupError, error.WdirUnsupported):
1825 1826 s = None
1826 1827
1827 1828 if s is not None:
1828 1829 rev = unficl.rev(s)
1829 1830 if rev not in cl:
1830 1831 revs.add(rev)
1831 1832
1832 1833 return revs
1833 1834
1834 1835 def bookmarkrevs(repo, mark):
1835 1836 """
1836 1837 Select revisions reachable by a given bookmark
1837 1838 """
1838 1839 return repo.revs("ancestors(bookmark(%s)) - "
1839 1840 "ancestors(head() and not bookmark(%s)) - "
1840 1841 "ancestors(bookmark() and not bookmark(%s))",
1841 1842 mark, mark, mark)
@@ -1,1844 +1,1843 b''
1 1 # subrepo.py - sub-repository classes and factory
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22 from .i18n import _
23 23 from . import (
24 24 cmdutil,
25 25 encoding,
26 26 error,
27 27 exchange,
28 28 logcmdutil,
29 29 match as matchmod,
30 30 node,
31 31 pathutil,
32 32 phases,
33 33 pycompat,
34 34 scmutil,
35 35 subrepoutil,
36 36 util,
37 37 vfs as vfsmod,
38 38 )
39 39 from .utils import (
40 40 dateutil,
41 41 procutil,
42 42 stringutil,
43 43 )
44 44
45 45 hg = None
46 46 reporelpath = subrepoutil.reporelpath
47 47 subrelpath = subrepoutil.subrelpath
48 48 _abssource = subrepoutil._abssource
49 49 propertycache = util.propertycache
50 50
51 51 def _expandedabspath(path):
52 52 '''
53 53 get a path or url and if it is a path expand it and return an absolute path
54 54 '''
55 55 expandedpath = util.urllocalpath(util.expandpath(path))
56 56 u = util.url(expandedpath)
57 57 if not u.scheme:
58 58 path = util.normpath(os.path.abspath(u.path))
59 59 return path
60 60
61 61 def _getstorehashcachename(remotepath):
62 62 '''get a unique filename for the store hash cache of a remote repository'''
63 63 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64 64
65 65 class SubrepoAbort(error.Abort):
66 66 """Exception class used to avoid handling a subrepo error more than once"""
67 67 def __init__(self, *args, **kw):
68 68 self.subrepo = kw.pop(r'subrepo', None)
69 69 self.cause = kw.pop(r'cause', None)
70 70 error.Abort.__init__(self, *args, **kw)
71 71
72 72 def annotatesubrepoerror(func):
73 73 def decoratedmethod(self, *args, **kargs):
74 74 try:
75 75 res = func(self, *args, **kargs)
76 76 except SubrepoAbort as ex:
77 77 # This exception has already been handled
78 78 raise ex
79 79 except error.Abort as ex:
80 80 subrepo = subrelpath(self)
81 81 errormsg = (stringutil.forcebytestr(ex) + ' '
82 82 + _('(in subrepository "%s")') % subrepo)
83 83 # avoid handling this exception by raising a SubrepoAbort exception
84 84 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
85 85 cause=sys.exc_info())
86 86 return res
87 87 return decoratedmethod
88 88
89 89 def _updateprompt(ui, sub, dirty, local, remote):
90 90 if dirty:
91 91 msg = (_(' subrepository sources for %s differ\n'
92 92 'use (l)ocal source (%s) or (r)emote source (%s)?'
93 93 '$$ &Local $$ &Remote')
94 94 % (subrelpath(sub), local, remote))
95 95 else:
96 96 msg = (_(' subrepository sources for %s differ (in checked out '
97 97 'version)\n'
98 98 'use (l)ocal source (%s) or (r)emote source (%s)?'
99 99 '$$ &Local $$ &Remote')
100 100 % (subrelpath(sub), local, remote))
101 101 return ui.promptchoice(msg, 0)
102 102
103 103 def _sanitize(ui, vfs, ignore):
104 104 for dirname, dirs, names in vfs.walk():
105 105 for i, d in enumerate(dirs):
106 106 if d.lower() == ignore:
107 107 del dirs[i]
108 108 break
109 109 if vfs.basename(dirname).lower() != '.hg':
110 110 continue
111 111 for f in names:
112 112 if f.lower() == 'hgrc':
113 113 ui.warn(_("warning: removing potentially hostile 'hgrc' "
114 114 "in '%s'\n") % vfs.join(dirname))
115 115 vfs.unlink(vfs.reljoin(dirname, f))
116 116
117 117 def _auditsubrepopath(repo, path):
118 118 # sanity check for potentially unsafe paths such as '~' and '$FOO'
119 119 if path.startswith('~') or '$' in path or util.expandpath(path) != path:
120 120 raise error.Abort(_('subrepo path contains illegal component: %s')
121 121 % path)
122 122 # auditor doesn't check if the path itself is a symlink
123 123 pathutil.pathauditor(repo.root)(path)
124 124 if repo.wvfs.islink(path):
125 125 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
126 126
127 127 SUBREPO_ALLOWED_DEFAULTS = {
128 128 'hg': True,
129 129 'git': False,
130 130 'svn': False,
131 131 }
132 132
133 133 def _checktype(ui, kind):
134 134 # subrepos.allowed is a master kill switch. If disabled, subrepos are
135 135 # disabled period.
136 136 if not ui.configbool('subrepos', 'allowed', True):
137 137 raise error.Abort(_('subrepos not enabled'),
138 138 hint=_("see 'hg help config.subrepos' for details"))
139 139
140 140 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
141 141 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
142 142 raise error.Abort(_('%s subrepos not allowed') % kind,
143 143 hint=_("see 'hg help config.subrepos' for details"))
144 144
145 145 if kind not in types:
146 146 raise error.Abort(_('unknown subrepo type %s') % kind)
147 147
148 148 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
149 149 """return instance of the right subrepo class for subrepo in path"""
150 150 # subrepo inherently violates our import layering rules
151 151 # because it wants to make repo objects from deep inside the stack
152 152 # so we manually delay the circular imports to not break
153 153 # scripts that don't use our demand-loading
154 154 global hg
155 155 from . import hg as h
156 156 hg = h
157 157
158 158 repo = ctx.repo()
159 159 _auditsubrepopath(repo, path)
160 160 state = ctx.substate[path]
161 161 _checktype(repo.ui, state[2])
162 162 if allowwdir:
163 163 state = (state[0], ctx.subrev(path), state[2])
164 164 return types[state[2]](ctx, path, state[:2], allowcreate)
165 165
166 166 def nullsubrepo(ctx, path, pctx):
167 167 """return an empty subrepo in pctx for the extant subrepo in ctx"""
168 168 # subrepo inherently violates our import layering rules
169 169 # because it wants to make repo objects from deep inside the stack
170 170 # so we manually delay the circular imports to not break
171 171 # scripts that don't use our demand-loading
172 172 global hg
173 173 from . import hg as h
174 174 hg = h
175 175
176 176 repo = ctx.repo()
177 177 _auditsubrepopath(repo, path)
178 178 state = ctx.substate[path]
179 179 _checktype(repo.ui, state[2])
180 180 subrev = ''
181 181 if state[2] == 'hg':
182 182 subrev = "0" * 40
183 183 return types[state[2]](pctx, path, (state[0], subrev), True)
184 184
185 185 # subrepo classes need to implement the following abstract class:
186 186
187 187 class abstractsubrepo(object):
188 188
189 189 def __init__(self, ctx, path):
190 190 """Initialize abstractsubrepo part
191 191
192 192 ``ctx`` is the context referring this subrepository in the
193 193 parent repository.
194 194
195 195 ``path`` is the path to this subrepository as seen from
196 196 innermost repository.
197 197 """
198 198 self.ui = ctx.repo().ui
199 199 self._ctx = ctx
200 200 self._path = path
201 201
202 202 def addwebdirpath(self, serverpath, webconf):
203 203 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
204 204
205 205 ``serverpath`` is the path component of the URL for this repo.
206 206
207 207 ``webconf`` is the dictionary of hgwebdir entries.
208 208 """
209 209 pass
210 210
211 211 def storeclean(self, path):
212 212 """
213 213 returns true if the repository has not changed since it was last
214 214 cloned from or pushed to a given repository.
215 215 """
216 216 return False
217 217
218 218 def dirty(self, ignoreupdate=False, missing=False):
219 219 """returns true if the dirstate of the subrepo is dirty or does not
220 220 match current stored state. If ignoreupdate is true, only check
221 221 whether the subrepo has uncommitted changes in its dirstate. If missing
222 222 is true, check for deleted files.
223 223 """
224 224 raise NotImplementedError
225 225
226 226 def dirtyreason(self, ignoreupdate=False, missing=False):
227 227 """return reason string if it is ``dirty()``
228 228
229 229 Returned string should have enough information for the message
230 230 of exception.
231 231
232 232 This returns None, otherwise.
233 233 """
234 234 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
235 235 return _('uncommitted changes in subrepository "%s"'
236 236 ) % subrelpath(self)
237 237
238 238 def bailifchanged(self, ignoreupdate=False, hint=None):
239 239 """raise Abort if subrepository is ``dirty()``
240 240 """
241 241 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
242 242 missing=True)
243 243 if dirtyreason:
244 244 raise error.Abort(dirtyreason, hint=hint)
245 245
246 246 def basestate(self):
247 247 """current working directory base state, disregarding .hgsubstate
248 248 state and working directory modifications"""
249 249 raise NotImplementedError
250 250
251 251 def checknested(self, path):
252 252 """check if path is a subrepository within this repository"""
253 253 return False
254 254
255 255 def commit(self, text, user, date):
256 256 """commit the current changes to the subrepo with the given
257 257 log message. Use given user and date if possible. Return the
258 258 new state of the subrepo.
259 259 """
260 260 raise NotImplementedError
261 261
262 262 def phase(self, state):
263 263 """returns phase of specified state in the subrepository.
264 264 """
265 265 return phases.public
266 266
267 267 def remove(self):
268 268 """remove the subrepo
269 269
270 270 (should verify the dirstate is not dirty first)
271 271 """
272 272 raise NotImplementedError
273 273
274 274 def get(self, state, overwrite=False):
275 275 """run whatever commands are needed to put the subrepo into
276 276 this state
277 277 """
278 278 raise NotImplementedError
279 279
280 280 def merge(self, state):
281 281 """merge currently-saved state with the new state."""
282 282 raise NotImplementedError
283 283
284 284 def push(self, opts):
285 285 """perform whatever action is analogous to 'hg push'
286 286
287 287 This may be a no-op on some systems.
288 288 """
289 289 raise NotImplementedError
290 290
291 291 def add(self, ui, match, prefix, explicitonly, **opts):
292 292 return []
293 293
294 294 def addremove(self, matcher, prefix, opts):
295 295 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
296 296 return 1
297 297
298 298 def cat(self, match, fm, fntemplate, prefix, **opts):
299 299 return 1
300 300
301 301 def status(self, rev2, **opts):
302 302 return scmutil.status([], [], [], [], [], [], [])
303 303
304 304 def diff(self, ui, diffopts, node2, match, prefix, **opts):
305 305 pass
306 306
307 307 def outgoing(self, ui, dest, opts):
308 308 return 1
309 309
310 310 def incoming(self, ui, source, opts):
311 311 return 1
312 312
313 313 def files(self):
314 314 """return filename iterator"""
315 315 raise NotImplementedError
316 316
317 317 def filedata(self, name, decode):
318 318 """return file data, optionally passed through repo decoders"""
319 319 raise NotImplementedError
320 320
321 321 def fileflags(self, name):
322 322 """return file flags"""
323 323 return ''
324 324
325 325 def matchfileset(self, expr, badfn=None):
326 326 """Resolve the fileset expression for this repo"""
327 327 return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn)
328 328
329 329 def printfiles(self, ui, m, fm, fmt, subrepos):
330 330 """handle the files command for this subrepo"""
331 331 return 1
332 332
333 333 def archive(self, archiver, prefix, match=None, decode=True):
334 334 if match is not None:
335 335 files = [f for f in self.files() if match(f)]
336 336 else:
337 337 files = self.files()
338 338 total = len(files)
339 339 relpath = subrelpath(self)
340 340 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
341 341 unit=_('files'), total=total)
342 342 progress.update(0)
343 343 for name in files:
344 344 flags = self.fileflags(name)
345 345 mode = 'x' in flags and 0o755 or 0o644
346 346 symlink = 'l' in flags
347 347 archiver.addfile(prefix + self._path + '/' + name,
348 348 mode, symlink, self.filedata(name, decode))
349 349 progress.increment()
350 350 progress.complete()
351 351 return total
352 352
353 353 def walk(self, match):
354 354 '''
355 355 walk recursively through the directory tree, finding all files
356 356 matched by the match function
357 357 '''
358 358
359 359 def forget(self, match, prefix, dryrun, interactive):
360 360 return ([], [])
361 361
362 362 def removefiles(self, matcher, prefix, after, force, subrepos,
363 363 dryrun, warnings):
364 364 """remove the matched files from the subrepository and the filesystem,
365 365 possibly by force and/or after the file has been removed from the
366 366 filesystem. Return 0 on success, 1 on any warning.
367 367 """
368 368 warnings.append(_("warning: removefiles not implemented (%s)")
369 369 % self._path)
370 370 return 1
371 371
372 372 def revert(self, substate, *pats, **opts):
373 373 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
374 374 % (substate[0], substate[2]))
375 375 return []
376 376
377 377 def shortid(self, revid):
378 378 return revid
379 379
380 380 def unshare(self):
381 381 '''
382 382 convert this repository from shared to normal storage.
383 383 '''
384 384
385 385 def verify(self):
386 386 '''verify the integrity of the repository. Return 0 on success or
387 387 warning, 1 on any error.
388 388 '''
389 389 return 0
390 390
391 391 @propertycache
392 392 def wvfs(self):
393 393 """return vfs to access the working directory of this subrepository
394 394 """
395 395 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
396 396
397 397 @propertycache
398 398 def _relpath(self):
399 399 """return path to this subrepository as seen from outermost repository
400 400 """
401 401 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
402 402
403 403 class hgsubrepo(abstractsubrepo):
404 404 def __init__(self, ctx, path, state, allowcreate):
405 405 super(hgsubrepo, self).__init__(ctx, path)
406 406 self._state = state
407 407 r = ctx.repo()
408 408 root = r.wjoin(path)
409 409 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
410 410 # repository constructor does expand variables in path, which is
411 411 # unsafe since subrepo path might come from untrusted source.
412 412 if os.path.realpath(util.expandpath(root)) != root:
413 413 raise error.Abort(_('subrepo path contains illegal component: %s')
414 414 % path)
415 415 self._repo = hg.repository(r.baseui, root, create=create)
416 416 if self._repo.root != root:
417 417 raise error.ProgrammingError('failed to reject unsafe subrepo '
418 418 'path: %s (expanded to %s)'
419 419 % (root, self._repo.root))
420 420
421 421 # Propagate the parent's --hidden option
422 422 if r is r.unfiltered():
423 423 self._repo = self._repo.unfiltered()
424 424
425 425 self.ui = self._repo.ui
426 426 for s, k in [('ui', 'commitsubrepos')]:
427 427 v = r.ui.config(s, k)
428 428 if v:
429 429 self.ui.setconfig(s, k, v, 'subrepo')
430 430 # internal config: ui._usedassubrepo
431 431 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
432 432 self._initrepo(r, state[0], create)
433 433
434 434 @annotatesubrepoerror
435 435 def addwebdirpath(self, serverpath, webconf):
436 436 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
437 437
438 438 def storeclean(self, path):
439 439 with self._repo.lock():
440 440 return self._storeclean(path)
441 441
442 442 def _storeclean(self, path):
443 443 clean = True
444 444 itercache = self._calcstorehash(path)
445 445 for filehash in self._readstorehashcache(path):
446 446 if filehash != next(itercache, None):
447 447 clean = False
448 448 break
449 449 if clean:
450 450 # if not empty:
451 451 # the cached and current pull states have a different size
452 452 clean = next(itercache, None) is None
453 453 return clean
454 454
455 455 def _calcstorehash(self, remotepath):
456 456 '''calculate a unique "store hash"
457 457
 458 458 This method is used to detect when there are changes that may
459 459 require a push to a given remote path.'''
460 460 # sort the files that will be hashed in increasing (likely) file size
461 461 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
462 462 yield '# %s\n' % _expandedabspath(remotepath)
463 463 vfs = self._repo.vfs
464 464 for relname in filelist:
465 465 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
466 466 yield '%s = %s\n' % (relname, filehash)
467 467
468 468 @propertycache
469 469 def _cachestorehashvfs(self):
470 470 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
471 471
472 472 def _readstorehashcache(self, remotepath):
473 473 '''read the store hash cache for a given remote repository'''
474 474 cachefile = _getstorehashcachename(remotepath)
475 475 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
476 476
477 477 def _cachestorehash(self, remotepath):
478 478 '''cache the current store hash
479 479
480 480 Each remote repo requires its own store hash cache, because a subrepo
481 481 store may be "clean" versus a given remote repo, but not versus another
482 482 '''
483 483 cachefile = _getstorehashcachename(remotepath)
484 484 with self._repo.lock():
485 485 storehash = list(self._calcstorehash(remotepath))
486 486 vfs = self._cachestorehashvfs
487 487 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
488 488
489 489 def _getctx(self):
490 490 '''fetch the context for this subrepo revision, possibly a workingctx
491 491 '''
492 492 if self._ctx.rev() is None:
493 493 return self._repo[None] # workingctx if parent is workingctx
494 494 else:
495 495 rev = self._state[1]
496 496 return self._repo[rev]
497 497
498 498 @annotatesubrepoerror
499 499 def _initrepo(self, parentrepo, source, create):
500 500 self._repo._subparent = parentrepo
501 501 self._repo._subsource = source
502 502
503 503 if create:
504 504 lines = ['[paths]\n']
505 505
506 506 def addpathconfig(key, value):
507 507 if value:
508 508 lines.append('%s = %s\n' % (key, value))
509 509 self.ui.setconfig('paths', key, value, 'subrepo')
510 510
511 511 defpath = _abssource(self._repo, abort=False)
512 512 defpushpath = _abssource(self._repo, True, abort=False)
513 513 addpathconfig('default', defpath)
514 514 if defpath != defpushpath:
515 515 addpathconfig('default-push', defpushpath)
516 516
517 517 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
518 518
519 519 @annotatesubrepoerror
520 520 def add(self, ui, match, prefix, explicitonly, **opts):
521 521 return cmdutil.add(ui, self._repo, match, prefix, explicitonly, **opts)
522 522
523 523 @annotatesubrepoerror
524 524 def addremove(self, m, prefix, opts):
525 525 # In the same way as sub directories are processed, once in a subrepo,
 526 526 # always enter any of its subrepos. Don't corrupt the options that will
527 527 # be used to process sibling subrepos however.
528 528 opts = copy.copy(opts)
529 529 opts['subrepos'] = True
530 return scmutil.addremove(self._repo, m,
531 self.wvfs.reljoin(prefix, self._path), opts)
530 return scmutil.addremove(self._repo, m, prefix, opts)
532 531
533 532 @annotatesubrepoerror
534 533 def cat(self, match, fm, fntemplate, prefix, **opts):
535 534 rev = self._state[1]
536 535 ctx = self._repo[rev]
537 536 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
538 537 prefix, **opts)
539 538
540 539 @annotatesubrepoerror
541 540 def status(self, rev2, **opts):
542 541 try:
543 542 rev1 = self._state[1]
544 543 ctx1 = self._repo[rev1]
545 544 ctx2 = self._repo[rev2]
546 545 return self._repo.status(ctx1, ctx2, **opts)
547 546 except error.RepoLookupError as inst:
548 547 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
549 548 % (inst, subrelpath(self)))
550 549 return scmutil.status([], [], [], [], [], [], [])
551 550
552 551 @annotatesubrepoerror
553 552 def diff(self, ui, diffopts, node2, match, prefix, **opts):
554 553 try:
555 554 node1 = node.bin(self._state[1])
556 555 # We currently expect node2 to come from substate and be
557 556 # in hex format
558 557 if node2 is not None:
559 558 node2 = node.bin(node2)
560 559 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
561 560 node1, node2, match,
562 561 prefix=posixpath.join(prefix, self._path),
563 562 listsubrepos=True, **opts)
564 563 except error.RepoLookupError as inst:
565 564 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
566 565 % (inst, subrelpath(self)))
567 566
568 567 @annotatesubrepoerror
569 568 def archive(self, archiver, prefix, match=None, decode=True):
570 569 self._get(self._state + ('hg',))
571 570 files = self.files()
572 571 if match:
573 572 files = [f for f in files if match(f)]
574 573 rev = self._state[1]
575 574 ctx = self._repo[rev]
576 575 scmutil.prefetchfiles(self._repo, [ctx.rev()],
577 576 scmutil.matchfiles(self._repo, files))
578 577 total = abstractsubrepo.archive(self, archiver, prefix, match)
579 578 for subpath in ctx.substate:
580 579 s = subrepo(ctx, subpath, True)
581 580 submatch = matchmod.subdirmatcher(subpath, match)
582 581 total += s.archive(archiver, prefix + self._path + '/', submatch,
583 582 decode)
584 583 return total
585 584
586 585 @annotatesubrepoerror
587 586 def dirty(self, ignoreupdate=False, missing=False):
588 587 r = self._state[1]
589 588 if r == '' and not ignoreupdate: # no state recorded
590 589 return True
591 590 w = self._repo[None]
592 591 if r != w.p1().hex() and not ignoreupdate:
593 592 # different version checked out
594 593 return True
595 594 return w.dirty(missing=missing) # working directory changed
596 595
597 596 def basestate(self):
598 597 return self._repo['.'].hex()
599 598
600 599 def checknested(self, path):
601 600 return self._repo._checknested(self._repo.wjoin(path))
602 601
603 602 @annotatesubrepoerror
604 603 def commit(self, text, user, date):
605 604 # don't bother committing in the subrepo if it's only been
606 605 # updated
607 606 if not self.dirty(True):
608 607 return self._repo['.'].hex()
609 608 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
610 609 n = self._repo.commit(text, user, date)
611 610 if not n:
612 611 return self._repo['.'].hex() # different version checked out
613 612 return node.hex(n)
614 613
615 614 @annotatesubrepoerror
616 615 def phase(self, state):
617 616 return self._repo[state or '.'].phase()
618 617
619 618 @annotatesubrepoerror
620 619 def remove(self):
621 620 # we can't fully delete the repository as it may contain
622 621 # local-only history
623 622 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
624 623 hg.clean(self._repo, node.nullid, False)
625 624
626 625 def _get(self, state):
627 626 source, revision, kind = state
628 627 parentrepo = self._repo._subparent
629 628
630 629 if revision in self._repo.unfiltered():
631 630 # Allow shared subrepos tracked at null to setup the sharedpath
632 631 if len(self._repo) != 0 or not parentrepo.shared():
633 632 return True
634 633 self._repo._subsource = source
635 634 srcurl = _abssource(self._repo)
636 635
637 636 # Defer creating the peer until after the status message is logged, in
638 637 # case there are network problems.
639 638 getpeer = lambda: hg.peer(self._repo, {}, srcurl)
640 639
641 640 if len(self._repo) == 0:
642 641 # use self._repo.vfs instead of self.wvfs to remove .hg only
643 642 self._repo.vfs.rmtree()
644 643
645 644 # A remote subrepo could be shared if there is a local copy
646 645 # relative to the parent's share source. But clone pooling doesn't
647 646 # assemble the repos in a tree, so that can't be consistently done.
648 647 # A simpler option is for the user to configure clone pooling, and
649 648 # work with that.
650 649 if parentrepo.shared() and hg.islocal(srcurl):
651 650 self.ui.status(_('sharing subrepo %s from %s\n')
652 651 % (subrelpath(self), srcurl))
653 652 shared = hg.share(self._repo._subparent.baseui,
654 653 getpeer(), self._repo.root,
655 654 update=False, bookmarks=False)
656 655 self._repo = shared.local()
657 656 else:
658 657 # TODO: find a common place for this and this code in the
659 658 # share.py wrap of the clone command.
660 659 if parentrepo.shared():
661 660 pool = self.ui.config('share', 'pool')
662 661 if pool:
663 662 pool = util.expandpath(pool)
664 663
665 664 shareopts = {
666 665 'pool': pool,
667 666 'mode': self.ui.config('share', 'poolnaming'),
668 667 }
669 668 else:
670 669 shareopts = {}
671 670
672 671 self.ui.status(_('cloning subrepo %s from %s\n')
673 672 % (subrelpath(self), util.hidepassword(srcurl)))
674 673 other, cloned = hg.clone(self._repo._subparent.baseui, {},
675 674 getpeer(), self._repo.root,
676 675 update=False, shareopts=shareopts)
677 676 self._repo = cloned.local()
678 677 self._initrepo(parentrepo, source, create=True)
679 678 self._cachestorehash(srcurl)
680 679 else:
681 680 self.ui.status(_('pulling subrepo %s from %s\n')
682 681 % (subrelpath(self), util.hidepassword(srcurl)))
683 682 cleansub = self.storeclean(srcurl)
684 683 exchange.pull(self._repo, getpeer())
685 684 if cleansub:
686 685 # keep the repo clean after pull
687 686 self._cachestorehash(srcurl)
688 687 return False
689 688
690 689 @annotatesubrepoerror
691 690 def get(self, state, overwrite=False):
692 691 inrepo = self._get(state)
693 692 source, revision, kind = state
694 693 repo = self._repo
695 694 repo.ui.debug("getting subrepo %s\n" % self._path)
696 695 if inrepo:
697 696 urepo = repo.unfiltered()
698 697 ctx = urepo[revision]
699 698 if ctx.hidden():
700 699 urepo.ui.warn(
701 700 _('revision %s in subrepository "%s" is hidden\n') \
702 701 % (revision[0:12], self._path))
703 702 repo = urepo
704 703 hg.updaterepo(repo, revision, overwrite)
705 704
706 705 @annotatesubrepoerror
707 706 def merge(self, state):
708 707 self._get(state)
709 708 cur = self._repo['.']
710 709 dst = self._repo[state[1]]
711 710 anc = dst.ancestor(cur)
712 711
713 712 def mergefunc():
714 713 if anc == cur and dst.branch() == cur.branch():
715 714 self.ui.debug('updating subrepository "%s"\n'
716 715 % subrelpath(self))
717 716 hg.update(self._repo, state[1])
718 717 elif anc == dst:
719 718 self.ui.debug('skipping subrepository "%s"\n'
720 719 % subrelpath(self))
721 720 else:
722 721 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
723 722 hg.merge(self._repo, state[1], remind=False)
724 723
725 724 wctx = self._repo[None]
726 725 if self.dirty():
727 726 if anc != dst:
728 727 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
729 728 mergefunc()
730 729 else:
731 730 mergefunc()
732 731 else:
733 732 mergefunc()
734 733
    @annotatesubrepoerror
    def push(self, opts):
        """Push this subrepo, recursing into its own subrepos first.

        Returns False if a nested subrepo push failed, None if the store
        was already clean with respect to the destination (nothing to
        push), or the push's cgresult otherwise.
        """
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo['.']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), util.hidepassword(dsturl)))
                return None
        self.ui.status(_('pushing subrepo %s to %s\n') %
                       (subrelpath(self), util.hidepassword(dsturl)))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult
763 762
764 763 @annotatesubrepoerror
765 764 def outgoing(self, ui, dest, opts):
766 765 if 'rev' in opts or 'branch' in opts:
767 766 opts = copy.copy(opts)
768 767 opts.pop('rev', None)
769 768 opts.pop('branch', None)
770 769 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
771 770
772 771 @annotatesubrepoerror
773 772 def incoming(self, ui, source, opts):
774 773 if 'rev' in opts or 'branch' in opts:
775 774 opts = copy.copy(opts)
776 775 opts.pop('rev', None)
777 776 opts.pop('branch', None)
778 777 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
779 778
780 779 @annotatesubrepoerror
781 780 def files(self):
782 781 rev = self._state[1]
783 782 ctx = self._repo[rev]
784 783 return ctx.manifest().keys()
785 784
786 785 def filedata(self, name, decode):
787 786 rev = self._state[1]
788 787 data = self._repo[rev][name].data()
789 788 if decode:
790 789 data = self._repo.wwritedata(name, data)
791 790 return data
792 791
793 792 def fileflags(self, name):
794 793 rev = self._state[1]
795 794 ctx = self._repo[rev]
796 795 return ctx.flags(name)
797 796
    @annotatesubrepoerror
    def printfiles(self, ui, m, fm, fmt, subrepos):
        """List this subrepo's files through cmdutil.files()."""
        # If the parent context is a workingctx, use the workingctx here for
        # consistency.
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]
        return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
808 807
    @annotatesubrepoerror
    def matchfileset(self, expr, badfn=None):
        """Build a matcher for fileset ``expr``, recursing into subrepos.

        Each nested subrepo's matcher is wrapped in a prefixdirmatcher so
        its paths are relative to this repo; missing subrepos are skipped
        with a status message. Returns a single matcher, or a unionmatcher
        when subrepo matchers were collected.
        """
        repo = self._repo
        # mirror the parent context: working copy vs. recorded revision
        if self._ctx.rev() is None:
            ctx = repo[None]
        else:
            rev = self._state[1]
            ctx = repo[rev]

        matchers = [ctx.matchfileset(expr, badfn=badfn)]

        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
                sm = sub.matchfileset(expr, badfn=badfn)
                pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(),
                                               subpath, sm, badfn=badfn)
                matchers.append(pm)
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % self.wvfs.reljoin(reporelpath(self), subpath))
        if len(matchers) == 1:
            return matchers[0]
        return matchmod.unionmatcher(matchers)
834 833
835 834 def walk(self, match):
836 835 ctx = self._repo[None]
837 836 return ctx.walk(match)
838 837
839 838 @annotatesubrepoerror
840 839 def forget(self, match, prefix, dryrun, interactive):
841 840 return cmdutil.forget(self.ui, self._repo, match, prefix,
842 841 True, dryrun=dryrun, interactive=interactive)
843 842
    @annotatesubrepoerror
    def removefiles(self, matcher, prefix, after, force, subrepos,
                    dryrun, warnings):
        """Remove matched files in the subrepo via cmdutil.remove().

        NOTE(review): ``warnings`` is accepted for interface compatibility
        but is not forwarded to cmdutil.remove() here.
        """
        return cmdutil.remove(self.ui, self._repo, matcher, prefix,
                              after, force, subrepos, dryrun)
849 848
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        """Revert the subrepo to the revision recorded in ``substate``."""
        # reverting a subrepo is a 2 step process:
        # 1. if the no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts[r'date'] = None
            opts[r'rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)
871 870
    def filerevert(self, *pats, **opts):
        """Revert files inside the subrepo to opts['rev'].

        NOTE(review): the incoming ``*pats`` are unconditionally replaced
        below -- either all modified files (with 'all') or nothing is
        selected; confirm callers rely on this.
        """
        ctx = self._repo[opts[r'rev']]
        parents = self._repo.dirstate.parents()
        if opts.get(r'all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
880 879
881 880 def shortid(self, revid):
882 881 return revid[:12]
883 882
    @annotatesubrepoerror
    def unshare(self):
        """Convert a shared subrepo into a full standalone copy."""
        # subrepo inherently violates our import layering rules
        # because it wants to make repo objects from deep inside the stack
        # so we manually delay the circular imports to not break
        # scripts that don't use our demand-loading
        global hg
        from . import hg as h
        hg = h

        # Nothing prevents a user from sharing in a repo, and then making that a
        # subrepo. Alternately, the previous unshare attempt may have failed
        # part way through. So recurse whether or not this layer is shared.
        if self._repo.shared():
            self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)

        hg.unshare(self.ui, self._repo)
901 900
    def verify(self):
        """Sanity-check the recorded subrepo revision.

        Warns (but does not fail) when the revision is hidden or missing
        locally; always returns 0.
        """
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0
919 918
    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency

        The inner repo already maintains a working directory vfs; reuse it
        instead of constructing another one.
        """
        return self._repo.wvfs
925 924
    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path),
        # which would use platform separators
        return reporelpath(self._repo)
932 931
class svnsubrepo(abstractsubrepo):
    """Subversion subrepository support, implemented by shelling out to an
    external ``svn`` command line client found on PATH."""

    def __init__(self, ctx, path, state, allowcreate):
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        self._exe = procutil.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        """Run an svn command and return (stdout, stderr).

        ``filename`` (unless None) is resolved against the subrepo path and
        appended to the command. When ``failok`` is False, a non-zero exit
        code aborts and any stderr output is warned about.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw[r'stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(encoding.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
                             bufsize=-1, close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             env=procutil.tonativeenv(env), **extrakw)
        stdout, stderr = map(util.fromnativeeol, p.communicate())
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        """The (major, minor) version of the svn client, parsed from
        ``svn --version --quiet``; aborts if it cannot be parsed."""
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(br'^(\d+)\.(\d+)', output)
        if not m:
            raise error.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _svnmissing(self):
        # no .svn directory means there is no checkout at all
        return not self.wvfs.exists('.svn')

    def _wcrevs(self):
        """Return (lastrev, rev) from ``svn info --xml``."""
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName(r'entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
            commits = entries[0].getElementsByTagName(r'commit')
            if commits:
                lastrev = pycompat.bytestr(
                    commits[0].getAttribute(r'revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        """Return the last committed revision of the working copy."""
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName(r'entry'):
            s = e.getElementsByTagName(r'wc-status')
            if not s:
                continue
            item = s[0].getAttribute(r'item')
            props = s[0].getAttribute(r'props')
            path = e.getAttribute(r'path').encode('utf8')
            if item == r'external':
                externals.append(path)
            elif item == r'missing':
                missing.append(path)
            if (item not in (r'', r'normal', r'unversioned', r'external')
                or props not in (r'', r'none', r'normal')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + pycompat.ossep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        """True when the checkout differs from the recorded state (or, with
        ``missing``, has missing entries)."""
        if self._svnmissing():
            return self._state[1] != ''
        wcchanged = self._wcchanged()
        changed = wcchanged[0] or (missing and wcchanged[2])
        if not changed:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        """Return the revision this checkout is based on."""
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same than rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fallback to rev it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit the working copy changes and return the new revision."""
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        # NOTE(review): the trailing '.' is an unescaped regex dot; harmless
        # here since it also matches a literal period
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise error.Abort(_('failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        """Delete the checkout from disk, unless it has local changes."""
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._path)
            return
        self.ui.note(_('removing subrepo %s\n') % self._path)

        self.wvfs.rmtree(forcibly=True)
        try:
            # clean up now-empty parent directories of the subrepo path
            pwvfs = self._ctx.repo().wvfs
            pwvfs.removedirs(pwvfs.dirname(self._path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Check out the revision given by ``state``; with ``overwrite``
        local modifications are reverted first."""
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))

        # SEC: check that the ssh url is safe
        util.checksafessh(state[0])

        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self.wvfs, '.svn')
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise error.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        """'Merge' by checking out the new state, prompting first when the
        working copy is dirty (svn has no real merge here)."""
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self.ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        """List all files tracked by svn, via ``svn list --recursive``."""
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName(r'entry'):
            kind = pycompat.bytestr(e.getAttribute(r'kind'))
            if kind != 'file':
                continue
            name = r''.join(c.data for c
                            in e.getElementsByTagName(r'name')[0].childNodes
                            if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf8'))
        return paths

    def filedata(self, name, decode):
        # NOTE(review): ``decode`` is accepted for interface compatibility
        # but not applied here
        return self._svncommand(['cat'], name)[0]
1165 1164
1166 1165
1167 1166 class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        # state is the (source, revision, kind) tuple stored in .hgsubstate
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()
1174 1173
    def _ensuregit(self):
        """Locate a usable git executable and vet its version.

        Tries 'git', then 'git.cmd' on Windows; aborts when none is found
        or the version is older than 1.5, warns for 1.5.x or unparsable
        versions.
        """
        try:
            self._gitexecutable = 'git'
            out, err = self._gitnodir(['--version'])
        except OSError as e:
            genericerror = _("error executing git for subrepo '%s': %s")
            notfoundhint = _("check git is installed and in your PATH")
            if e.errno != errno.ENOENT:
                raise error.Abort(genericerror % (
                    self._path, encoding.strtolocal(e.strerror)))
            elif pycompat.iswindows:
                try:
                    self._gitexecutable = 'git.cmd'
                    out, err = self._gitnodir(['--version'])
                except OSError as e2:
                    if e2.errno == errno.ENOENT:
                        raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
                                            " for subrepo '%s'") % self._path,
                                          hint=notfoundhint)
                    else:
                        raise error.Abort(genericerror % (self._path,
                                          encoding.strtolocal(e2.strerror)))
            else:
                raise error.Abort(_("couldn't find git for subrepo '%s'")
                                  % self._path, hint=notfoundhint)
        versionstatus = self._checkversion(out)
        if versionstatus == 'unknown':
            self.ui.warn(_('cannot retrieve git version\n'))
        elif versionstatus == 'abort':
            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
        elif versionstatus == 'warning':
            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1207 1206
1208 1207 @staticmethod
1209 1208 def _gitversion(out):
1210 1209 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1211 1210 if m:
1212 1211 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1213 1212
1214 1213 m = re.search(br'^git version (\d+)\.(\d+)', out)
1215 1214 if m:
1216 1215 return (int(m.group(1)), int(m.group(2)), 0)
1217 1216
1218 1217 return -1
1219 1218
1220 1219 @staticmethod
1221 1220 def _checkversion(out):
1222 1221 '''ensure git version is new enough
1223 1222
1224 1223 >>> _checkversion = gitsubrepo._checkversion
1225 1224 >>> _checkversion(b'git version 1.6.0')
1226 1225 'ok'
1227 1226 >>> _checkversion(b'git version 1.8.5')
1228 1227 'ok'
1229 1228 >>> _checkversion(b'git version 1.4.0')
1230 1229 'abort'
1231 1230 >>> _checkversion(b'git version 1.5.0')
1232 1231 'warning'
1233 1232 >>> _checkversion(b'git version 1.9-rc0')
1234 1233 'ok'
1235 1234 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1236 1235 'ok'
1237 1236 >>> _checkversion(b'git version 1.9.0.GIT')
1238 1237 'ok'
1239 1238 >>> _checkversion(b'git version 12345')
1240 1239 'unknown'
1241 1240 >>> _checkversion(b'no')
1242 1241 'unknown'
1243 1242 '''
1244 1243 version = gitsubrepo._gitversion(out)
1245 1244 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1246 1245 # despite the docstring comment. For now, error on 1.4.0, warn on
1247 1246 # 1.5.0 but attempt to continue.
1248 1247 if version == -1:
1249 1248 return 'unknown'
1250 1249 if version < (1, 5, 0):
1251 1250 return 'abort'
1252 1251 elif version < (1, 6, 0):
1253 1252 return 'warning'
1254 1253 return 'ok'
1255 1254
    def _gitcommand(self, commands, env=None, stream=False):
        """Like _gitdir(), but return only stdout, dropping the exit code."""
        return self._gitdir(commands, env=env, stream=stream)[0]
1258 1257
    def _gitdir(self, commands, env=None, stream=False):
        """Run git with the subrepo checkout as its working directory."""
        return self._gitnodir(commands, env=env, stream=stream,
                              cwd=self._abspath)
1262 1261
    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        The methods tries to call the git command. versions prior to 1.6.0
        are not supported and very probably fail.

        Returns (stdout, returncode), or (stdout pipe, None) when
        ``stream`` is set. Exit codes 0 and 1 are tolerated; any other
        code aborts, except for 'cat-file' and 'symbolic-ref' whose codes
        are returned to the caller.
        """
        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env['LC_ALL'] = 'C'
        # fix for Git CVE-2015-7545
        if 'GIT_ALLOW_PROTOCOL' not in env:
            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = open(os.devnull, 'w')
        if self.ui._colormode and len(commands) and commands[0] == "diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, '--color')
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
                                             [self._gitexecutable] + commands),
                             bufsize=-1,
                             cwd=pycompat.rapply(procutil.tonativestr, cwd),
                             env=procutil.tonativeenv(env),
                             close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))

        return retdata, p.returncode
1310 1309
    def _gitmissing(self):
        """True when the subrepo checkout has no .git directory."""
        return not self.wvfs.exists('.git')
1313 1312
    def _gitstate(self):
        """Return the output of 'git rev-parse HEAD' (the current revision)."""
        return self._gitcommand(['rev-parse', 'HEAD'])
1316 1315
1317 1316 def _gitcurrentbranch(self):
1318 1317 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1319 1318 if err:
1320 1319 current = None
1321 1320 return current
1322 1321
1323 1322 def _gitremote(self, remote):
1324 1323 out = self._gitcommand(['remote', 'show', '-n', remote])
1325 1324 line = out.split('\n')[1]
1326 1325 i = line.index('URL: ') + len('URL: ')
1327 1326 return line[i:]
1328 1327
    def _githavelocally(self, revision):
        """True if ``revision`` exists in the local object store
        (via 'git cat-file -e')."""
        out, code = self._gitdir(['cat-file', '-e', revision])
        return code == 0
1332 1331
    def _gitisancestor(self, r1, r2):
        """True if r1 is an ancestor of r2 (their merge-base is r1 itself)."""
        base = self._gitcommand(['merge-base', r1, r2])
        return base == r1
1336 1335
    def _gitisbare(self):
        """True if core.bare is set in the subrepo's git config."""
        return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1339 1338
    def _gitupdatestat(self):
        """This must be run before git diff-index.
        diff-index only looks at changes to file stat;
        this command looks at file contents and updates the stat."""
        # -q suppresses the exit status for files that merely need refreshing
        self._gitcommand(['update-index', '-q', '--refresh'])
1345 1344
    def _gitbranchmap(self):
        '''returns 2 things:
        a map from git branch to revision
        a map from revision to branches

        Only refs/heads/* and refs/remotes/* are considered, and
        remote HEAD redirects are skipped.'''
        branch2rev = {}
        rev2branch = {}

        out = self._gitcommand(['for-each-ref', '--format',
                                '%(objectname) %(refname)'])
        for line in out.split('\n'):
            revision, ref = line.split(' ')
            if (not ref.startswith('refs/heads/') and
                not ref.startswith('refs/remotes/')):
                continue
            if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
                continue # ignore remote/HEAD redirects
            branch2rev[ref] = revision
            rev2branch.setdefault(revision, []).append(ref)
        return branch2rev, rev2branch
1365 1364
    def _gittracking(self, branches):
        'return map of remote branch to local tracking branch'
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            if b.startswith('refs/remotes/'):
                continue
            # strip the 'refs/heads/' prefix to get the branch name
            bname = b.split('/', 2)[2]
            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
                tracking['refs/remotes/%s/%s' %
                         (remote, ref.split('/', 2)[2])] = b
        return tracking
1380 1379
    def _abssource(self, source):
        """Resolve ``source`` relative to the parent repo, unless it is
        already absolute (a URL or scp-style host:path)."""
        if '://' not in source:
            # recognize the scp syntax as an absolute source
            colon = source.find(':')
            if colon != -1 and '/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)
1389 1388
    def _fetch(self, source, revision):
        """Ensure ``revision`` is available locally, cloning from ``source``
        or fetching from origin as needed; aborts if it is still absent."""
        if self._gitmissing():
            # SEC: check for safe ssh url
            util.checksafessh(source)

            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(_('revision %s does not exist in subrepository '
                                '"%s"\n') % (revision, self._relpath))
1408 1407
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        """True when the subrepo differs from its recorded state.

        NOTE(review): ``missing`` is accepted for interface compatibility
        and has no effect here; untracked files are always ignored.
        """
        if self._gitmissing():
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        return code == 1
1422 1421
    def basestate(self):
        """Return the revision the checkout is based on (current HEAD)."""
        return self._gitstate()
1425 1424
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Update the subrepo checkout to the revision in ``state``,
        fetching first when needed.

        Prefers checking out a branch that contains the revision --
        refs/heads/master first, then the first local branch, then a
        tracked remote branch -- and falls back to a detached checkout.
        """
        source, revision, kind = state
        if not revision:
            # an empty revision means the subrepo was removed
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            # run 'git checkout', forcing when overwrite is requested
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self.wvfs, '.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(_('checking out detached HEAD in '
                           'subrepository "%s"\n') % self._relpath)
            self.ui.warn(_('check out a git branch if you intend '
                           'to make changes\n'))
            checkout(['-q', revision])

        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == 'refs/heads/master':
                # master trumps all other branches
                checkout(['refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith('refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return

        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break

        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split('/', 3)[3]
            checkout(['-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand(['merge', '--ff', remote])
            _sanitize(self.ui, self.wvfs, '.git')
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()
1511 1510
    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit all subrepo changes and return the resulting HEAD."""
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        cmd = ['commit', '-a', '-m', text]
        env = encoding.environ.copy()
        if user:
            cmd += ['--author', user]
        if date:
            # git's date parser silently ignores when seconds < 1e9
            # convert to ISO8601
            env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
                                                      '%Y-%m-%dT%H:%M:%S %1%2')
        self._gitcommand(cmd, env=env)
        # make sure commit works otherwise HEAD might not exist under certain
        # circumstances
        return self._gitstate()
1529 1528
    @annotatesubrepoerror
    def merge(self, state):
        """Merge the revision in ``state`` into the subrepo checkout.

        Fast-forwards when possible, otherwise runs 'git merge
        --no-commit'; prompts first when the working copy is dirty.
        """
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
            _sanitize(self.ui, self.wvfs, '.git')

        if self.dirty():
            if self._gitstate() != revision:
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self.ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()
1553 1552
    @annotatesubrepoerror
    def push(self, opts):
        """Push the recorded revision to origin when necessary.

        Returns True when the revision is already reachable from origin or
        the push succeeded; False when no suitable branch is checked out
        or the push failed.
        """
        force = opts.get('force')

        if not self._state[1]:
            return True
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(_('unrelated git branch checked out '
                               'in subrepository "%s"\n') % self._relpath)
                return False
            self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
                           (current.split('/', 2)[2], self._relpath))
            ret = self._gitdir(cmd + ['origin', current])
            return ret[1] == 0
        else:
            self.ui.warn(_('no branch checked out in subrepository "%s"\n'
                           'cannot push revision %s\n') %
                         (self._relpath, self._state[1]))
            return False
1593 1592
1594 1593 @annotatesubrepoerror
1595 1594 def add(self, ui, match, prefix, explicitonly, **opts):
1596 1595 if self._gitmissing():
1597 1596 return []
1598 1597
1599 1598 s = self.status(None, unknown=True, clean=True)
1600 1599
1601 1600 tracked = set()
1602 1601 # dirstates 'amn' warn, 'r' is added again
1603 1602 for l in (s.modified, s.added, s.deleted, s.clean):
1604 1603 tracked.update(l)
1605 1604
1606 1605 # Unknown files not of interest will be rejected by the matcher
1607 1606 files = s.unknown
1608 1607 files.extend(match.files())
1609 1608
1610 1609 rejected = []
1611 1610
1612 1611 files = [f for f in sorted(set(files)) if match(f)]
1613 1612 for f in files:
1614 1613 exact = match.exact(f)
1615 1614 command = ["add"]
1616 1615 if exact:
1617 1616 command.append("-f") #should be added, even if ignored
1618 1617 if ui.verbose or not exact:
1619 1618 ui.status(_('adding %s\n') % match.rel(f))
1620 1619
1621 1620 if f in tracked: # hg prints 'adding' even if already tracked
1622 1621 if exact:
1623 1622 rejected.append(f)
1624 1623 continue
1625 1624 if not opts.get(r'dry_run'):
1626 1625 self._gitcommand(command + [f])
1627 1626
1628 1627 for f in rejected:
1629 1628 ui.warn(_("%s already tracked!\n") % match.abs(f))
1630 1629
1631 1630 return rejected
1632 1631
1633 1632 @annotatesubrepoerror
1634 1633 def remove(self):
1635 1634 if self._gitmissing():
1636 1635 return
1637 1636 if self.dirty():
1638 1637 self.ui.warn(_('not removing repo %s because '
1639 1638 'it has changes.\n') % self._relpath)
1640 1639 return
1641 1640 # we can't fully delete the repository as it may contain
1642 1641 # local-only history
1643 1642 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1644 1643 self._gitcommand(['config', 'core.bare', 'true'])
1645 1644 for f, kind in self.wvfs.readdir():
1646 1645 if f == '.git':
1647 1646 continue
1648 1647 if kind == stat.S_IFDIR:
1649 1648 self.wvfs.rmtree(f)
1650 1649 else:
1651 1650 self.wvfs.unlink(f)
1652 1651
1653 1652 def archive(self, archiver, prefix, match=None, decode=True):
1654 1653 total = 0
1655 1654 source, revision = self._state
1656 1655 if not revision:
1657 1656 return total
1658 1657 self._fetch(source, revision)
1659 1658
1660 1659 # Parse git's native archive command.
1661 1660 # This should be much faster than manually traversing the trees
1662 1661 # and objects with many subprocess calls.
1663 1662 tarstream = self._gitcommand(['archive', revision], stream=True)
1664 1663 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1665 1664 relpath = subrelpath(self)
1666 1665 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1667 1666 unit=_('files'))
1668 1667 progress.update(0)
1669 1668 for info in tar:
1670 1669 if info.isdir():
1671 1670 continue
1672 1671 bname = pycompat.fsencode(info.name)
1673 1672 if match and not match(bname):
1674 1673 continue
1675 1674 if info.issym():
1676 1675 data = info.linkname
1677 1676 else:
1678 1677 data = tar.extractfile(info).read()
1679 1678 archiver.addfile(prefix + self._path + '/' + bname,
1680 1679 info.mode, info.issym(), data)
1681 1680 total += 1
1682 1681 progress.increment()
1683 1682 progress.complete()
1684 1683 return total
1685 1684
1686 1685
1687 1686 @annotatesubrepoerror
1688 1687 def cat(self, match, fm, fntemplate, prefix, **opts):
1689 1688 rev = self._state[1]
1690 1689 if match.anypats():
1691 1690 return 1 #No support for include/exclude yet
1692 1691
1693 1692 if not match.files():
1694 1693 return 1
1695 1694
1696 1695 # TODO: add support for non-plain formatter (see cmdutil.cat())
1697 1696 for f in match.files():
1698 1697 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1699 1698 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1700 1699 pathname=self.wvfs.reljoin(prefix, f))
1701 1700 fp.write(output)
1702 1701 fp.close()
1703 1702 return 0
1704 1703
1705 1704
    @annotatesubrepoerror
    def status(self, rev2, **opts):
        """Return an scmutil.status tuple for this git subrepo.

        Compares the recorded state revision against ``rev2``, or against
        the working directory (index) when ``rev2`` is falsy.  'unknown',
        'ignored' and 'clean' are only filled in when the corresponding
        keyword option is set; 'deleted' is always left empty here.
        """
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return scmutil.status([], [], [], [], [], [], [])
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
        else:
            command = ['diff-index', '--no-renames', rev1]
        out = self._gitcommand(command)
        for line in out.split('\n'):
            tab = line.find('\t')
            if tab == -1:
                continue
            # the single status letter sits immediately before the tab
            # (--no-renames guarantees one-letter statuses)
            status, f = line[tab - 1:tab], line[tab + 1:]
            if status == 'M':
                modified.append(f)
            elif status == 'A':
                added.append(f)
            elif status == 'D':
                removed.append(f)

        deleted, unknown, ignored, clean = [], [], [], []

        command = ['status', '--porcelain', '-z']
        if opts.get(r'unknown'):
            command += ['--untracked-files=all']
        if opts.get(r'ignored'):
            command += ['--ignored']
        out = self._gitcommand(command)

        changedfiles = set()
        changedfiles.update(modified)
        changedfiles.update(added)
        changedfiles.update(removed)
        for line in out.split('\0'):
            if not line:
                continue
            st = line[0:2]
            #moves and copies show 2 files on one line
            # NOTE(review): since the output was already split on NUL above,
            # a second NUL can never survive into `line`, so this branch
            # looks unreachable -- confirm against `git status -z` format
            if line.find('\0') >= 0:
                filename1, filename2 = line[3:].split('\0')
            else:
                filename1 = line[3:]
                filename2 = None

            changedfiles.add(filename1)
            if filename2:
                changedfiles.add(filename2)

            if st == '??':
                unknown.append(filename1)
            elif st == '!!':
                ignored.append(filename1)

        if opts.get(r'clean'):
            # clean = tracked files not already classified as changed
            out = self._gitcommand(['ls-files'])
            for f in out.split('\n'):
                if not f in changedfiles:
                    clean.append(f)

        return scmutil.status(modified, added, removed, deleted,
                              unknown, ignored, clean)
1772 1771
1773 1772 @annotatesubrepoerror
1774 1773 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1775 1774 node1 = self._state[1]
1776 1775 cmd = ['diff', '--no-renames']
1777 1776 if opts[r'stat']:
1778 1777 cmd.append('--stat')
1779 1778 else:
1780 1779 # for Git, this also implies '-p'
1781 1780 cmd.append('-U%d' % diffopts.context)
1782 1781
1783 1782 gitprefix = self.wvfs.reljoin(prefix, self._path)
1784 1783
1785 1784 if diffopts.noprefix:
1786 1785 cmd.extend(['--src-prefix=%s/' % gitprefix,
1787 1786 '--dst-prefix=%s/' % gitprefix])
1788 1787 else:
1789 1788 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1790 1789 '--dst-prefix=b/%s/' % gitprefix])
1791 1790
1792 1791 if diffopts.ignorews:
1793 1792 cmd.append('--ignore-all-space')
1794 1793 if diffopts.ignorewsamount:
1795 1794 cmd.append('--ignore-space-change')
1796 1795 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1797 1796 and diffopts.ignoreblanklines:
1798 1797 cmd.append('--ignore-blank-lines')
1799 1798
1800 1799 cmd.append(node1)
1801 1800 if node2:
1802 1801 cmd.append(node2)
1803 1802
1804 1803 output = ""
1805 1804 if match.always():
1806 1805 output += self._gitcommand(cmd) + '\n'
1807 1806 else:
1808 1807 st = self.status(node2)[:3]
1809 1808 files = [f for sublist in st for f in sublist]
1810 1809 for f in files:
1811 1810 if match(f):
1812 1811 output += self._gitcommand(cmd + ['--', f]) + '\n'
1813 1812
1814 1813 if output.strip():
1815 1814 ui.write(output)
1816 1815
1817 1816 @annotatesubrepoerror
1818 1817 def revert(self, substate, *pats, **opts):
1819 1818 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1820 1819 if not opts.get(r'no_backup'):
1821 1820 status = self.status(None)
1822 1821 names = status.modified
1823 1822 for name in names:
1824 1823 # backuppath() expects a path relative to the parent repo (the
1825 1824 # repo that ui.origbackuppath is relative to)
1826 1825 parentname = os.path.join(self._path, name)
1827 1826 bakname = scmutil.backuppath(self.ui, self._subparent,
1828 1827 parentname)
1829 1828 self.ui.note(_('saving current version of %s as %s\n') %
1830 1829 (name, os.path.relpath(bakname)))
1831 1830 util.rename(self.wvfs.join(name), bakname)
1832 1831
1833 1832 if not opts.get(r'dry_run'):
1834 1833 self.get(substate, overwrite=True)
1835 1834 return []
1836 1835
1837 1836 def shortid(self, revid):
1838 1837 return revid[:7]
1839 1838
# Map of subrepo kind (the key stored in .hgsubstate/.hgsub entries) to the
# implementation class defined earlier in this module.
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
General Comments 0
You need to be logged in to leave comments. Login now