rebase: provide detailed hint to abort message if working dir is not clean...
Valters Vingolds
r30755:0fbb3a5c default
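The user-visible effect of this change: when the working directory has uncommitted changes, `hg pull --rebase` now aborts with a hint naming the exact commands to run, instead of a bare error. A rough sketch of the expected transcript (the "abort: uncommitted changes" line comes from cmdutil.bailifchanged; exact wording may differ between Mercurial versions):

  $ hg pull --rebase
  abort: uncommitted changes
  (cannot pull with rebase: please commit or shelve your changes first)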
@@ -1,1463 +1,1463 b''
1 1 # rebase.py - rebasing feature for mercurial
2 2 #
3 3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to move sets of revisions to a different ancestor
9 9
10 10 This extension lets you rebase changesets in an existing Mercurial
11 11 repository.
12 12
13 13 For more information:
14 14 https://mercurial-scm.org/wiki/RebaseExtension
15 15 '''
16 16
17 17 from __future__ import absolute_import
18 18
19 19 import errno
20 20 import os
21 21
22 22 from mercurial.i18n import _
23 23 from mercurial.node import (
24 24 hex,
25 25 nullid,
26 26 nullrev,
27 27 short,
28 28 )
29 29 from mercurial import (
30 30 bookmarks,
31 31 cmdutil,
32 32 commands,
33 33 copies,
34 34 destutil,
35 35 dirstateguard,
36 36 error,
37 37 extensions,
38 38 hg,
39 39 lock,
40 40 merge as mergemod,
41 41 mergeutil,
42 42 obsolete,
43 43 patch,
44 44 phases,
45 45 registrar,
46 46 repair,
47 47 repoview,
48 48 revset,
49 49 scmutil,
50 50 util,
51 51 )
52 52
53 53 release = lock.release
54 54 templateopts = commands.templateopts
55 55
56 56 # The following constants are used throughout the rebase module. The ordering of
57 57 # their values must be maintained.
58 58
59 59 # Indicates that a revision needs to be rebased
60 60 revtodo = -1
61 61 nullmerge = -2
62 62 revignored = -3
63 63 # successor in rebase destination
64 64 revprecursor = -4
65 65 # plain prune (no successor)
66 66 revpruned = -5
67 67 revskipped = (revignored, revprecursor, revpruned)
68 68
69 69 cmdtable = {}
70 70 command = cmdutil.command(cmdtable)
71 71 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
72 72 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
73 73 # be specifying the version(s) of Mercurial they are tested with, or
74 74 # leave the attribute unspecified.
75 75 testedwith = 'ships-with-hg-core'
76 76
77 77 def _nothingtorebase():
78 78 return 1
79 79
80 80 def _savegraft(ctx, extra):
81 81 s = ctx.extra().get('source', None)
82 82 if s is not None:
83 83 extra['source'] = s
84 84 s = ctx.extra().get('intermediate-source', None)
85 85 if s is not None:
86 86 extra['intermediate-source'] = s
87 87
88 88 def _savebranch(ctx, extra):
89 89 extra['branch'] = ctx.branch()
90 90
91 91 def _makeextrafn(copiers):
92 92 """make an extrafn out of the given copy-functions.
93 93
94 94 A copy function takes a context and an extra dict, and mutates the
95 95 extra dict as needed based on the given context.
96 96 """
97 97 def extrafn(ctx, extra):
98 98 for c in copiers:
99 99 c(ctx, extra)
100 100 return extrafn
101 101
102 102 def _destrebase(repo, sourceset, destspace=None):
103 103 """small wrapper around destmerge to pass the right extra args
104 104
105 105 Please wrap destutil.destmerge instead."""
106 106 return destutil.destmerge(repo, action='rebase', sourceset=sourceset,
107 107 onheadcheck=False, destspace=destspace)
108 108
109 109 revsetpredicate = registrar.revsetpredicate()
110 110
111 111 @revsetpredicate('_destrebase')
112 112 def _revsetdestrebase(repo, subset, x):
113 113 # ``_rebasedefaultdest()``
114 114
115 115 # default destination for rebase.
116 116 # # XXX: Currently private because I expect the signature to change.
117 117 # # XXX: - bailing out in case of ambiguity vs returning all data.
118 118 # i18n: "_rebasedefaultdest" is a keyword
119 119 sourceset = None
120 120 if x is not None:
121 121 sourceset = revset.getset(repo, revset.fullreposet(repo), x)
122 122 return subset & revset.baseset([_destrebase(repo, sourceset)])
123 123
124 124 class rebaseruntime(object):
125 125 """This class is a container for rebase runtime state"""
126 126 def __init__(self, repo, ui, opts=None):
127 127 if opts is None:
128 128 opts = {}
129 129
130 130 self.repo = repo
131 131 self.ui = ui
132 132 self.opts = opts
133 133 self.originalwd = None
134 134 self.external = nullrev
135 135 # Mapping between the old revision id and either what is the new rebased
136 136 # revision or what needs to be done with the old revision. The state
137 137 # dict will be what contains most of the rebase progress state.
138 138 self.state = {}
139 139 self.activebookmark = None
140 140 self.currentbookmarks = None
141 141 self.target = None
142 142 self.skipped = set()
143 143 self.targetancestors = set()
144 144
145 145 self.collapsef = opts.get('collapse', False)
146 146 self.collapsemsg = cmdutil.logmessage(ui, opts)
147 147 self.date = opts.get('date', None)
148 148
149 149 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
150 150 self.extrafns = [_savegraft]
151 151 if e:
152 152 self.extrafns = [e]
153 153
154 154 self.keepf = opts.get('keep', False)
155 155 self.keepbranchesf = opts.get('keepbranches', False)
156 156 # keepopen is not meant for use on the command line, but by
157 157 # other extensions
158 158 self.keepopen = opts.get('keepopen', False)
159 159 self.obsoletenotrebased = {}
160 160
161 161 def restorestatus(self):
162 162 """Restore a previously stored status"""
163 163 repo = self.repo
164 164 keepbranches = None
165 165 target = None
166 166 collapse = False
167 167 external = nullrev
168 168 activebookmark = None
169 169 state = {}
170 170
171 171 try:
172 172 f = repo.vfs("rebasestate")
173 173 for i, l in enumerate(f.read().splitlines()):
174 174 if i == 0:
175 175 originalwd = repo[l].rev()
176 176 elif i == 1:
177 177 target = repo[l].rev()
178 178 elif i == 2:
179 179 external = repo[l].rev()
180 180 elif i == 3:
181 181 collapse = bool(int(l))
182 182 elif i == 4:
183 183 keep = bool(int(l))
184 184 elif i == 5:
185 185 keepbranches = bool(int(l))
186 186 elif i == 6 and not (len(l) == 81 and ':' in l):
187 187 # line 6 is a recent addition, so for backwards
188 188 # compatibility check that the line doesn't look like the
189 189 # oldrev:newrev lines
190 190 activebookmark = l
191 191 else:
192 192 oldrev, newrev = l.split(':')
193 193 if newrev in (str(nullmerge), str(revignored),
194 194 str(revprecursor), str(revpruned)):
195 195 state[repo[oldrev].rev()] = int(newrev)
196 196 elif newrev == nullid:
197 197 state[repo[oldrev].rev()] = revtodo
198 198 # Legacy compat special case
199 199 else:
200 200 state[repo[oldrev].rev()] = repo[newrev].rev()
201 201
202 202 except IOError as err:
203 203 if err.errno != errno.ENOENT:
204 204 raise
205 205 cmdutil.wrongtooltocontinue(repo, _('rebase'))
206 206
207 207 if keepbranches is None:
208 208 raise error.Abort(_('.hg/rebasestate is incomplete'))
209 209
210 210 skipped = set()
211 211 # recompute the set of skipped revs
212 212 if not collapse:
213 213 seen = set([target])
214 214 for old, new in sorted(state.items()):
215 215 if new != revtodo and new in seen:
216 216 skipped.add(old)
217 217 seen.add(new)
218 218 repo.ui.debug('computed skipped revs: %s\n' %
219 219 (' '.join(str(r) for r in sorted(skipped)) or None))
220 220 repo.ui.debug('rebase status resumed\n')
221 221 _setrebasesetvisibility(repo, state.keys())
222 222
223 223 self.originalwd = originalwd
224 224 self.target = target
225 225 self.state = state
226 226 self.skipped = skipped
227 227 self.collapsef = collapse
228 228 self.keepf = keep
229 229 self.keepbranchesf = keepbranches
230 230 self.external = external
231 231 self.activebookmark = activebookmark
232 232
233 233 def _handleskippingobsolete(self, rebaserevs, obsoleterevs, target):
234 234 """Compute structures necessary for skipping obsolete revisions
235 235
236 236 rebaserevs: iterable of all revisions that are to be rebased
237 237 obsoleterevs: iterable of all obsolete revisions in rebaseset
238 238 target: a destination revision for the rebase operation
239 239 """
240 240 self.obsoletenotrebased = {}
241 241 if not self.ui.configbool('experimental', 'rebaseskipobsolete',
242 242 default=True):
243 243 return
244 244 rebaseset = set(rebaserevs)
245 245 obsoleteset = set(obsoleterevs)
246 246 self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
247 247 obsoleteset, target)
248 248 skippedset = set(self.obsoletenotrebased)
249 249 _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
250 250
251 251 def _prepareabortorcontinue(self, isabort):
252 252 try:
253 253 self.restorestatus()
254 254 self.collapsemsg = restorecollapsemsg(self.repo)
255 255 except error.RepoLookupError:
256 256 if isabort:
257 257 clearstatus(self.repo)
258 258 clearcollapsemsg(self.repo)
259 259 self.repo.ui.warn(_('rebase aborted (no revision is removed,'
260 260 ' only broken state is cleared)\n'))
261 261 return 0
262 262 else:
263 263 msg = _('cannot continue inconsistent rebase')
264 264 hint = _('use "hg rebase --abort" to clear broken state')
265 265 raise error.Abort(msg, hint=hint)
266 266 if isabort:
267 267 return abort(self.repo, self.originalwd, self.target,
268 268 self.state, activebookmark=self.activebookmark)
269 269
270 270 obsrevs = (r for r, st in self.state.items() if st == revprecursor)
271 271 self._handleskippingobsolete(self.state.keys(), obsrevs, self.target)
272 272
273 273 def _preparenewrebase(self, dest, rebaseset):
274 274 if dest is None:
275 275 return _nothingtorebase()
276 276
277 277 allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
278 278 if (not (self.keepf or allowunstable)
279 279 and self.repo.revs('first(children(%ld) - %ld)',
280 280 rebaseset, rebaseset)):
281 281 raise error.Abort(
282 282 _("can't remove original changesets with"
283 283 " unrebased descendants"),
284 284 hint=_('use --keep to keep original changesets'))
285 285
286 286 obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
287 287 self._handleskippingobsolete(rebaseset, obsrevs, dest)
288 288
289 289 result = buildstate(self.repo, dest, rebaseset, self.collapsef,
290 290 self.obsoletenotrebased)
291 291
292 292 if not result:
293 293 # Empty state built, nothing to rebase
294 294 self.ui.status(_('nothing to rebase\n'))
295 295 return _nothingtorebase()
296 296
297 297 root = min(rebaseset)
298 298 if not self.keepf and not self.repo[root].mutable():
299 299 raise error.Abort(_("can't rebase public changeset %s")
300 300 % self.repo[root],
301 301 hint=_("see 'hg help phases' for details"))
302 302
303 303 (self.originalwd, self.target, self.state) = result
304 304 if self.collapsef:
305 305 self.targetancestors = self.repo.changelog.ancestors(
306 306 [self.target],
307 307 inclusive=True)
308 308 self.external = externalparent(self.repo, self.state,
309 309 self.targetancestors)
310 310
311 311 if dest.closesbranch() and not self.keepbranchesf:
312 312 self.ui.status(_('reopening closed branch head %s\n') % dest)
313 313
314 314 def _performrebase(self):
315 315 repo, ui, opts = self.repo, self.ui, self.opts
316 316 if self.keepbranchesf:
317 317 # insert _savebranch at the start of extrafns so if
318 318 # there's a user-provided extrafn it can clobber branch if
319 319 # desired
320 320 self.extrafns.insert(0, _savebranch)
321 321 if self.collapsef:
322 322 branches = set()
323 323 for rev in self.state:
324 324 branches.add(repo[rev].branch())
325 325 if len(branches) > 1:
326 326 raise error.Abort(_('cannot collapse multiple named '
327 327 'branches'))
328 328
329 329 # Rebase
330 330 if not self.targetancestors:
331 331 self.targetancestors = repo.changelog.ancestors([self.target],
332 332 inclusive=True)
333 333
334 334 # Keep track of the current bookmarks in order to reset them later
335 335 self.currentbookmarks = repo._bookmarks.copy()
336 336 self.activebookmark = self.activebookmark or repo._activebookmark
337 337 if self.activebookmark:
338 338 bookmarks.deactivate(repo)
339 339
340 340 sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
341 341 cands = [k for k, v in self.state.iteritems() if v == revtodo]
342 342 total = len(cands)
343 343 pos = 0
344 344 for rev in sortedrevs:
345 345 ctx = repo[rev]
346 346 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
347 347 ctx.description().split('\n', 1)[0])
348 348 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
349 349 if names:
350 350 desc += ' (%s)' % ' '.join(names)
351 351 if self.state[rev] == revtodo:
352 352 pos += 1
353 353 ui.status(_('rebasing %s\n') % desc)
354 354 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
355 355 _('changesets'), total)
356 356 p1, p2, base = defineparents(repo, rev, self.target,
357 357 self.state,
358 358 self.targetancestors,
359 359 self.obsoletenotrebased)
360 360 storestatus(repo, self.originalwd, self.target,
361 361 self.state, self.collapsef, self.keepf,
362 362 self.keepbranchesf, self.external,
363 363 self.activebookmark)
364 364 storecollapsemsg(repo, self.collapsemsg)
365 365 if len(repo[None].parents()) == 2:
366 366 repo.ui.debug('resuming interrupted rebase\n')
367 367 else:
368 368 try:
369 369 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
370 370 'rebase')
371 371 stats = rebasenode(repo, rev, p1, base, self.state,
372 372 self.collapsef, self.target)
373 373 if stats and stats[3] > 0:
374 374 raise error.InterventionRequired(
375 375 _('unresolved conflicts (see hg '
376 376 'resolve, then hg rebase --continue)'))
377 377 finally:
378 378 ui.setconfig('ui', 'forcemerge', '', 'rebase')
379 379 if not self.collapsef:
380 380 merging = p2 != nullrev
381 381 editform = cmdutil.mergeeditform(merging, 'rebase')
382 382 editor = cmdutil.getcommiteditor(editform=editform, **opts)
383 383 newnode = concludenode(repo, rev, p1, p2,
384 384 extrafn=_makeextrafn(self.extrafns),
385 385 editor=editor,
386 386 keepbranches=self.keepbranchesf,
387 387 date=self.date)
388 388 else:
389 389 # Skip commit if we are collapsing
390 390 repo.dirstate.beginparentchange()
391 391 repo.setparents(repo[p1].node())
392 392 repo.dirstate.endparentchange()
393 393 newnode = None
394 394 # Update the state
395 395 if newnode is not None:
396 396 self.state[rev] = repo[newnode].rev()
397 397 ui.debug('rebased as %s\n' % short(newnode))
398 398 else:
399 399 if not self.collapsef:
400 400 ui.warn(_('note: rebase of %d:%s created no changes '
401 401 'to commit\n') % (rev, ctx))
402 402 self.skipped.add(rev)
403 403 self.state[rev] = p1
404 404 ui.debug('next revision set to %s\n' % p1)
405 405 elif self.state[rev] == nullmerge:
406 406 ui.debug('ignoring null merge rebase of %s\n' % rev)
407 407 elif self.state[rev] == revignored:
408 408 ui.status(_('not rebasing ignored %s\n') % desc)
409 409 elif self.state[rev] == revprecursor:
410 410 targetctx = repo[self.obsoletenotrebased[rev]]
411 411 desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
412 412 targetctx.description().split('\n', 1)[0])
413 413 msg = _('note: not rebasing %s, already in destination as %s\n')
414 414 ui.status(msg % (desc, desctarget))
415 415 elif self.state[rev] == revpruned:
416 416 msg = _('note: not rebasing %s, it has no successor\n')
417 417 ui.status(msg % desc)
418 418 else:
419 419 ui.status(_('already rebased %s as %s\n') %
420 420 (desc, repo[self.state[rev]]))
421 421
422 422 ui.progress(_('rebasing'), None)
423 423 ui.note(_('rebase merging completed\n'))
424 424
425 425 def _finishrebase(self):
426 426 repo, ui, opts = self.repo, self.ui, self.opts
427 427 if self.collapsef and not self.keepopen:
428 428 p1, p2, _base = defineparents(repo, min(self.state),
429 429 self.target, self.state,
430 430 self.targetancestors,
431 431 self.obsoletenotrebased)
432 432 editopt = opts.get('edit')
433 433 editform = 'rebase.collapse'
434 434 if self.collapsemsg:
435 435 commitmsg = self.collapsemsg
436 436 else:
437 437 commitmsg = 'Collapsed revision'
438 438 for rebased in self.state:
439 439 if rebased not in self.skipped and\
440 440 self.state[rebased] > nullmerge:
441 441 commitmsg += '\n* %s' % repo[rebased].description()
442 442 editopt = True
443 443 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
444 444 revtoreuse = max(self.state)
445 445 newnode = concludenode(repo, revtoreuse, p1, self.external,
446 446 commitmsg=commitmsg,
447 447 extrafn=_makeextrafn(self.extrafns),
448 448 editor=editor,
449 449 keepbranches=self.keepbranchesf,
450 450 date=self.date)
451 451 if newnode is None:
452 452 newrev = self.target
453 453 else:
454 454 newrev = repo[newnode].rev()
455 455 for oldrev in self.state.iterkeys():
456 456 if self.state[oldrev] > nullmerge:
457 457 self.state[oldrev] = newrev
458 458
459 459 if 'qtip' in repo.tags():
460 460 updatemq(repo, self.state, self.skipped, **opts)
461 461
462 462 if self.currentbookmarks:
463 463 # Nodeids are needed to reset bookmarks
464 464 nstate = {}
465 465 for k, v in self.state.iteritems():
466 466 if v > nullmerge:
467 467 nstate[repo[k].node()] = repo[v].node()
468 468 elif v == revprecursor:
469 469 succ = self.obsoletenotrebased[k]
470 470 nstate[repo[k].node()] = repo[succ].node()
471 471 # XXX this is the same as dest.node() for the non-continue path --
472 472 # this should probably be cleaned up
473 473 targetnode = repo[self.target].node()
474 474
475 475 # restore original working directory
476 476 # (we do this before stripping)
477 477 newwd = self.state.get(self.originalwd, self.originalwd)
478 478 if newwd == revprecursor:
479 479 newwd = self.obsoletenotrebased[self.originalwd]
480 480 elif newwd < 0:
481 481 # original directory is a parent of rebase set root or ignored
482 482 newwd = self.originalwd
483 483 if newwd not in [c.rev() for c in repo[None].parents()]:
484 484 ui.note(_("update back to initial working directory parent\n"))
485 485 hg.updaterepo(repo, newwd, False)
486 486
487 487 if self.currentbookmarks:
488 488 with repo.transaction('bookmark') as tr:
489 489 updatebookmarks(repo, targetnode, nstate,
490 490 self.currentbookmarks, tr)
491 491 if self.activebookmark not in repo._bookmarks:
492 492 # active bookmark was divergent one and has been deleted
493 493 self.activebookmark = None
494 494
495 495 if not self.keepf:
496 496 collapsedas = None
497 497 if self.collapsef:
498 498 collapsedas = newnode
499 499 clearrebased(ui, repo, self.state, self.skipped, collapsedas)
500 500
501 501 clearstatus(repo)
502 502 clearcollapsemsg(repo)
503 503
504 504 ui.note(_("rebase completed\n"))
505 505 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
506 506 if self.skipped:
507 507 skippedlen = len(self.skipped)
508 508 ui.note(_("%d revisions have been skipped\n") % skippedlen)
509 509
510 510 if (self.activebookmark and
511 511 repo['.'].node() == repo._bookmarks[self.activebookmark]):
512 512 bookmarks.activate(repo, self.activebookmark)
513 513
514 514 @command('rebase',
515 515 [('s', 'source', '',
516 516 _('rebase the specified changeset and descendants'), _('REV')),
517 517 ('b', 'base', '',
518 518 _('rebase everything from branching point of specified changeset'),
519 519 _('REV')),
520 520 ('r', 'rev', [],
521 521 _('rebase these revisions'),
522 522 _('REV')),
523 523 ('d', 'dest', '',
524 524 _('rebase onto the specified changeset'), _('REV')),
525 525 ('', 'collapse', False, _('collapse the rebased changesets')),
526 526 ('m', 'message', '',
527 527 _('use text as collapse commit message'), _('TEXT')),
528 528 ('e', 'edit', False, _('invoke editor on commit messages')),
529 529 ('l', 'logfile', '',
530 530 _('read collapse commit message from file'), _('FILE')),
531 531 ('k', 'keep', False, _('keep original changesets')),
532 532 ('', 'keepbranches', False, _('keep original branch names')),
533 533 ('D', 'detach', False, _('(DEPRECATED)')),
534 534 ('i', 'interactive', False, _('(DEPRECATED)')),
535 535 ('t', 'tool', '', _('specify merge tool')),
536 536 ('c', 'continue', False, _('continue an interrupted rebase')),
537 537 ('a', 'abort', False, _('abort an interrupted rebase'))] +
538 538 templateopts,
539 539 _('[-s REV | -b REV] [-d REV] [OPTION]'))
540 540 def rebase(ui, repo, **opts):
541 541 """move changeset (and descendants) to a different branch
542 542
543 543 Rebase uses repeated merging to graft changesets from one part of
544 544 history (the source) onto another (the destination). This can be
545 545 useful for linearizing *local* changes relative to a master
546 546 development tree.
547 547
548 548 Published commits cannot be rebased (see :hg:`help phases`).
549 549 To copy commits, see :hg:`help graft`.
550 550
551 551 If you don't specify a destination changeset (``-d/--dest``), rebase
552 552 will use the same logic as :hg:`merge` to pick a destination. If
553 553 the current branch contains exactly one other head, the other head
554 554 is merged with by default. Otherwise, an explicit revision with
555 555 which to merge must be provided. (The destination changeset is not
556 556 modified by rebasing, but new changesets are added as its
557 557 descendants.)
558 558
559 559 Here are the ways to select changesets:
560 560
561 561 1. Explicitly select them using ``--rev``.
562 562
563 563 2. Use ``--source`` to select a root changeset and include all of its
564 564 descendants.
565 565
566 566 3. Use ``--base`` to select a changeset; rebase will find ancestors
567 567 and their descendants which are not also ancestors of the destination.
568 568
569 569 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
570 570 rebase will use ``--base .`` as above.
571 571
572 572 Rebase will destroy original changesets unless you use ``--keep``.
573 573 It will also move your bookmarks (even if you do).
574 574
575 575 Some changesets may be dropped if they do not contribute changes
576 576 (e.g. merges from the destination branch).
577 577
578 578 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
579 579 a named branch with two heads. You will need to explicitly specify source
580 580 and/or destination.
581 581
582 582 If you need to use a tool to automate merge/conflict decisions, you
583 583 can specify one with ``--tool``, see :hg:`help merge-tools`.
584 584 As a caveat: the tool will not be used to mediate when a file was
585 585 deleted, there is no hook presently available for this.
586 586
587 587 If a rebase is interrupted to manually resolve a conflict, it can be
588 588 continued with --continue/-c or aborted with --abort/-a.
589 589
590 590 .. container:: verbose
591 591
592 592 Examples:
593 593
594 594 - move "local changes" (current commit back to branching point)
595 595 to the current branch tip after a pull::
596 596
597 597 hg rebase
598 598
599 599 - move a single changeset to the stable branch::
600 600
601 601 hg rebase -r 5f493448 -d stable
602 602
603 603 - splice a commit and all its descendants onto another part of history::
604 604
605 605 hg rebase --source c0c3 --dest 4cf9
606 606
607 607 - rebase everything on a branch marked by a bookmark onto the
608 608 default branch::
609 609
610 610 hg rebase --base myfeature --dest default
611 611
612 612 - collapse a sequence of changes into a single commit::
613 613
614 614 hg rebase --collapse -r 1520:1525 -d .
615 615
616 616 - move a named branch while preserving its name::
617 617
618 618 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
619 619
620 620 Returns 0 on success, 1 if nothing to rebase or there are
621 621 unresolved conflicts.
622 622
623 623 """
624 624 rbsrt = rebaseruntime(repo, ui, opts)
625 625
626 626 lock = wlock = None
627 627 try:
628 628 wlock = repo.wlock()
629 629 lock = repo.lock()
630 630
631 631 # Validate input and define rebasing points
632 632 destf = opts.get('dest', None)
633 633 srcf = opts.get('source', None)
634 634 basef = opts.get('base', None)
635 635 revf = opts.get('rev', [])
636 636 # search default destination in this space
637 637 # used in the 'hg pull --rebase' case, see issue 5214.
638 638 destspace = opts.get('_destspace')
639 639 contf = opts.get('continue')
640 640 abortf = opts.get('abort')
641 641 if opts.get('interactive'):
642 642 try:
643 643 if extensions.find('histedit'):
644 644 enablehistedit = ''
645 645 except KeyError:
646 646 enablehistedit = " --config extensions.histedit="
647 647 help = "hg%s help -e histedit" % enablehistedit
648 648 msg = _("interactive history editing is supported by the "
649 649 "'histedit' extension (see \"%s\")") % help
650 650 raise error.Abort(msg)
651 651
652 652 if rbsrt.collapsemsg and not rbsrt.collapsef:
653 653 raise error.Abort(
654 654 _('message can only be specified with collapse'))
655 655
656 656 if contf or abortf:
657 657 if contf and abortf:
658 658 raise error.Abort(_('cannot use both abort and continue'))
659 659 if rbsrt.collapsef:
660 660 raise error.Abort(
661 661 _('cannot use collapse with continue or abort'))
662 662 if srcf or basef or destf:
663 663 raise error.Abort(
664 664 _('abort and continue do not allow specifying revisions'))
665 665 if abortf and opts.get('tool', False):
666 666 ui.warn(_('tool option will be ignored\n'))
667 667 if contf:
668 668 ms = mergemod.mergestate.read(repo)
669 669 mergeutil.checkunresolved(ms)
670 670
671 671 retcode = rbsrt._prepareabortorcontinue(abortf)
672 672 if retcode is not None:
673 673 return retcode
674 674 else:
675 675 dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
676 676 destspace=destspace)
677 677 retcode = rbsrt._preparenewrebase(dest, rebaseset)
678 678 if retcode is not None:
679 679 return retcode
680 680
681 681 rbsrt._performrebase()
682 682 rbsrt._finishrebase()
683 683 finally:
684 684 release(lock, wlock)
685 685
686 686 def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=[],
687 687 destspace=None):
688 688 """use revisions argument to define destination and rebase set
689 689 """
690 690 # destspace is here to work around issues with `hg pull --rebase` see
691 691 # issue5214 for details
692 692 if srcf and basef:
693 693 raise error.Abort(_('cannot specify both a source and a base'))
694 694 if revf and basef:
695 695 raise error.Abort(_('cannot specify both a revision and a base'))
696 696 if revf and srcf:
697 697 raise error.Abort(_('cannot specify both a revision and a source'))
698 698
699 699 cmdutil.checkunfinished(repo)
700 700 cmdutil.bailifchanged(repo)
701 701
702 702 if destf:
703 703 dest = scmutil.revsingle(repo, destf)
704 704
705 705 if revf:
706 706 rebaseset = scmutil.revrange(repo, revf)
707 707 if not rebaseset:
708 708 ui.status(_('empty "rev" revision set - nothing to rebase\n'))
709 709 return None, None
710 710 elif srcf:
711 711 src = scmutil.revrange(repo, [srcf])
712 712 if not src:
713 713 ui.status(_('empty "source" revision set - nothing to rebase\n'))
714 714 return None, None
715 715 rebaseset = repo.revs('(%ld)::', src)
716 716 assert rebaseset
717 717 else:
718 718 base = scmutil.revrange(repo, [basef or '.'])
719 719 if not base:
720 720 ui.status(_('empty "base" revision set - '
721 721 "can't compute rebase set\n"))
722 722 return None, None
723 723 if not destf:
724 724 dest = repo[_destrebase(repo, base, destspace=destspace)]
725 725 destf = str(dest)
726 726
727 727 roots = [] # selected children of branching points
728 728 bpbase = {} # {branchingpoint: [origbase]}
729 729 for b in base: # group bases by branching points
730 730 bp = repo.revs('ancestor(%d, %d)', b, dest).first()
731 731 bpbase[bp] = bpbase.get(bp, []) + [b]
732 732 if None in bpbase:
733 733 # emulate the old behavior, showing "nothing to rebase" (a better
734 734 # behavior may be abort with "cannot find branching point" error)
735 735 bpbase.clear()
736 736 for bp, bs in bpbase.iteritems(): # calculate roots
737 737 roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))
738 738
739 739 rebaseset = repo.revs('%ld::', roots)
740 740
741 741 if not rebaseset:
742 742 # transform to list because smartsets are not comparable to
743 743 # lists. This should be improved to honor laziness of
744 744 # smartset.
745 745 if list(base) == [dest.rev()]:
746 746 if basef:
747 747 ui.status(_('nothing to rebase - %s is both "base"'
748 748 ' and destination\n') % dest)
749 749 else:
750 750 ui.status(_('nothing to rebase - working directory '
751 751 'parent is also destination\n'))
752 752 elif not repo.revs('%ld - ::%d', base, dest):
753 753 if basef:
754 754 ui.status(_('nothing to rebase - "base" %s is '
755 755 'already an ancestor of destination '
756 756 '%s\n') %
757 757 ('+'.join(str(repo[r]) for r in base),
758 758 dest))
759 759 else:
760 760 ui.status(_('nothing to rebase - working '
761 761 'directory parent is already an '
762 762 'ancestor of destination %s\n') % dest)
763 763 else: # can it happen?
764 764 ui.status(_('nothing to rebase from %s to %s\n') %
765 765 ('+'.join(str(repo[r]) for r in base), dest))
766 766 return None, None
767 767
768 768 if not destf:
769 769 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
770 770 destf = str(dest)
771 771
772 772 return dest, rebaseset
773 773
774 774 def externalparent(repo, state, targetancestors):
775 775 """Return the revision that should be used as the second parent
776 776 when the revisions in state are collapsed on top of targetancestors.
777 777 Abort if there is more than one parent.
778 778 """
779 779 parents = set()
780 780 source = min(state)
781 781 for rev in state:
782 782 if rev == source:
783 783 continue
784 784 for p in repo[rev].parents():
785 785 if (p.rev() not in state
786 786 and p.rev() not in targetancestors):
787 787 parents.add(p.rev())
788 788 if not parents:
789 789 return nullrev
790 790 if len(parents) == 1:
791 791 return parents.pop()
792 792 raise error.Abort(_('unable to collapse on top of %s, there is more '
793 793 'than one external parent: %s') %
794 794 (max(targetancestors),
795 795 ', '.join(str(p) for p in sorted(parents))))
796 796
797 797 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
798 798 keepbranches=False, date=None):
799 799 '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
800 800 but also store useful information in extra.
801 801 Return node of committed revision.'''
802 802 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
803 803 try:
804 804 repo.setparents(repo[p1].node(), repo[p2].node())
805 805 ctx = repo[rev]
806 806 if commitmsg is None:
807 807 commitmsg = ctx.description()
808 808 keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
809 809 extra = {'rebase_source': ctx.hex()}
810 810 if extrafn:
811 811 extrafn(ctx, extra)
812 812
813 813 backup = repo.ui.backupconfig('phases', 'new-commit')
814 814 try:
815 815 targetphase = max(ctx.phase(), phases.draft)
816 816 repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
817 817 if keepbranch:
818 818 repo.ui.setconfig('ui', 'allowemptycommit', True)
819 819 # Commit might fail if unresolved files exist
820 820 if date is None:
821 821 date = ctx.date()
822 822 newnode = repo.commit(text=commitmsg, user=ctx.user(),
823 823 date=date, extra=extra, editor=editor)
824 824 finally:
825 825 repo.ui.restoreconfig(backup)
826 826
827 827 repo.dirstate.setbranch(repo[newnode].branch())
828 828 dsguard.close()
829 829 return newnode
830 830 finally:
831 831 release(dsguard)
832 832
833 833 def rebasenode(repo, rev, p1, base, state, collapse, target):
834 834 'Rebase a single revision rev on top of p1 using base as merge ancestor'
835 835 # Merge phase
836 836 # Update to target and merge it with local
837 837 if repo['.'].rev() != p1:
838 838 repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
839 839 mergemod.update(repo, p1, False, True)
840 840 else:
841 841 repo.ui.debug(" already in target\n")
842 842 repo.dirstate.write(repo.currenttransaction())
843 843 repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
844 844 if base is not None:
845 845 repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
846 846 # When collapsing in-place, the parent is the common ancestor, we
847 847 # have to allow merging with it.
848 848 stats = mergemod.update(repo, rev, True, True, base, collapse,
849 849 labels=['dest', 'source'])
850 850 if collapse:
851 851 copies.duplicatecopies(repo, rev, target)
852 852 else:
853 853 # If we're not using --collapse, we need to
854 854 # duplicate copies between the revision we're
855 855 # rebasing and its first parent, but *not*
856 856 # duplicate any copies that have already been
857 857 # performed in the destination.
858 858 p1rev = repo[rev].p1().rev()
859 859 copies.duplicatecopies(repo, rev, p1rev, skiprev=target)
860 860 return stats
861 861
862 862 def nearestrebased(repo, rev, state):
863 863 """return the nearest ancestors of rev in the rebase result"""
864 864 rebased = [r for r in state if state[r] > nullmerge]
865 865 candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
866 866 if candidates:
867 867 return state[candidates.first()]
868 868 else:
869 869 return None
870 870
871 871 def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped):
872 872 """
873 873 Abort if rebase will create divergence or rebase is noop because of markers
874 874
875 875 `rebaseobsrevs`: set of obsolete revisions in source
876 876 `rebasesetrevs`: set of revisions to be rebased from source
877 877 `rebaseobsskipped`: set of revisions from source skipped because they have
878 878 successors in destination
879 879 """
880 880 # Obsolete node with successors not in dest leads to divergence
881 881 divergenceok = ui.configbool('experimental',
882 882 'allowdivergence')
883 883 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
884 884
885 885 if divergencebasecandidates and not divergenceok:
886 886 divhashes = (str(repo[r])
887 887 for r in divergencebasecandidates)
888 888 msg = _("this rebase will cause "
889 889 "divergences from: %s")
890 890 h = _("to force the rebase please set "
891 891 "experimental.allowdivergence=True")
892 892 raise error.Abort(msg % (",".join(divhashes),), hint=h)
893 893
894 894 def defineparents(repo, rev, target, state, targetancestors,
895 895 obsoletenotrebased):
896 896 'Return the new parent relationship of the revision that will be rebased'
897 897 parents = repo[rev].parents()
898 898 p1 = p2 = nullrev
899 899 rp1 = None
900 900
901 901 p1n = parents[0].rev()
902 902 if p1n in targetancestors:
903 903 p1 = target
904 904 elif p1n in state:
905 905 if state[p1n] == nullmerge:
906 906 p1 = target
907 907 elif state[p1n] in revskipped:
908 908 p1 = nearestrebased(repo, p1n, state)
909 909 if p1 is None:
910 910 p1 = target
911 911 else:
912 912 p1 = state[p1n]
913 913 else: # p1n external
914 914 p1 = target
915 915 p2 = p1n
916 916
917 917 if len(parents) == 2 and parents[1].rev() not in targetancestors:
918 918 p2n = parents[1].rev()
919 919 # interesting second parent
920 920 if p2n in state:
921 921 if p1 == target: # p1n in targetancestors or external
922 922 p1 = state[p2n]
923 923 if p1 == revprecursor:
924 924 rp1 = obsoletenotrebased[p2n]
925 925 elif state[p2n] in revskipped:
926 926 p2 = nearestrebased(repo, p2n, state)
927 927 if p2 is None:
928 928 # no ancestors rebased yet, detach
929 929 p2 = target
930 930 else:
931 931 p2 = state[p2n]
932 932 else: # p2n external
933 933 if p2 != nullrev: # p1n external too => rev is a merged revision
934 934 raise error.Abort(_('cannot use revision %d as base, result '
935 935 'would have 3 parents') % rev)
936 936 p2 = p2n
937 937 repo.ui.debug(" future parents are %d and %d\n" %
938 938 (repo[rp1 or p1].rev(), repo[p2].rev()))
939 939
940 940 if not any(p.rev() in state for p in parents):
941 941 # Case (1) root changeset of a non-detaching rebase set.
942 942 # Let the merge mechanism find the base itself.
943 943 base = None
944 944 elif not repo[rev].p2():
945 945 # Case (2) detaching the node with a single parent, use this parent
946 946 base = repo[rev].p1().rev()
947 947 else:
948 948 # Assuming there is a p1, this is the case where there also is a p2.
949 949 # We are thus rebasing a merge and need to pick the right merge base.
950 950 #
951 951 # Imagine we have:
952 952 # - M: current rebase revision in this step
953 953 # - A: one parent of M
954 954 # - B: other parent of M
955 955 # - D: destination of this merge step (p1 var)
956 956 #
957 957 # Consider the case where D is a descendant of A or B and the other is
958 958 # 'outside'. In this case, the right merge base is the D ancestor.
959 959 #
960 960 # An informal proof, assuming A is 'outside' and B is the D ancestor:
961 961 #
962 962 # If we pick B as the base, the merge involves:
963 963 # - changes from B to M (actual changeset payload)
964 964 # - changes from B to D (induced by rebase, as D is a rebased
965 965 #   version of B)
966 966 # Which exactly represents the rebase operation.
967 967 #
968 968 # If we pick A as the base, the merge involves:
969 969 # - changes from A to M (actual changeset payload)
970 970 # - changes from A to D (which include changes between unrelated A and B
971 971 # plus changes induced by rebase)
972 972 # Which does not represent anything sensible and creates a lot of
973 973 # conflicts. A is thus not the right choice - B is.
974 974 #
975 975 # Note: The base found in this 'proof' is only correct in the specified
976 976 # case. This base does not make sense if D is not a descendant of A or B
977 977 # or if the other parent is not 'outside' (especially not if the other
978 978 # parent has been rebased). The current implementation does not
979 979 # make it feasible to consider different cases separately. In these
980 980 # other cases we currently just leave it to the user to correctly
981 981 # resolve an impossible merge using a wrong ancestor.
982 982 #
983 983 # xx, p1 could be -4, and both parents could probably be -4...
984 984 for p in repo[rev].parents():
985 985 if state.get(p.rev()) == p1:
986 986 base = p.rev()
987 987 break
988 988 else: # fallback when base not found
989 989 base = None
990 990
991 991 # Raise because this function is called wrong (see issue 4106)
992 992 raise AssertionError('no base found to rebase on '
993 993 '(defineparents called wrong)')
994 994 return rp1 or p1, p2, base
995 995
996 996 def isagitpatch(repo, patchname):
997 997 'Return true if the given patch is in git format'
998 998 mqpatch = os.path.join(repo.mq.path, patchname)
999 999 for line in patch.linereader(file(mqpatch, 'rb')):
1000 1000 if line.startswith('diff --git'):
1001 1001 return True
1002 1002 return False
1003 1003
1004 1004 def updatemq(repo, state, skipped, **opts):
1005 1005 'Update rebased mq patches - finalize and then import them'
1006 1006 mqrebase = {}
1007 1007 mq = repo.mq
1008 1008 original_series = mq.fullseries[:]
1009 1009 skippedpatches = set()
1010 1010
1011 1011 for p in mq.applied:
1012 1012 rev = repo[p.node].rev()
1013 1013 if rev in state:
1014 1014 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
1015 1015 (rev, p.name))
1016 1016 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1017 1017 else:
1018 1018 # Applied but not rebased, not sure this should happen
1019 1019 skippedpatches.add(p.name)
1020 1020
1021 1021 if mqrebase:
1022 1022 mq.finish(repo, mqrebase.keys())
1023 1023
1024 1024 # We must start import from the newest revision
1025 1025 for rev in sorted(mqrebase, reverse=True):
1026 1026 if rev not in skipped:
1027 1027 name, isgit = mqrebase[rev]
1028 1028 repo.ui.note(_('updating mq patch %s to %s:%s\n') %
1029 1029 (name, state[rev], repo[state[rev]]))
1030 1030 mq.qimport(repo, (), patchname=name, git=isgit,
1031 1031 rev=[str(state[rev])])
1032 1032 else:
1033 1033 # Rebased and skipped
1034 1034 skippedpatches.add(mqrebase[rev][0])
1035 1035
1036 1036 # Patches were either applied and rebased and imported in
1037 1037 # order, applied and removed or unapplied. Discard the removed
1038 1038 # ones while preserving the original series order and guards.
1039 1039 newseries = [s for s in original_series
1040 1040 if mq.guard_re.split(s, 1)[0] not in skippedpatches]
1041 1041 mq.fullseries[:] = newseries
1042 1042 mq.seriesdirty = True
1043 1043 mq.savedirty()
1044 1044
1045 1045 def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr):
1046 1046 'Move bookmarks to their correct changesets, and delete divergent ones'
1047 1047 marks = repo._bookmarks
1048 1048 for k, v in originalbookmarks.iteritems():
1049 1049 if v in nstate:
1050 1050 # update the bookmarks for revs that have moved
1051 1051 marks[k] = nstate[v]
1052 1052 bookmarks.deletedivergent(repo, [targetnode], k)
1053 1053 marks.recordchange(tr)
1054 1054
1055 1055 def storecollapsemsg(repo, collapsemsg):
1056 1056 'Store the collapse message to allow recovery'
1057 1057 collapsemsg = collapsemsg or ''
1058 1058 f = repo.vfs("last-message.txt", "w")
1059 1059 f.write("%s\n" % collapsemsg)
1060 1060 f.close()
1061 1061
1062 1062 def clearcollapsemsg(repo):
1063 1063 'Remove collapse message file'
1064 1064 util.unlinkpath(repo.join("last-message.txt"), ignoremissing=True)
1065 1065
1066 1066 def restorecollapsemsg(repo):
1067 1067 'Restore previously stored collapse message'
1068 1068 try:
1069 1069 f = repo.vfs("last-message.txt")
1070 1070 collapsemsg = f.readline().strip()
1071 1071 f.close()
1072 1072 except IOError as err:
1073 1073 if err.errno != errno.ENOENT:
1074 1074 raise
1075 1075 raise error.Abort(_('no rebase in progress'))
1076 1076 return collapsemsg
1077 1077
1078 1078 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
1079 1079 external, activebookmark):
1080 1080 'Store the current status to allow recovery'
1081 1081 f = repo.vfs("rebasestate", "w")
1082 1082 f.write(repo[originalwd].hex() + '\n')
1083 1083 f.write(repo[target].hex() + '\n')
1084 1084 f.write(repo[external].hex() + '\n')
1085 1085 f.write('%d\n' % int(collapse))
1086 1086 f.write('%d\n' % int(keep))
1087 1087 f.write('%d\n' % int(keepbranches))
1088 1088 f.write('%s\n' % (activebookmark or ''))
1089 1089 for d, v in state.iteritems():
1090 1090 oldrev = repo[d].hex()
1091 1091 if v >= 0:
1092 1092 newrev = repo[v].hex()
1093 1093 elif v == revtodo:
1094 1094 # To maintain format compatibility, we have to use nullid.
1095 1095 # Please do remove this special case when upgrading the format.
1096 1096 newrev = hex(nullid)
1097 1097 else:
1098 1098 newrev = v
1099 1099 f.write("%s:%s\n" % (oldrev, newrev))
1100 1100 f.close()
1101 1101 repo.ui.debug('rebase status stored\n')
1102 1102
1103 1103 def clearstatus(repo):
1104 1104 'Remove the status files'
1105 1105 _clearrebasesetvisibiliy(repo)
1106 1106 util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
1107 1107
1108 1108 def needupdate(repo, state):
1109 1109 '''check whether we should `update --clean` away from a merge, or if
1110 1110 somehow the working dir got forcibly updated, e.g. by older hg'''
1111 1111 parents = [p.rev() for p in repo[None].parents()]
1112 1112
1113 1113 # Are we in a merge state at all?
1114 1114 if len(parents) < 2:
1115 1115 return False
1116 1116
1117 1117 # We should be standing on the first as-of-yet unrebased commit.
1118 1118 firstunrebased = min([old for old, new in state.iteritems()
1119 1119 if new == nullrev])
1120 1120 if firstunrebased in parents:
1121 1121 return True
1122 1122
1123 1123 return False
1124 1124
1125 1125 def abort(repo, originalwd, target, state, activebookmark=None):
1126 1126 '''Restore the repository to its original state. Additional args:
1127 1127
1128 1128 activebookmark: the name of the bookmark that should be active after the
1129 1129 restore'''
1130 1130
1131 1131 try:
1132 1132 # If the first commits in the rebased set get skipped during the rebase,
1133 1133 # their values within the state mapping will be the target rev id. The
1134 1134 # dstates list must not contain the target rev (issue4896)
1135 1135 dstates = [s for s in state.values() if s >= 0 and s != target]
1136 1136 immutable = [d for d in dstates if not repo[d].mutable()]
1137 1137 cleanup = True
1138 1138 if immutable:
1139 1139 repo.ui.warn(_("warning: can't clean up public changesets %s\n")
1140 1140 % ', '.join(str(repo[r]) for r in immutable),
1141 1141 hint=_("see 'hg help phases' for details"))
1142 1142 cleanup = False
1143 1143
1144 1144 descendants = set()
1145 1145 if dstates:
1146 1146 descendants = set(repo.changelog.descendants(dstates))
1147 1147 if descendants - set(dstates):
1148 1148 repo.ui.warn(_("warning: new changesets detected on target branch, "
1149 1149 "can't strip\n"))
1150 1150 cleanup = False
1151 1151
1152 1152 if cleanup:
1153 1153 shouldupdate = False
1154 1154 rebased = filter(lambda x: x >= 0 and x != target, state.values())
1155 1155 if rebased:
1156 1156 strippoints = [
1157 1157 c.node() for c in repo.set('roots(%ld)', rebased)]
1158 1158 shouldupdate = len([
1159 1159 c.node() for c in repo.set('. & (%ld)', rebased)]) > 0
1160 1160
1161 1161 # Update away from the rebase if necessary
1162 1162 if shouldupdate or needupdate(repo, state):
1163 1163 mergemod.update(repo, originalwd, False, True)
1164 1164
1165 1165 # Strip from the first rebased revision
1166 1166 if rebased:
1167 1167 # no backup of rebased cset versions needed
1168 1168 repair.strip(repo.ui, repo, strippoints)
1169 1169
1170 1170 if activebookmark and activebookmark in repo._bookmarks:
1171 1171 bookmarks.activate(repo, activebookmark)
1172 1172
1173 1173 finally:
1174 1174 clearstatus(repo)
1175 1175 clearcollapsemsg(repo)
1176 1176 repo.ui.warn(_('rebase aborted\n'))
1177 1177 return 0
1178 1178
1179 1179 def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
1180 1180 '''Define which revisions are going to be rebased and where
1181 1181
1182 1182 repo: repo
1183 1183 dest: context
1184 1184 rebaseset: set of rev
1185 1185 '''
1186 1186 _setrebasesetvisibility(repo, rebaseset)
1187 1187
1188 1188 # This check isn't strictly necessary, since mq detects commits over an
1189 1189 # applied patch. But it prevents messing up the working directory when
1190 1190 # a partially completed rebase is blocked by mq.
1191 1191 if 'qtip' in repo.tags() and (dest.node() in
1192 1192 [s.node for s in repo.mq.applied]):
1193 1193 raise error.Abort(_('cannot rebase onto an applied mq patch'))
1194 1194
1195 1195 roots = list(repo.set('roots(%ld)', rebaseset))
1196 1196 if not roots:
1197 1197 raise error.Abort(_('no matching revisions'))
1198 1198 roots.sort()
1199 1199 state = {}
1200 1200 detachset = set()
1201 1201 for root in roots:
1202 1202 commonbase = root.ancestor(dest)
1203 1203 if commonbase == root:
1204 1204 raise error.Abort(_('source is ancestor of destination'))
1205 1205 if commonbase == dest:
1206 1206 samebranch = root.branch() == dest.branch()
1207 1207 if not collapse and samebranch and root in dest.children():
1208 1208 repo.ui.debug('source is a child of destination\n')
1209 1209 return None
1210 1210
1211 1211 repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
1212 1212 state.update(dict.fromkeys(rebaseset, revtodo))
1213 1213 # Rebase tries to turn <dest> into a parent of <root> while
1214 1214 # preserving the number of parents of rebased changesets:
1215 1215 #
1216 1216 # - A changeset with a single parent will always be rebased as a
1217 1217 # changeset with a single parent.
1218 1218 #
1219 1219 # - A merge will be rebased as merge unless its parents are both
1220 1220 # ancestors of <dest> or are themselves in the rebased set and
1221 1221 # pruned while rebased.
1222 1222 #
1223 1223 # If one parent of <root> is an ancestor of <dest>, the rebased
1224 1224 # version of this parent will be <dest>. This is always true with
1225 1225 # --base option.
1226 1226 #
1227 1227 # Otherwise, we need to *replace* the original parents with
1228 1228 # <dest>. This "detaches" the rebased set from its former location
1229 1229 # and rebases it onto <dest>. Changes introduced by ancestors of
1230 1230 # <root> not common with <dest> (the detachset, marked as
1231 1231 # nullmerge) are "removed" from the rebased changesets.
1232 1232 #
1233 1233 # - If <root> has a single parent, set it to <dest>.
1234 1234 #
1235 1235 # - If <root> is a merge, we cannot decide which parent to
1236 1236 # replace, the rebase operation is not clearly defined.
1237 1237 #
1238 1238 # The table below sums up this behavior:
1239 1239 #
1240 1240 # +------------------+----------------------+-------------------------+
1241 1241 # | | one parent | merge |
1242 1242 # +------------------+----------------------+-------------------------+
1243 1243 # | parent in | new parent is <dest> | parents in ::<dest> are |
1244 1244 # | ::<dest> | | remapped to <dest> |
1245 1245 # +------------------+----------------------+-------------------------+
1246 1246 # | unrelated source | new parent is <dest> | ambiguous, abort |
1247 1247 # +------------------+----------------------+-------------------------+
1248 1248 #
1249 1249 # The actual abort is handled by `defineparents`
1250 1250 if len(root.parents()) <= 1:
1251 1251 # ancestors of <root> not ancestors of <dest>
1252 1252 detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
1253 1253 [root.rev()]))
1254 1254 for r in detachset:
1255 1255 if r not in state:
1256 1256 state[r] = nullmerge
1257 1257 if len(roots) > 1:
1258 1258 # If we have multiple roots, we may have "holes" in the rebase set.
1259 1259 # Rebase roots that descend from those "holes" should not be detached as
1260 1260 # other roots are. We use the special `revignored` to inform rebase that
1261 1261 # the revision should be ignored but that `defineparents` should search
1262 1262 # a rebase destination that makes sense regarding rebased topology.
1263 1263 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
1264 1264 for ignored in set(rebasedomain) - set(rebaseset):
1265 1265 state[ignored] = revignored
1266 1266 for r in obsoletenotrebased:
1267 1267 if obsoletenotrebased[r] is None:
1268 1268 state[r] = revpruned
1269 1269 else:
1270 1270 state[r] = revprecursor
1271 1271 return repo['.'].rev(), dest.rev(), state
1272 1272
1273 1273 def clearrebased(ui, repo, state, skipped, collapsedas=None):
1274 1274 """dispose of rebased revision at the end of the rebase
1275 1275
1276 1276 If `collapsedas` is not None, the rebase was a collapse whose result is the
1277 1277 `collapsedas` node."""
1278 1278 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1279 1279 markers = []
1280 1280 for rev, newrev in sorted(state.items()):
1281 1281 if newrev >= 0:
1282 1282 if rev in skipped:
1283 1283 succs = ()
1284 1284 elif collapsedas is not None:
1285 1285 succs = (repo[collapsedas],)
1286 1286 else:
1287 1287 succs = (repo[newrev],)
1288 1288 markers.append((repo[rev], succs))
1289 1289 if markers:
1290 1290 obsolete.createmarkers(repo, markers)
1291 1291 else:
1292 1292 rebased = [rev for rev in state if state[rev] > nullmerge]
1293 1293 if rebased:
1294 1294 stripped = []
1295 1295 for root in repo.set('roots(%ld)', rebased):
1296 1296 if set(repo.changelog.descendants([root.rev()])) - set(state):
1297 1297 ui.warn(_("warning: new changesets detected "
1298 1298 "on source branch, not stripping\n"))
1299 1299 else:
1300 1300 stripped.append(root.node())
1301 1301 if stripped:
1302 1302 # backup the old csets by default
1303 1303 repair.strip(ui, repo, stripped, "all")
1304 1304
1305 1305
1306 1306 def pullrebase(orig, ui, repo, *args, **opts):
1307 1307 'Call rebase after pull if the latter has been invoked with --rebase'
1308 1308 ret = None
1309 1309 if opts.get('rebase'):
1310 1310 wlock = lock = None
1311 1311 try:
1312 1312 wlock = repo.wlock()
1313 1313 lock = repo.lock()
1314 1314 if opts.get('update'):
1315 1315 del opts['update']
1316 1316 ui.debug('--update and --rebase are not compatible, ignoring '
1317 1317 'the update flag\n')
1318 1318
1319 ui.debug('before rebase: ensure working dir is clean\n')
1320 1319 cmdutil.checkunfinished(repo)
1321 cmdutil.bailifchanged(repo)
1320 cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
1321 'please commit or shelve your changes first'))
1322 1322
1323 1323 revsprepull = len(repo)
1324 1324 origpostincoming = commands.postincoming
1325 1325 def _dummy(*args, **kwargs):
1326 1326 pass
1327 1327 commands.postincoming = _dummy
1328 1328 try:
1329 1329 ret = orig(ui, repo, *args, **opts)
1330 1330 finally:
1331 1331 commands.postincoming = origpostincoming
1332 1332 revspostpull = len(repo)
1333 1333 if revspostpull > revsprepull:
1334 1334 # --rev option from pull conflicts with rebase's own --rev,
1335 1335 # dropping it
1336 1336 if 'rev' in opts:
1337 1337 del opts['rev']
1338 1338 # positional argument from pull conflicts with rebase's own
1339 1339 # --source.
1340 1340 if 'source' in opts:
1341 1341 del opts['source']
1342 1342 # revsprepull is the len of the repo, not revnum of tip.
1343 1343 destspace = list(repo.changelog.revs(start=revsprepull))
1344 1344 opts['_destspace'] = destspace
1345 1345 try:
1346 1346 rebase(ui, repo, **opts)
1347 1347 except error.NoMergeDestAbort:
1348 1348 # we can maybe update instead
1349 1349 rev, _a, _b = destutil.destupdate(repo)
1350 1350 if rev == repo['.'].rev():
1351 1351 ui.status(_('nothing to rebase\n'))
1352 1352 else:
1353 1353 ui.status(_('nothing to rebase - updating instead\n'))
1354 1354 # not passing argument to get the bare update behavior
1355 1355 # with warning and trumpets
1356 1356 commands.update(ui, repo)
1357 1357 finally:
1358 1358 release(lock, wlock)
1359 1359 else:
1360 1360 if opts.get('tool'):
1361 1361 raise error.Abort(_('--tool can only be used with --rebase'))
1362 1362 ret = orig(ui, repo, *args, **opts)
1363 1363
1364 1364 return ret
1365 1365
1366 1366 def _setrebasesetvisibility(repo, revs):
1367 1367 """store the currently rebased set on the repo object
1368 1368
1369 1369 This is used by another function to prevent rebased revisions from becoming
1370 1370 hidden (see issue4505)"""
1371 1371 repo = repo.unfiltered()
1372 1372 revs = set(revs)
1373 1373 repo._rebaseset = revs
1374 1374 # invalidate cache if visibility changes
1375 1375 hiddens = repo.filteredrevcache.get('visible', set())
1376 1376 if revs & hiddens:
1377 1377 repo.invalidatevolatilesets()
1378 1378
1379 1379 def _clearrebasesetvisibiliy(repo):
1380 1380 """remove rebaseset data from the repo"""
1381 1381 repo = repo.unfiltered()
1382 1382 if '_rebaseset' in vars(repo):
1383 1383 del repo._rebaseset
1384 1384
1385 1385 def _rebasedvisible(orig, repo):
1386 1386 """ensure rebased revs stay visible (see issue4505)"""
1387 1387 blockers = orig(repo)
1388 1388 blockers.update(getattr(repo, '_rebaseset', ()))
1389 1389 return blockers
1390 1390
1391 1391 def _filterobsoleterevs(repo, revs):
1392 1392 """returns a set of the obsolete revisions in revs"""
1393 1393 return set(r for r in revs if repo[r].obsolete())
1394 1394
1395 1395 def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
1396 1396 """return a mapping obsolete => successor for all obsolete nodes to be
1397 1397 rebased that have a successor in the destination
1398 1398
1399 1399 obsolete => None entries in the mapping indicate nodes with no successor"""
1400 1400 obsoletenotrebased = {}
1401 1401
1402 1402 # Build a mapping successor => obsolete nodes for the obsolete
1403 1403 # nodes to be rebased
1404 1404 allsuccessors = {}
1405 1405 cl = repo.changelog
1406 1406 for r in rebaseobsrevs:
1407 1407 node = cl.node(r)
1408 1408 for s in obsolete.allsuccessors(repo.obsstore, [node]):
1409 1409 try:
1410 1410 allsuccessors[cl.rev(s)] = cl.rev(node)
1411 1411 except LookupError:
1412 1412 pass
1413 1413
1414 1414 if allsuccessors:
1415 1415 # Look for successors of obsolete nodes to be rebased among
1416 1416 # the ancestors of dest
1417 1417 ancs = cl.ancestors([repo[dest].rev()],
1418 1418 stoprev=min(allsuccessors),
1419 1419 inclusive=True)
1420 1420 for s in allsuccessors:
1421 1421 if s in ancs:
1422 1422 obsoletenotrebased[allsuccessors[s]] = s
1423 1423 elif (s == allsuccessors[s] and
1424 1424 allsuccessors.values().count(s) == 1):
1425 1425 # plain prune
1426 1426 obsoletenotrebased[s] = None
1427 1427
1428 1428 return obsoletenotrebased
1429 1429
1430 1430 def summaryhook(ui, repo):
1431 1431 if not repo.vfs.exists('rebasestate'):
1432 1432 return
1433 1433 try:
1434 1434 rbsrt = rebaseruntime(repo, ui, {})
1435 1435 rbsrt.restorestatus()
1436 1436 state = rbsrt.state
1437 1437 except error.RepoLookupError:
1438 1438 # i18n: column positioning for "hg summary"
1439 1439 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1440 1440 ui.write(msg)
1441 1441 return
1442 1442 numrebased = len([i for i in state.itervalues() if i >= 0])
1443 1443 # i18n: column positioning for "hg summary"
1444 1444 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1445 1445 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1446 1446 ui.label(_('%d remaining'), 'rebase.remaining') %
1447 1447 (len(state) - numrebased)))
1448 1448
1449 1449 def uisetup(ui):
1450 1450 # Replace pull with a decorator to provide --rebase option
1451 1451 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1452 1452 entry[1].append(('', 'rebase', None,
1453 1453 _("rebase working directory to branch head")))
1454 1454 entry[1].append(('t', 'tool', '',
1455 1455 _("specify merge tool for rebase")))
1456 1456 cmdutil.summaryhooks.add('rebase', summaryhook)
1457 1457 cmdutil.unfinishedstates.append(
1458 1458 ['rebasestate', False, False, _('rebase in progress'),
1459 1459 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1460 1460 cmdutil.afterresolvedstates.append(
1461 1461 ['rebasestate', _('hg rebase --continue')])
1462 1462 # ensure rebased rev are not hidden
1463 1463 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
@@ -1,3475 +1,3483 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13 import tempfile
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 bin,
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 dirstateguard as dirstateguardmod,
30 30 encoding,
31 31 error,
32 32 formatter,
33 33 graphmod,
34 34 lock as lockmod,
35 35 match as matchmod,
36 36 mergeutil,
37 37 obsolete,
38 38 patch,
39 39 pathutil,
40 40 phases,
41 41 pycompat,
42 42 repair,
43 43 revlog,
44 44 revset,
45 45 scmutil,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 )
50 50 stringio = util.stringio
51 51
52 52 # special string such that everything below this line will be ignored in the
53 53 # editor text
54 54 _linebelow = "^HG: ------------------------ >8 ------------------------$"
55 55
56 56 def ishunk(x):
57 57 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
58 58 return isinstance(x, hunkclasses)
59 59
60 60 def newandmodified(chunks, originalchunks):
61 61 newlyaddedandmodifiedfiles = set()
62 62 for chunk in chunks:
63 63 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
64 64 originalchunks:
65 65 newlyaddedandmodifiedfiles.add(chunk.header.filename())
66 66 return newlyaddedandmodifiedfiles
67 67
68 68 def parsealiases(cmd):
69 69 return cmd.lstrip("^").split("|")
70 70
71 71 def setupwrapcolorwrite(ui):
72 72 # wrap ui.write so diff output can be labeled/colorized
73 73 def wrapwrite(orig, *args, **kw):
74 74 label = kw.pop('label', '')
75 75 for chunk, l in patch.difflabel(lambda: args):
76 76 orig(chunk, label=label + l)
77 77
78 78 oldwrite = ui.write
79 79 def wrap(*args, **kwargs):
80 80 return wrapwrite(oldwrite, *args, **kwargs)
81 81 setattr(ui, 'write', wrap)
82 82 return oldwrite
83 83
84 84 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
85 85 if usecurses:
86 86 if testfile:
87 87 recordfn = crecordmod.testdecorator(testfile,
88 88 crecordmod.testchunkselector)
89 89 else:
90 90 recordfn = crecordmod.chunkselector
91 91
92 92 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
93 93
94 94 else:
95 95 return patch.filterpatch(ui, originalhunks, operation)
96 96
97 97 def recordfilter(ui, originalhunks, operation=None):
98 98 """ Prompts the user to filter the originalhunks and return a list of
99 99 selected hunks.
100 100 *operation* is used to build ui messages to indicate to the user what
101 101 kind of filtering they are doing: reverting, committing, shelving, etc.
102 102 (see patch.filterpatch).
103 103 """
104 104 usecurses = crecordmod.checkcurses(ui)
105 105 testfile = ui.config('experimental', 'crecordtest', None)
106 106 oldwrite = setupwrapcolorwrite(ui)
107 107 try:
108 108 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
109 109 testfile, operation)
110 110 finally:
111 111 ui.write = oldwrite
112 112 return newchunks, newopts
113 113
114 114 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
115 115 filterfn, *pats, **opts):
116 116 from . import merge as mergemod
117 117 if not ui.interactive():
118 118 if cmdsuggest:
119 119 msg = _('running non-interactively, use %s instead') % cmdsuggest
120 120 else:
121 121 msg = _('running non-interactively')
122 122 raise error.Abort(msg)
123 123
124 124 # make sure username is set before going interactive
125 125 if not opts.get('user'):
126 126 ui.username() # raise exception, username not provided
127 127
128 128 def recordfunc(ui, repo, message, match, opts):
129 129 """This is generic record driver.
130 130
131 131 Its job is to interactively filter local changes, and
132 132 accordingly prepare the working directory into a state in which the
133 133 job can be delegated to a non-interactive commit command such as
134 134 'commit' or 'qrefresh'.
135 135
136 136 After the actual job is done by the non-interactive command, the
137 137 working directory is restored to its original state.
138 138
139 139 In the end we'll record interesting changes, and everything else
140 140 will be left in place, so the user can continue working.
141 141 """
142 142
143 143 checkunfinished(repo, commit=True)
144 144 wctx = repo[None]
145 145 merge = len(wctx.parents()) > 1
146 146 if merge:
147 147 raise error.Abort(_('cannot partially commit a merge '
148 148 '(use "hg commit" instead)'))
149 149
150 150 def fail(f, msg):
151 151 raise error.Abort('%s: %s' % (f, msg))
152 152
153 153 force = opts.get('force')
154 154 if not force:
155 155 vdirs = []
156 156 match.explicitdir = vdirs.append
157 157 match.bad = fail
158 158
159 159 status = repo.status(match=match)
160 160 if not force:
161 161 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
162 162 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
163 163 diffopts.nodates = True
164 164 diffopts.git = True
165 165 diffopts.showfunc = True
166 166 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
167 167 originalchunks = patch.parsepatch(originaldiff)
168 168
169 169 # 1. filter patch, since we are intending to apply subset of it
170 170 try:
171 171 chunks, newopts = filterfn(ui, originalchunks)
172 172 except patch.PatchError as err:
173 173 raise error.Abort(_('error parsing patch: %s') % err)
174 174 opts.update(newopts)
175 175
176 176 # We need to keep a backup of files that have been newly added and
177 177 # modified during the recording process because there is a previous
178 178 # version without the edit in the workdir
179 179 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
180 180 contenders = set()
181 181 for h in chunks:
182 182 try:
183 183 contenders.update(set(h.files()))
184 184 except AttributeError:
185 185 pass
186 186
187 187 changed = status.modified + status.added + status.removed
188 188 newfiles = [f for f in changed if f in contenders]
189 189 if not newfiles:
190 190 ui.status(_('no changes to record\n'))
191 191 return 0
192 192
193 193 modified = set(status.modified)
194 194
195 195 # 2. backup changed files, so we can restore them in the end
196 196
197 197 if backupall:
198 198 tobackup = changed
199 199 else:
200 200 tobackup = [f for f in newfiles if f in modified or f in \
201 201 newlyaddedandmodifiedfiles]
202 202 backups = {}
203 203 if tobackup:
204 204 backupdir = repo.join('record-backups')
205 205 try:
206 206 os.mkdir(backupdir)
207 207 except OSError as err:
208 208 if err.errno != errno.EEXIST:
209 209 raise
210 210 try:
211 211 # backup continues
212 212 for f in tobackup:
213 213 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
214 214 dir=backupdir)
215 215 os.close(fd)
216 216 ui.debug('backup %r as %r\n' % (f, tmpname))
217 217 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
218 218 backups[f] = tmpname
219 219
220 220 fp = stringio()
221 221 for c in chunks:
222 222 fname = c.filename()
223 223 if fname in backups:
224 224 c.write(fp)
225 225 dopatch = fp.tell()
226 226 fp.seek(0)
227 227
228 228 # 2.5 optionally review / modify patch in text editor
229 229 if opts.get('review', False):
230 230 patchtext = (crecordmod.diffhelptext
231 231 + crecordmod.patchhelptext
232 232 + fp.read())
233 233 reviewedpatch = ui.edit(patchtext, "",
234 234 extra={"suffix": ".diff"})
235 235 fp.truncate(0)
236 236 fp.write(reviewedpatch)
237 237 fp.seek(0)
238 238
239 239 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
240 240 # 3a. apply filtered patch to clean repo (clean)
241 241 if backups:
242 242 # Equivalent to hg.revert
243 243 m = scmutil.matchfiles(repo, backups.keys())
244 244 mergemod.update(repo, repo.dirstate.p1(),
245 245 False, True, matcher=m)
246 246
247 247 # 3b. (apply)
248 248 if dopatch:
249 249 try:
250 250 ui.debug('applying patch\n')
251 251 ui.debug(fp.getvalue())
252 252 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
253 253 except patch.PatchError as err:
254 254 raise error.Abort(str(err))
255 255 del fp
256 256
257 257 # 4. We prepared working directory according to filtered
258 258 # patch. Now is the time to delegate the job to
259 259 # commit/qrefresh or the like!
260 260
261 261 # Make all of the pathnames absolute.
262 262 newfiles = [repo.wjoin(nf) for nf in newfiles]
263 263 return commitfunc(ui, repo, *newfiles, **opts)
264 264 finally:
265 265 # 5. finally restore backed-up files
266 266 try:
267 267 dirstate = repo.dirstate
268 268 for realname, tmpname in backups.iteritems():
269 269 ui.debug('restoring %r to %r\n' % (tmpname, realname))
270 270
271 271 if dirstate[realname] == 'n':
272 272 # without normallookup, restoring timestamp
273 273 # may cause partially committed files
274 274 # to be treated as unmodified
275 275 dirstate.normallookup(realname)
276 276
277 277 # copystat=True here and above are a hack to trick any
278 278 # editors that have f open into thinking we haven't modified them.
279 279 #
280 280 # Also note that this is racy as an editor could notice the
281 281 # file's mtime before we've finished writing it.
282 282 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
283 283 os.unlink(tmpname)
284 284 if tobackup:
285 285 os.rmdir(backupdir)
286 286 except OSError:
287 287 pass
288 288
289 289 def recordinwlock(ui, repo, message, match, opts):
290 290 with repo.wlock():
291 291 return recordfunc(ui, repo, message, match, opts)
292 292
293 293 return commit(ui, repo, recordinwlock, pats, opts)
294 294
295 295 def findpossible(cmd, table, strict=False):
296 296 """
297 297 Return cmd -> (aliases, command table entry)
298 298 for each matching command.
299 299 Return debug commands (or their aliases) only if no normal command matches.
300 300 """
301 301 choice = {}
302 302 debugchoice = {}
303 303
304 304 if cmd in table:
305 305 # short-circuit exact matches, "log" alias beats "^log|history"
306 306 keys = [cmd]
307 307 else:
308 308 keys = table.keys()
309 309
310 310 allcmds = []
311 311 for e in keys:
312 312 aliases = parsealiases(e)
313 313 allcmds.extend(aliases)
314 314 found = None
315 315 if cmd in aliases:
316 316 found = cmd
317 317 elif not strict:
318 318 for a in aliases:
319 319 if a.startswith(cmd):
320 320 found = a
321 321 break
322 322 if found is not None:
323 323 if aliases[0].startswith("debug") or found.startswith("debug"):
324 324 debugchoice[found] = (aliases, table[e])
325 325 else:
326 326 choice[found] = (aliases, table[e])
327 327
328 328 if not choice and debugchoice:
329 329 choice = debugchoice
330 330
331 331 return choice, allcmds
332 332
333 333 def findcmd(cmd, table, strict=True):
334 334 """Return (aliases, command table entry) for command string."""
335 335 choice, allcmds = findpossible(cmd, table, strict)
336 336
337 337 if cmd in choice:
338 338 return choice[cmd]
339 339
340 340 if len(choice) > 1:
341 341 clist = choice.keys()
342 342 clist.sort()
343 343 raise error.AmbiguousCommand(cmd, clist)
344 344
345 345 if choice:
346 346 return choice.values()[0]
347 347
348 348 raise error.UnknownCommand(cmd, allcmds)
349 349
350 350 def findrepo(p):
351 351 while not os.path.isdir(os.path.join(p, ".hg")):
352 352 oldp, p = p, os.path.dirname(p)
353 353 if p == oldp:
354 354 return None
355 355
356 356 return p
357 357
358 def bailifchanged(repo, merge=True):
358 def bailifchanged(repo, merge=True, hint=None):
359 """ enforce the precondition that working directory must be clean.
360
361 'merge' can be set to false if a pending uncommitted merge should be
362 ignored (such as when 'update --check' runs).
363
364 'hint' is the usual hint given to the Abort exception.
365 """
366
359 367 if merge and repo.dirstate.p2() != nullid:
360 raise error.Abort(_('outstanding uncommitted merge'))
368 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
361 369 modified, added, removed, deleted = repo.status()[:4]
362 370 if modified or added or removed or deleted:
363 raise error.Abort(_('uncommitted changes'))
371 raise error.Abort(_('uncommitted changes'), hint=hint)
364 372 ctx = repo[None]
365 373 for s in sorted(ctx.substate):
366 ctx.sub(s).bailifchanged()
374 ctx.sub(s).bailifchanged(hint=hint)
367 375
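The hunk above is the heart of this change: bailifchanged() now accepts an optional 'hint' that is forwarded to each Abort it raises, so a caller such as the rebase extension can tell the user how to proceed when the working directory is dirty. A minimal sketch of how a caller might use the new keyword argument (the hint wording below is illustrative only, not the exact string used by rebase):

    from mercurial import cmdutil
    from mercurial.i18n import _

    def _abortifdirty(repo):
        # Illustrative only: abort with an extra suggestion when the working
        # directory has uncommitted changes or an uncommitted merge.
        cmdutil.bailifchanged(repo,
                              hint=_('use hg status to list changed files'))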
368 376 def logmessage(ui, opts):
369 377 """ get the log message according to -m and -l option """
370 378 message = opts.get('message')
371 379 logfile = opts.get('logfile')
372 380
373 381 if message and logfile:
374 382 raise error.Abort(_('options --message and --logfile are mutually '
375 383 'exclusive'))
376 384 if not message and logfile:
377 385 try:
378 386 if logfile == '-':
379 387 message = ui.fin.read()
380 388 else:
381 389 message = '\n'.join(util.readfile(logfile).splitlines())
382 390 except IOError as inst:
383 391 raise error.Abort(_("can't read commit message '%s': %s") %
384 392 (logfile, inst.strerror))
385 393 return message
386 394
387 395 def mergeeditform(ctxorbool, baseformname):
388 396 """return appropriate editform name (referencing a committemplate)
389 397
390 398 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
391 399 merging is committed.
392 400
393 401 This returns baseformname with '.merge' appended if it is a merge,
394 402 otherwise '.normal' is appended.
395 403 """
396 404 if isinstance(ctxorbool, bool):
397 405 if ctxorbool:
398 406 return baseformname + ".merge"
399 407 elif 1 < len(ctxorbool.parents()):
400 408 return baseformname + ".merge"
401 409
402 410 return baseformname + ".normal"
403 411
404 412 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
405 413 editform='', **opts):
406 414 """get appropriate commit message editor according to '--edit' option
407 415
408 416 'finishdesc' is a function to be called with edited commit message
409 417 (= 'description' of the new changeset) just after editing, but
410 418 before checking empty-ness. It should return actual text to be
411 419 stored into history. This allows changing the description before
412 420 storing.
413 421
414 422 'extramsg' is an extra message to be shown in the editor instead of
415 423 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
416 424 is automatically added.
417 425
418 426 'editform' is a dot-separated list of names, to distinguish
419 427 the purpose of commit text editing.
420 428
421 429 'getcommiteditor' returns 'commitforceeditor' regardless of
422 430 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
423 431 they are specific for usage in MQ.
424 432 """
425 433 if edit or finishdesc or extramsg:
426 434 return lambda r, c, s: commitforceeditor(r, c, s,
427 435 finishdesc=finishdesc,
428 436 extramsg=extramsg,
429 437 editform=editform)
430 438 elif editform:
431 439 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
432 440 else:
433 441 return commiteditor
434 442
435 443 def loglimit(opts):
436 444 """get the log limit according to option -l/--limit"""
437 445 limit = opts.get('limit')
438 446 if limit:
439 447 try:
440 448 limit = int(limit)
441 449 except ValueError:
442 450 raise error.Abort(_('limit must be a positive integer'))
443 451 if limit <= 0:
444 452 raise error.Abort(_('limit must be positive'))
445 453 else:
446 454 limit = None
447 455 return limit
448 456
449 457 def makefilename(repo, pat, node, desc=None,
450 458 total=None, seqno=None, revwidth=None, pathname=None):
451 459 node_expander = {
452 460 'H': lambda: hex(node),
453 461 'R': lambda: str(repo.changelog.rev(node)),
454 462 'h': lambda: short(node),
455 463 'm': lambda: re.sub('[^\w]', '_', str(desc))
456 464 }
457 465 expander = {
458 466 '%': lambda: '%',
459 467 'b': lambda: os.path.basename(repo.root),
460 468 }
461 469
462 470 try:
463 471 if node:
464 472 expander.update(node_expander)
465 473 if node:
466 474 expander['r'] = (lambda:
467 475 str(repo.changelog.rev(node)).zfill(revwidth or 0))
468 476 if total is not None:
469 477 expander['N'] = lambda: str(total)
470 478 if seqno is not None:
471 479 expander['n'] = lambda: str(seqno)
472 480 if total is not None and seqno is not None:
473 481 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
474 482 if pathname is not None:
475 483 expander['s'] = lambda: os.path.basename(pathname)
476 484 expander['d'] = lambda: os.path.dirname(pathname) or '.'
477 485 expander['p'] = lambda: pathname
478 486
479 487 newname = []
480 488 patlen = len(pat)
481 489 i = 0
482 490 while i < patlen:
483 491 c = pat[i]
484 492 if c == '%':
485 493 i += 1
486 494 c = pat[i]
487 495 c = expander[c]()
488 496 newname.append(c)
489 497 i += 1
490 498 return ''.join(newname)
491 499 except KeyError as inst:
492 500 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
493 501 inst.args[0])
494 502
495 503 class _unclosablefile(object):
496 504 def __init__(self, fp):
497 505 self._fp = fp
498 506
499 507 def close(self):
500 508 pass
501 509
502 510 def __iter__(self):
503 511 return iter(self._fp)
504 512
505 513 def __getattr__(self, attr):
506 514 return getattr(self._fp, attr)
507 515
508 516 def __enter__(self):
509 517 return self
510 518
511 519 def __exit__(self, exc_type, exc_value, exc_tb):
512 520 pass
513 521
514 522 def makefileobj(repo, pat, node=None, desc=None, total=None,
515 523 seqno=None, revwidth=None, mode='wb', modemap=None,
516 524 pathname=None):
517 525
518 526 writable = mode not in ('r', 'rb')
519 527
520 528 if not pat or pat == '-':
521 529 if writable:
522 530 fp = repo.ui.fout
523 531 else:
524 532 fp = repo.ui.fin
525 533 return _unclosablefile(fp)
526 534 if util.safehasattr(pat, 'write') and writable:
527 535 return pat
528 536 if util.safehasattr(pat, 'read') and 'r' in mode:
529 537 return pat
530 538 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
531 539 if modemap is not None:
532 540 mode = modemap.get(fn, mode)
533 541 if mode == 'wb':
534 542 modemap[fn] = 'ab'
535 543 return open(fn, mode)
536 544
537 545 def openrevlog(repo, cmd, file_, opts):
538 546 """opens the changelog, manifest, a filelog or a given revlog"""
539 547 cl = opts['changelog']
540 548 mf = opts['manifest']
541 549 dir = opts['dir']
542 550 msg = None
543 551 if cl and mf:
544 552 msg = _('cannot specify --changelog and --manifest at the same time')
545 553 elif cl and dir:
546 554 msg = _('cannot specify --changelog and --dir at the same time')
547 555 elif cl or mf or dir:
548 556 if file_:
549 557 msg = _('cannot specify filename with --changelog or --manifest')
550 558 elif not repo:
551 559 msg = _('cannot specify --changelog or --manifest or --dir '
552 560 'without a repository')
553 561 if msg:
554 562 raise error.Abort(msg)
555 563
556 564 r = None
557 565 if repo:
558 566 if cl:
559 567 r = repo.unfiltered().changelog
560 568 elif dir:
561 569 if 'treemanifest' not in repo.requirements:
562 570 raise error.Abort(_("--dir can only be used on repos with "
563 571 "treemanifest enabled"))
564 572 dirlog = repo.manifestlog._revlog.dirlog(dir)
565 573 if len(dirlog):
566 574 r = dirlog
567 575 elif mf:
568 576 r = repo.manifestlog._revlog
569 577 elif file_:
570 578 filelog = repo.file(file_)
571 579 if len(filelog):
572 580 r = filelog
573 581 if not r:
574 582 if not file_:
575 583 raise error.CommandError(cmd, _('invalid arguments'))
576 584 if not os.path.isfile(file_):
577 585 raise error.Abort(_("revlog '%s' not found") % file_)
578 586 r = revlog.revlog(scmutil.opener(pycompat.getcwd(), audit=False),
579 587 file_[:-2] + ".i")
580 588 return r
581 589
582 590 def copy(ui, repo, pats, opts, rename=False):
583 591 # called with the repo lock held
584 592 #
585 593 # hgsep => pathname that uses "/" to separate directories
586 594 # ossep => pathname that uses os.sep to separate directories
587 595 cwd = repo.getcwd()
588 596 targets = {}
589 597 after = opts.get("after")
590 598 dryrun = opts.get("dry_run")
591 599 wctx = repo[None]
592 600
593 601 def walkpat(pat):
594 602 srcs = []
595 603 if after:
596 604 badstates = '?'
597 605 else:
598 606 badstates = '?r'
599 607 m = scmutil.match(repo[None], [pat], opts, globbed=True)
600 608 for abs in repo.walk(m):
601 609 state = repo.dirstate[abs]
602 610 rel = m.rel(abs)
603 611 exact = m.exact(abs)
604 612 if state in badstates:
605 613 if exact and state == '?':
606 614 ui.warn(_('%s: not copying - file is not managed\n') % rel)
607 615 if exact and state == 'r':
608 616 ui.warn(_('%s: not copying - file has been marked for'
609 617 ' remove\n') % rel)
610 618 continue
611 619 # abs: hgsep
612 620 # rel: ossep
613 621 srcs.append((abs, rel, exact))
614 622 return srcs
615 623
616 624 # abssrc: hgsep
617 625 # relsrc: ossep
618 626 # otarget: ossep
619 627 def copyfile(abssrc, relsrc, otarget, exact):
620 628 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
621 629 if '/' in abstarget:
622 630 # We cannot normalize abstarget itself, this would prevent
623 631 # case only renames, like a => A.
624 632 abspath, absname = abstarget.rsplit('/', 1)
625 633 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
626 634 reltarget = repo.pathto(abstarget, cwd)
627 635 target = repo.wjoin(abstarget)
628 636 src = repo.wjoin(abssrc)
629 637 state = repo.dirstate[abstarget]
630 638
631 639 scmutil.checkportable(ui, abstarget)
632 640
633 641 # check for collisions
634 642 prevsrc = targets.get(abstarget)
635 643 if prevsrc is not None:
636 644 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
637 645 (reltarget, repo.pathto(abssrc, cwd),
638 646 repo.pathto(prevsrc, cwd)))
639 647 return
640 648
641 649 # check for overwrites
642 650 exists = os.path.lexists(target)
643 651 samefile = False
644 652 if exists and abssrc != abstarget:
645 653 if (repo.dirstate.normalize(abssrc) ==
646 654 repo.dirstate.normalize(abstarget)):
647 655 if not rename:
648 656 ui.warn(_("%s: can't copy - same file\n") % reltarget)
649 657 return
650 658 exists = False
651 659 samefile = True
652 660
653 661 if not after and exists or after and state in 'mn':
654 662 if not opts['force']:
655 663 if state in 'mn':
656 664 msg = _('%s: not overwriting - file already committed\n')
657 665 if after:
658 666 flags = '--after --force'
659 667 else:
660 668 flags = '--force'
661 669 if rename:
662 670 hint = _('(hg rename %s to replace the file by '
663 671 'recording a rename)\n') % flags
664 672 else:
665 673 hint = _('(hg copy %s to replace the file by '
666 674 'recording a copy)\n') % flags
667 675 else:
668 676 msg = _('%s: not overwriting - file exists\n')
669 677 if rename:
670 678 hint = _('(hg rename --after to record the rename)\n')
671 679 else:
672 680 hint = _('(hg copy --after to record the copy)\n')
673 681 ui.warn(msg % reltarget)
674 682 ui.warn(hint)
675 683 return
676 684
677 685 if after:
678 686 if not exists:
679 687 if rename:
680 688 ui.warn(_('%s: not recording move - %s does not exist\n') %
681 689 (relsrc, reltarget))
682 690 else:
683 691 ui.warn(_('%s: not recording copy - %s does not exist\n') %
684 692 (relsrc, reltarget))
685 693 return
686 694 elif not dryrun:
687 695 try:
688 696 if exists:
689 697 os.unlink(target)
690 698 targetdir = os.path.dirname(target) or '.'
691 699 if not os.path.isdir(targetdir):
692 700 os.makedirs(targetdir)
693 701 if samefile:
694 702 tmp = target + "~hgrename"
695 703 os.rename(src, tmp)
696 704 os.rename(tmp, target)
697 705 else:
698 706 util.copyfile(src, target)
699 707 srcexists = True
700 708 except IOError as inst:
701 709 if inst.errno == errno.ENOENT:
702 710 ui.warn(_('%s: deleted in working directory\n') % relsrc)
703 711 srcexists = False
704 712 else:
705 713 ui.warn(_('%s: cannot copy - %s\n') %
706 714 (relsrc, inst.strerror))
707 715 return True # report a failure
708 716
709 717 if ui.verbose or not exact:
710 718 if rename:
711 719 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
712 720 else:
713 721 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
714 722
715 723 targets[abstarget] = abssrc
716 724
717 725 # fix up dirstate
718 726 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
719 727 dryrun=dryrun, cwd=cwd)
720 728 if rename and not dryrun:
721 729 if not after and srcexists and not samefile:
722 730 util.unlinkpath(repo.wjoin(abssrc))
723 731 wctx.forget([abssrc])
724 732
725 733 # pat: ossep
726 734 # dest ossep
727 735 # srcs: list of (hgsep, hgsep, ossep, bool)
728 736 # return: function that takes hgsep and returns ossep
729 737 def targetpathfn(pat, dest, srcs):
730 738 if os.path.isdir(pat):
731 739 abspfx = pathutil.canonpath(repo.root, cwd, pat)
732 740 abspfx = util.localpath(abspfx)
733 741 if destdirexists:
734 742 striplen = len(os.path.split(abspfx)[0])
735 743 else:
736 744 striplen = len(abspfx)
737 745 if striplen:
738 746 striplen += len(pycompat.ossep)
739 747 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
740 748 elif destdirexists:
741 749 res = lambda p: os.path.join(dest,
742 750 os.path.basename(util.localpath(p)))
743 751 else:
744 752 res = lambda p: dest
745 753 return res
746 754
747 755 # pat: ossep
748 756 # dest ossep
749 757 # srcs: list of (hgsep, hgsep, ossep, bool)
750 758 # return: function that takes hgsep and returns ossep
751 759 def targetpathafterfn(pat, dest, srcs):
752 760 if matchmod.patkind(pat):
753 761 # a mercurial pattern
754 762 res = lambda p: os.path.join(dest,
755 763 os.path.basename(util.localpath(p)))
756 764 else:
757 765 abspfx = pathutil.canonpath(repo.root, cwd, pat)
758 766 if len(abspfx) < len(srcs[0][0]):
759 767 # A directory. Either the target path contains the last
760 768 # component of the source path or it does not.
761 769 def evalpath(striplen):
762 770 score = 0
763 771 for s in srcs:
764 772 t = os.path.join(dest, util.localpath(s[0])[striplen:])
765 773 if os.path.lexists(t):
766 774 score += 1
767 775 return score
768 776
769 777 abspfx = util.localpath(abspfx)
770 778 striplen = len(abspfx)
771 779 if striplen:
772 780 striplen += len(pycompat.ossep)
773 781 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
774 782 score = evalpath(striplen)
775 783 striplen1 = len(os.path.split(abspfx)[0])
776 784 if striplen1:
777 785 striplen1 += len(pycompat.ossep)
778 786 if evalpath(striplen1) > score:
779 787 striplen = striplen1
780 788 res = lambda p: os.path.join(dest,
781 789 util.localpath(p)[striplen:])
782 790 else:
783 791 # a file
784 792 if destdirexists:
785 793 res = lambda p: os.path.join(dest,
786 794 os.path.basename(util.localpath(p)))
787 795 else:
788 796 res = lambda p: dest
789 797 return res
790 798
791 799 pats = scmutil.expandpats(pats)
792 800 if not pats:
793 801 raise error.Abort(_('no source or destination specified'))
794 802 if len(pats) == 1:
795 803 raise error.Abort(_('no destination specified'))
796 804 dest = pats.pop()
797 805 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
798 806 if not destdirexists:
799 807 if len(pats) > 1 or matchmod.patkind(pats[0]):
800 808 raise error.Abort(_('with multiple sources, destination must be an '
801 809 'existing directory'))
802 810 if util.endswithsep(dest):
803 811 raise error.Abort(_('destination %s is not a directory') % dest)
804 812
805 813 tfn = targetpathfn
806 814 if after:
807 815 tfn = targetpathafterfn
808 816 copylist = []
809 817 for pat in pats:
810 818 srcs = walkpat(pat)
811 819 if not srcs:
812 820 continue
813 821 copylist.append((tfn(pat, dest, srcs), srcs))
814 822 if not copylist:
815 823 raise error.Abort(_('no files to copy'))
816 824
817 825 errors = 0
818 826 for targetpath, srcs in copylist:
819 827 for abssrc, relsrc, exact in srcs:
820 828 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
821 829 errors += 1
822 830
823 831 if errors:
824 832 ui.warn(_('(consider using --after)\n'))
825 833
826 834 return errors != 0
827 835
828 836 ## facility to let extension process additional data into an import patch
829 837 # list of identifiers to be executed in order
830 838 extrapreimport = [] # run before commit
831 839 extrapostimport = [] # run after commit
832 840 # mapping from identifier to actual import function
833 841 #
834 842 # 'preimport' are run before the commit is made and are provided the following
835 843 # arguments:
836 844 # - repo: the localrepository instance,
837 845 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
838 846 # - extra: the future extra dictionary of the changeset, please mutate it,
839 847 # - opts: the import options.
840 848 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
841 849 # mutation of the in-memory commit and more. Feel free to rework the code to get
842 850 # there.
843 851 extrapreimportmap = {}
844 852 # 'postimport' are run after the commit is made and are provided the following
845 853 # argument:
846 854 # - ctx: the changectx created by import.
847 855 extrapostimportmap = {}
848 856
849 857 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
850 858 """Utility function used by commands.import to import a single patch
851 859
852 860 This function is explicitly defined here to help the evolve extension to
853 861 wrap this part of the import logic.
854 862
855 863 The API is currently a bit ugly because it is a simple code translation from
856 864 the import command. Feel free to make it better.
857 865
858 866 :hunk: a patch (as a binary string)
859 867 :parents: nodes that will be parent of the created commit
860 868 :opts: the full dict of options passed to the import command
861 869 :msgs: list to save commit message to.
862 870 (used in case we need to save it when failing)
863 871 :updatefunc: a function that updates a repo to a given node
864 872 updatefunc(<repo>, <node>)
865 873 """
866 874 # avoid cycle context -> subrepo -> cmdutil
867 875 from . import context
868 876 extractdata = patch.extract(ui, hunk)
869 877 tmpname = extractdata.get('filename')
870 878 message = extractdata.get('message')
871 879 user = opts.get('user') or extractdata.get('user')
872 880 date = opts.get('date') or extractdata.get('date')
873 881 branch = extractdata.get('branch')
874 882 nodeid = extractdata.get('nodeid')
875 883 p1 = extractdata.get('p1')
876 884 p2 = extractdata.get('p2')
877 885
878 886 nocommit = opts.get('no_commit')
879 887 importbranch = opts.get('import_branch')
880 888 update = not opts.get('bypass')
881 889 strip = opts["strip"]
882 890 prefix = opts["prefix"]
883 891 sim = float(opts.get('similarity') or 0)
884 892 if not tmpname:
885 893 return (None, None, False)
886 894
887 895 rejects = False
888 896
889 897 try:
890 898 cmdline_message = logmessage(ui, opts)
891 899 if cmdline_message:
892 900 # pickup the cmdline msg
893 901 message = cmdline_message
894 902 elif message:
895 903 # pickup the patch msg
896 904 message = message.strip()
897 905 else:
898 906 # launch the editor
899 907 message = None
900 908 ui.debug('message:\n%s\n' % message)
901 909
902 910 if len(parents) == 1:
903 911 parents.append(repo[nullid])
904 912 if opts.get('exact'):
905 913 if not nodeid or not p1:
906 914 raise error.Abort(_('not a Mercurial patch'))
907 915 p1 = repo[p1]
908 916 p2 = repo[p2 or nullid]
909 917 elif p2:
910 918 try:
911 919 p1 = repo[p1]
912 920 p2 = repo[p2]
913 921 # Without any options, consider p2 only if the
914 922 # patch is being applied on top of the recorded
915 923 # first parent.
916 924 if p1 != parents[0]:
917 925 p1 = parents[0]
918 926 p2 = repo[nullid]
919 927 except error.RepoError:
920 928 p1, p2 = parents
921 929 if p2.node() == nullid:
922 930 ui.warn(_("warning: import the patch as a normal revision\n"
923 931 "(use --exact to import the patch as a merge)\n"))
924 932 else:
925 933 p1, p2 = parents
926 934
927 935 n = None
928 936 if update:
929 937 if p1 != parents[0]:
930 938 updatefunc(repo, p1.node())
931 939 if p2 != parents[1]:
932 940 repo.setparents(p1.node(), p2.node())
933 941
934 942 if opts.get('exact') or importbranch:
935 943 repo.dirstate.setbranch(branch or 'default')
936 944
937 945 partial = opts.get('partial', False)
938 946 files = set()
939 947 try:
940 948 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
941 949 files=files, eolmode=None, similarity=sim / 100.0)
942 950 except patch.PatchError as e:
943 951 if not partial:
944 952 raise error.Abort(str(e))
945 953 if partial:
946 954 rejects = True
947 955
948 956 files = list(files)
949 957 if nocommit:
950 958 if message:
951 959 msgs.append(message)
952 960 else:
953 961 if opts.get('exact') or p2:
954 962 # If you got here, you either use --force and know what
955 963 # you are doing or used --exact or a merge patch while
956 964 # being updated to its first parent.
957 965 m = None
958 966 else:
959 967 m = scmutil.matchfiles(repo, files or [])
960 968 editform = mergeeditform(repo[None], 'import.normal')
961 969 if opts.get('exact'):
962 970 editor = None
963 971 else:
964 972 editor = getcommiteditor(editform=editform, **opts)
965 973 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
966 974 extra = {}
967 975 for idfunc in extrapreimport:
968 976 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
969 977 try:
970 978 if partial:
971 979 repo.ui.setconfig('ui', 'allowemptycommit', True)
972 980 n = repo.commit(message, user,
973 981 date, match=m,
974 982 editor=editor, extra=extra)
975 983 for idfunc in extrapostimport:
976 984 extrapostimportmap[idfunc](repo[n])
977 985 finally:
978 986 repo.ui.restoreconfig(allowemptyback)
979 987 else:
980 988 if opts.get('exact') or importbranch:
981 989 branch = branch or 'default'
982 990 else:
983 991 branch = p1.branch()
984 992 store = patch.filestore()
985 993 try:
986 994 files = set()
987 995 try:
988 996 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
989 997 files, eolmode=None)
990 998 except patch.PatchError as e:
991 999 raise error.Abort(str(e))
992 1000 if opts.get('exact'):
993 1001 editor = None
994 1002 else:
995 1003 editor = getcommiteditor(editform='import.bypass')
996 1004 memctx = context.makememctx(repo, (p1.node(), p2.node()),
997 1005 message,
998 1006 user,
999 1007 date,
1000 1008 branch, files, store,
1001 1009 editor=editor)
1002 1010 n = memctx.commit()
1003 1011 finally:
1004 1012 store.close()
1005 1013 if opts.get('exact') and nocommit:
1006 1014 # --exact with --no-commit is still useful in that it does merge
1007 1015 # and branch bits
1008 1016 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1009 1017 elif opts.get('exact') and hex(n) != nodeid:
1010 1018 raise error.Abort(_('patch is damaged or loses information'))
1011 1019 msg = _('applied to working directory')
1012 1020 if n:
1013 1021 # i18n: refers to a short changeset id
1014 1022 msg = _('created %s') % short(n)
1015 1023 return (msg, n, rejects)
1016 1024 finally:
1017 1025 os.unlink(tmpname)
1018 1026
1019 1027 # facility to let extensions include additional data in an exported patch
1020 1028 # list of identifiers to be executed in order
1021 1029 extraexport = []
1022 1030 # mapping from identifier to actual export function
1023 1031 # function has to return a string to be added to the header or None
1024 1032 # it is given two arguments (sequencenumber, changectx)
1025 1033 extraexportmap = {}
1026 1034
1027 1035 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1028 1036 opts=None, match=None):
1029 1037 '''export changesets as hg patches.'''
1030 1038
1031 1039 total = len(revs)
1032 1040 revwidth = max([len(str(rev)) for rev in revs])
1033 1041 filemode = {}
1034 1042
1035 1043 def single(rev, seqno, fp):
1036 1044 ctx = repo[rev]
1037 1045 node = ctx.node()
1038 1046 parents = [p.node() for p in ctx.parents() if p]
1039 1047 branch = ctx.branch()
1040 1048 if switch_parent:
1041 1049 parents.reverse()
1042 1050
1043 1051 if parents:
1044 1052 prev = parents[0]
1045 1053 else:
1046 1054 prev = nullid
1047 1055
1048 1056 shouldclose = False
1049 1057 if not fp and len(template) > 0:
1050 1058 desc_lines = ctx.description().rstrip().split('\n')
1051 1059 desc = desc_lines[0] #Commit always has a first line.
1052 1060 fp = makefileobj(repo, template, node, desc=desc, total=total,
1053 1061 seqno=seqno, revwidth=revwidth, mode='wb',
1054 1062 modemap=filemode)
1055 1063 shouldclose = True
1056 1064 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1057 1065 repo.ui.note("%s\n" % fp.name)
1058 1066
1059 1067 if not fp:
1060 1068 write = repo.ui.write
1061 1069 else:
1062 1070 def write(s, **kw):
1063 1071 fp.write(s)
1064 1072
1065 1073 write("# HG changeset patch\n")
1066 1074 write("# User %s\n" % ctx.user())
1067 1075 write("# Date %d %d\n" % ctx.date())
1068 1076 write("# %s\n" % util.datestr(ctx.date()))
1069 1077 if branch and branch != 'default':
1070 1078 write("# Branch %s\n" % branch)
1071 1079 write("# Node ID %s\n" % hex(node))
1072 1080 write("# Parent %s\n" % hex(prev))
1073 1081 if len(parents) > 1:
1074 1082 write("# Parent %s\n" % hex(parents[1]))
1075 1083
1076 1084 for headerid in extraexport:
1077 1085 header = extraexportmap[headerid](seqno, ctx)
1078 1086 if header is not None:
1079 1087 write('# %s\n' % header)
1080 1088 write(ctx.description().rstrip())
1081 1089 write("\n\n")
1082 1090
1083 1091 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1084 1092 write(chunk, label=label)
1085 1093
1086 1094 if shouldclose:
1087 1095 fp.close()
1088 1096
1089 1097 for seqno, rev in enumerate(revs):
1090 1098 single(rev, seqno + 1, fp)
1091 1099
1092 1100 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1093 1101 changes=None, stat=False, fp=None, prefix='',
1094 1102 root='', listsubrepos=False):
1095 1103 '''show diff or diffstat.'''
1096 1104 if fp is None:
1097 1105 write = ui.write
1098 1106 else:
1099 1107 def write(s, **kw):
1100 1108 fp.write(s)
1101 1109
1102 1110 if root:
1103 1111 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1104 1112 else:
1105 1113 relroot = ''
1106 1114 if relroot != '':
1107 1115 # XXX relative roots currently don't work if the root is within a
1108 1116 # subrepo
1109 1117 uirelroot = match.uipath(relroot)
1110 1118 relroot += '/'
1111 1119 for matchroot in match.files():
1112 1120 if not matchroot.startswith(relroot):
1113 1121 ui.warn(_('warning: %s not inside relative root %s\n') % (
1114 1122 match.uipath(matchroot), uirelroot))
1115 1123
1116 1124 if stat:
1117 1125 diffopts = diffopts.copy(context=0)
1118 1126 width = 80
1119 1127 if not ui.plain():
1120 1128 width = ui.termwidth()
1121 1129 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1122 1130 prefix=prefix, relroot=relroot)
1123 1131 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1124 1132 width=width):
1125 1133 write(chunk, label=label)
1126 1134 else:
1127 1135 for chunk, label in patch.diffui(repo, node1, node2, match,
1128 1136 changes, diffopts, prefix=prefix,
1129 1137 relroot=relroot):
1130 1138 write(chunk, label=label)
1131 1139
1132 1140 if listsubrepos:
1133 1141 ctx1 = repo[node1]
1134 1142 ctx2 = repo[node2]
1135 1143 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1136 1144 tempnode2 = node2
1137 1145 try:
1138 1146 if node2 is not None:
1139 1147 tempnode2 = ctx2.substate[subpath][1]
1140 1148 except KeyError:
1141 1149 # A subrepo that existed in node1 was deleted between node1 and
1142 1150 # node2 (inclusive). Thus, ctx2's substate won't contain that
1143 1151 # subpath. The best we can do is to ignore it.
1144 1152 tempnode2 = None
1145 1153 submatch = matchmod.subdirmatcher(subpath, match)
1146 1154 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1147 1155 stat=stat, fp=fp, prefix=prefix)
1148 1156
1149 1157 def _changesetlabels(ctx):
1150 1158 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1151 1159 if ctx.troubled():
1152 1160 labels.append('changeset.troubled')
1153 1161 for trouble in ctx.troubles():
1154 1162 labels.append('trouble.%s' % trouble)
1155 1163 return ' '.join(labels)
1156 1164
1157 1165 class changeset_printer(object):
1158 1166 '''show changeset information when templating not requested.'''
1159 1167
1160 1168 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1161 1169 self.ui = ui
1162 1170 self.repo = repo
1163 1171 self.buffered = buffered
1164 1172 self.matchfn = matchfn
1165 1173 self.diffopts = diffopts
1166 1174 self.header = {}
1167 1175 self.hunk = {}
1168 1176 self.lastheader = None
1169 1177 self.footer = None
1170 1178
1171 1179 def flush(self, ctx):
1172 1180 rev = ctx.rev()
1173 1181 if rev in self.header:
1174 1182 h = self.header[rev]
1175 1183 if h != self.lastheader:
1176 1184 self.lastheader = h
1177 1185 self.ui.write(h)
1178 1186 del self.header[rev]
1179 1187 if rev in self.hunk:
1180 1188 self.ui.write(self.hunk[rev])
1181 1189 del self.hunk[rev]
1182 1190 return 1
1183 1191 return 0
1184 1192
1185 1193 def close(self):
1186 1194 if self.footer:
1187 1195 self.ui.write(self.footer)
1188 1196
1189 1197 def show(self, ctx, copies=None, matchfn=None, **props):
1190 1198 if self.buffered:
1191 1199 self.ui.pushbuffer(labeled=True)
1192 1200 self._show(ctx, copies, matchfn, props)
1193 1201 self.hunk[ctx.rev()] = self.ui.popbuffer()
1194 1202 else:
1195 1203 self._show(ctx, copies, matchfn, props)
1196 1204
1197 1205 def _show(self, ctx, copies, matchfn, props):
1198 1206 '''show a single changeset or file revision'''
1199 1207 changenode = ctx.node()
1200 1208 rev = ctx.rev()
1201 1209 if self.ui.debugflag:
1202 1210 hexfunc = hex
1203 1211 else:
1204 1212 hexfunc = short
1205 1213 # as of now, wctx.node() and wctx.rev() return None, but we want to
1206 1214 # show the same values as {node} and {rev} templatekw
1207 1215 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1208 1216
1209 1217 if self.ui.quiet:
1210 1218 self.ui.write("%d:%s\n" % revnode, label='log.node')
1211 1219 return
1212 1220
1213 1221 date = util.datestr(ctx.date())
1214 1222
1215 1223 # i18n: column positioning for "hg log"
1216 1224 self.ui.write(_("changeset: %d:%s\n") % revnode,
1217 1225 label=_changesetlabels(ctx))
1218 1226
1219 1227 # branches are shown first before any other names due to backwards
1220 1228 # compatibility
1221 1229 branch = ctx.branch()
1222 1230 # don't show the default branch name
1223 1231 if branch != 'default':
1224 1232 # i18n: column positioning for "hg log"
1225 1233 self.ui.write(_("branch: %s\n") % branch,
1226 1234 label='log.branch')
1227 1235
1228 1236 for nsname, ns in self.repo.names.iteritems():
1229 1237 # branches has special logic already handled above, so here we just
1230 1238 # skip it
1231 1239 if nsname == 'branches':
1232 1240 continue
1233 1241 # we will use the templatename as the color name since those two
1234 1242 # should be the same
1235 1243 for name in ns.names(self.repo, changenode):
1236 1244 self.ui.write(ns.logfmt % name,
1237 1245 label='log.%s' % ns.colorname)
1238 1246 if self.ui.debugflag:
1239 1247 # i18n: column positioning for "hg log"
1240 1248 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1241 1249 label='log.phase')
1242 1250 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1243 1251 label = 'log.parent changeset.%s' % pctx.phasestr()
1244 1252 # i18n: column positioning for "hg log"
1245 1253 self.ui.write(_("parent: %d:%s\n")
1246 1254 % (pctx.rev(), hexfunc(pctx.node())),
1247 1255 label=label)
1248 1256
1249 1257 if self.ui.debugflag and rev is not None:
1250 1258 mnode = ctx.manifestnode()
1251 1259 # i18n: column positioning for "hg log"
1252 1260 self.ui.write(_("manifest: %d:%s\n") %
1253 1261 (self.repo.manifestlog._revlog.rev(mnode),
1254 1262 hex(mnode)),
1255 1263 label='ui.debug log.manifest')
1256 1264 # i18n: column positioning for "hg log"
1257 1265 self.ui.write(_("user: %s\n") % ctx.user(),
1258 1266 label='log.user')
1259 1267 # i18n: column positioning for "hg log"
1260 1268 self.ui.write(_("date: %s\n") % date,
1261 1269 label='log.date')
1262 1270
1263 1271 if ctx.troubled():
1264 1272 # i18n: column positioning for "hg log"
1265 1273 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1266 1274 label='log.trouble')
1267 1275
1268 1276 if self.ui.debugflag:
1269 1277 files = ctx.p1().status(ctx)[:3]
1270 1278 for key, value in zip([# i18n: column positioning for "hg log"
1271 1279 _("files:"),
1272 1280 # i18n: column positioning for "hg log"
1273 1281 _("files+:"),
1274 1282 # i18n: column positioning for "hg log"
1275 1283 _("files-:")], files):
1276 1284 if value:
1277 1285 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1278 1286 label='ui.debug log.files')
1279 1287 elif ctx.files() and self.ui.verbose:
1280 1288 # i18n: column positioning for "hg log"
1281 1289 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1282 1290 label='ui.note log.files')
1283 1291 if copies and self.ui.verbose:
1284 1292 copies = ['%s (%s)' % c for c in copies]
1285 1293 # i18n: column positioning for "hg log"
1286 1294 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1287 1295 label='ui.note log.copies')
1288 1296
1289 1297 extra = ctx.extra()
1290 1298 if extra and self.ui.debugflag:
1291 1299 for key, value in sorted(extra.items()):
1292 1300 # i18n: column positioning for "hg log"
1293 1301 self.ui.write(_("extra: %s=%s\n")
1294 1302 % (key, value.encode('string_escape')),
1295 1303 label='ui.debug log.extra')
1296 1304
1297 1305 description = ctx.description().strip()
1298 1306 if description:
1299 1307 if self.ui.verbose:
1300 1308 self.ui.write(_("description:\n"),
1301 1309 label='ui.note log.description')
1302 1310 self.ui.write(description,
1303 1311 label='ui.note log.description')
1304 1312 self.ui.write("\n\n")
1305 1313 else:
1306 1314 # i18n: column positioning for "hg log"
1307 1315 self.ui.write(_("summary: %s\n") %
1308 1316 description.splitlines()[0],
1309 1317 label='log.summary')
1310 1318 self.ui.write("\n")
1311 1319
1312 1320 self.showpatch(ctx, matchfn)
1313 1321
1314 1322 def showpatch(self, ctx, matchfn):
1315 1323 if not matchfn:
1316 1324 matchfn = self.matchfn
1317 1325 if matchfn:
1318 1326 stat = self.diffopts.get('stat')
1319 1327 diff = self.diffopts.get('patch')
1320 1328 diffopts = patch.diffallopts(self.ui, self.diffopts)
1321 1329 node = ctx.node()
1322 1330 prev = ctx.p1().node()
1323 1331 if stat:
1324 1332 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1325 1333 match=matchfn, stat=True)
1326 1334 if diff:
1327 1335 if stat:
1328 1336 self.ui.write("\n")
1329 1337 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1330 1338 match=matchfn, stat=False)
1331 1339 self.ui.write("\n")
1332 1340
1333 1341 class jsonchangeset(changeset_printer):
1334 1342 '''format changeset information.'''
1335 1343
1336 1344 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1337 1345 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1338 1346 self.cache = {}
1339 1347 self._first = True
1340 1348
1341 1349 def close(self):
1342 1350 if not self._first:
1343 1351 self.ui.write("\n]\n")
1344 1352 else:
1345 1353 self.ui.write("[]\n")
1346 1354
1347 1355 def _show(self, ctx, copies, matchfn, props):
1348 1356 '''show a single changeset or file revision'''
1349 1357 rev = ctx.rev()
1350 1358 if rev is None:
1351 1359 jrev = jnode = 'null'
1352 1360 else:
1353 1361 jrev = str(rev)
1354 1362 jnode = '"%s"' % hex(ctx.node())
1355 1363 j = encoding.jsonescape
1356 1364
1357 1365 if self._first:
1358 1366 self.ui.write("[\n {")
1359 1367 self._first = False
1360 1368 else:
1361 1369 self.ui.write(",\n {")
1362 1370
1363 1371 if self.ui.quiet:
1364 1372 self.ui.write(('\n "rev": %s') % jrev)
1365 1373 self.ui.write((',\n "node": %s') % jnode)
1366 1374 self.ui.write('\n }')
1367 1375 return
1368 1376
1369 1377 self.ui.write(('\n "rev": %s') % jrev)
1370 1378 self.ui.write((',\n "node": %s') % jnode)
1371 1379 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1372 1380 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1373 1381 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1374 1382 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1375 1383 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1376 1384
1377 1385 self.ui.write((',\n "bookmarks": [%s]') %
1378 1386 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1379 1387 self.ui.write((',\n "tags": [%s]') %
1380 1388 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1381 1389 self.ui.write((',\n "parents": [%s]') %
1382 1390 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1383 1391
1384 1392 if self.ui.debugflag:
1385 1393 if rev is None:
1386 1394 jmanifestnode = 'null'
1387 1395 else:
1388 1396 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1389 1397 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1390 1398
1391 1399 self.ui.write((',\n "extra": {%s}') %
1392 1400 ", ".join('"%s": "%s"' % (j(k), j(v))
1393 1401 for k, v in ctx.extra().items()))
1394 1402
1395 1403 files = ctx.p1().status(ctx)
1396 1404 self.ui.write((',\n "modified": [%s]') %
1397 1405 ", ".join('"%s"' % j(f) for f in files[0]))
1398 1406 self.ui.write((',\n "added": [%s]') %
1399 1407 ", ".join('"%s"' % j(f) for f in files[1]))
1400 1408 self.ui.write((',\n "removed": [%s]') %
1401 1409 ", ".join('"%s"' % j(f) for f in files[2]))
1402 1410
1403 1411 elif self.ui.verbose:
1404 1412 self.ui.write((',\n "files": [%s]') %
1405 1413 ", ".join('"%s"' % j(f) for f in ctx.files()))
1406 1414
1407 1415 if copies:
1408 1416 self.ui.write((',\n "copies": {%s}') %
1409 1417 ", ".join('"%s": "%s"' % (j(k), j(v))
1410 1418 for k, v in copies))
1411 1419
1412 1420 matchfn = self.matchfn
1413 1421 if matchfn:
1414 1422 stat = self.diffopts.get('stat')
1415 1423 diff = self.diffopts.get('patch')
1416 1424 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1417 1425 node, prev = ctx.node(), ctx.p1().node()
1418 1426 if stat:
1419 1427 self.ui.pushbuffer()
1420 1428 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1421 1429 match=matchfn, stat=True)
1422 1430 self.ui.write((',\n "diffstat": "%s"')
1423 1431 % j(self.ui.popbuffer()))
1424 1432 if diff:
1425 1433 self.ui.pushbuffer()
1426 1434 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1427 1435 match=matchfn, stat=False)
1428 1436 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1429 1437
1430 1438 self.ui.write("\n }")
1431 1439
1432 1440 class changeset_templater(changeset_printer):
1433 1441 '''format changeset information.'''
1434 1442
1435 1443 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1436 1444 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1437 1445 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1438 1446 filters = {'formatnode': formatnode}
1439 1447 defaulttempl = {
1440 1448 'parent': '{rev}:{node|formatnode} ',
1441 1449 'manifest': '{rev}:{node|formatnode}',
1442 1450 'file_copy': '{name} ({source})',
1443 1451 'extra': '{key}={value|stringescape}'
1444 1452 }
1445 1453 # filecopy is preserved for compatibility reasons
1446 1454 defaulttempl['filecopy'] = defaulttempl['file_copy']
1447 1455 assert not (tmpl and mapfile)
1448 1456 if mapfile:
1449 1457 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1450 1458 cache=defaulttempl)
1451 1459 else:
1452 1460 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1453 1461 filters=filters,
1454 1462 cache=defaulttempl)
1455 1463
1456 1464 self.cache = {}
1457 1465
1458 1466 # find correct templates for current mode
1459 1467 tmplmodes = [
1460 1468 (True, None),
1461 1469 (self.ui.verbose, 'verbose'),
1462 1470 (self.ui.quiet, 'quiet'),
1463 1471 (self.ui.debugflag, 'debug'),
1464 1472 ]
1465 1473
1466 1474 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1467 1475 'docheader': '', 'docfooter': ''}
1468 1476 for mode, postfix in tmplmodes:
1469 1477 for t in self._parts:
1470 1478 cur = t
1471 1479 if postfix:
1472 1480 cur += "_" + postfix
1473 1481 if mode and cur in self.t:
1474 1482 self._parts[t] = cur
1475 1483
1476 1484 if self._parts['docheader']:
1477 1485 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1478 1486
1479 1487 def close(self):
1480 1488 if self._parts['docfooter']:
1481 1489 if not self.footer:
1482 1490 self.footer = ""
1483 1491 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1484 1492 return super(changeset_templater, self).close()
1485 1493
1486 1494 def _show(self, ctx, copies, matchfn, props):
1487 1495 '''show a single changeset or file revision'''
1488 1496 props = props.copy()
1489 1497 props.update(templatekw.keywords)
1490 1498 props['templ'] = self.t
1491 1499 props['ctx'] = ctx
1492 1500 props['repo'] = self.repo
1493 1501 props['ui'] = self.repo.ui
1494 1502 props['revcache'] = {'copies': copies}
1495 1503 props['cache'] = self.cache
1496 1504
1497 1505 # write header
1498 1506 if self._parts['header']:
1499 1507 h = templater.stringify(self.t(self._parts['header'], **props))
1500 1508 if self.buffered:
1501 1509 self.header[ctx.rev()] = h
1502 1510 else:
1503 1511 if self.lastheader != h:
1504 1512 self.lastheader = h
1505 1513 self.ui.write(h)
1506 1514
1507 1515 # write changeset metadata, then patch if requested
1508 1516 key = self._parts['changeset']
1509 1517 self.ui.write(templater.stringify(self.t(key, **props)))
1510 1518 self.showpatch(ctx, matchfn)
1511 1519
1512 1520 if self._parts['footer']:
1513 1521 if not self.footer:
1514 1522 self.footer = templater.stringify(
1515 1523 self.t(self._parts['footer'], **props))
1516 1524
1517 1525 def gettemplate(ui, tmpl, style):
1518 1526 """
1519 1527 Find the template matching the given template spec or style.
1520 1528 """
1521 1529
1522 1530 # ui settings
1523 1531 if not tmpl and not style: # template are stronger than style
1524 1532 tmpl = ui.config('ui', 'logtemplate')
1525 1533 if tmpl:
1526 1534 return templater.unquotestring(tmpl), None
1527 1535 else:
1528 1536 style = util.expandpath(ui.config('ui', 'style', ''))
1529 1537
1530 1538 if not tmpl and style:
1531 1539 mapfile = style
1532 1540 if not os.path.split(mapfile)[0]:
1533 1541 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1534 1542 or templater.templatepath(mapfile))
1535 1543 if mapname:
1536 1544 mapfile = mapname
1537 1545 return None, mapfile
1538 1546
1539 1547 if not tmpl:
1540 1548 return None, None
1541 1549
1542 1550 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1543 1551
1544 1552 def show_changeset(ui, repo, opts, buffered=False):
1545 1553 """show one changeset using template or regular display.
1546 1554
1547 1555 Display format will be the first non-empty hit of:
1548 1556 1. option 'template'
1549 1557 2. option 'style'
1550 1558 3. [ui] setting 'logtemplate'
1551 1559 4. [ui] setting 'style'
1552 1560 If all of these values are either the unset or the empty string,
1553 1561 regular display via changeset_printer() is done.
1554 1562 """
1555 1563 # options
1556 1564 matchfn = None
1557 1565 if opts.get('patch') or opts.get('stat'):
1558 1566 matchfn = scmutil.matchall(repo)
1559 1567
1560 1568 if opts.get('template') == 'json':
1561 1569 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1562 1570
1563 1571 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1564 1572
1565 1573 if not tmpl and not mapfile:
1566 1574 return changeset_printer(ui, repo, matchfn, opts, buffered)
1567 1575
1568 1576 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1569 1577
1570 1578 def showmarker(fm, marker, index=None):
1571 1579 """utility function to display obsolescence marker in a readable way
1572 1580
1573 1581 To be used by debug function."""
1574 1582 if index is not None:
1575 1583 fm.write('index', '%i ', index)
1576 1584 fm.write('precnode', '%s ', hex(marker.precnode()))
1577 1585 succs = marker.succnodes()
1578 1586 fm.condwrite(succs, 'succnodes', '%s ',
1579 1587 fm.formatlist(map(hex, succs), name='node'))
1580 1588 fm.write('flag', '%X ', marker.flags())
1581 1589 parents = marker.parentnodes()
1582 1590 if parents is not None:
1583 1591 fm.write('parentnodes', '{%s} ',
1584 1592 fm.formatlist(map(hex, parents), name='node', sep=', '))
1585 1593 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1586 1594 meta = marker.metadata().copy()
1587 1595 meta.pop('date', None)
1588 1596 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1589 1597 fm.plain('\n')
1590 1598
1591 1599 def finddate(ui, repo, date):
1592 1600 """Find the tipmost changeset that matches the given date spec"""
1593 1601
1594 1602 df = util.matchdate(date)
1595 1603 m = scmutil.matchall(repo)
1596 1604 results = {}
1597 1605
1598 1606 def prep(ctx, fns):
1599 1607 d = ctx.date()
1600 1608 if df(d[0]):
1601 1609 results[ctx.rev()] = d
1602 1610
1603 1611 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1604 1612 rev = ctx.rev()
1605 1613 if rev in results:
1606 1614 ui.status(_("found revision %s from %s\n") %
1607 1615 (rev, util.datestr(results[rev])))
1608 1616 return str(rev)
1609 1617
1610 1618 raise error.Abort(_("revision matching date not found"))
1611 1619
1612 1620 def increasingwindows(windowsize=8, sizelimit=512):
1613 1621 while True:
1614 1622 yield windowsize
1615 1623 if windowsize < sizelimit:
1616 1624 windowsize *= 2
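# Sketch of the window sizes produced above (default arguments assumed):
#   import itertools
#   list(itertools.islice(increasingwindows(), 8))
#   # -> [8, 16, 32, 64, 128, 256, 512, 512]
# i.e. windows double until sizelimit is reached and then stay capped.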
1617 1625
1618 1626 class FileWalkError(Exception):
1619 1627 pass
1620 1628
1621 1629 def walkfilerevs(repo, match, follow, revs, fncache):
1622 1630 '''Walks the file history for the matched files.
1623 1631
1624 1632 Returns the changeset revs that are involved in the file history.
1625 1633
1626 1634 Throws FileWalkError if the file history can't be walked using
1627 1635 filelogs alone.
1628 1636 '''
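# Typical call shape (mirrors the fast path in walkchangerevs() below):
#   try:
#       wanted = walkfilerevs(repo, match, follow, revs, fncache)
#   except FileWalkError:
#       slowpath = True  # fall back to scanning the changelog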
1629 1637 wanted = set()
1630 1638 copies = []
1631 1639 minrev, maxrev = min(revs), max(revs)
1632 1640 def filerevgen(filelog, last):
1633 1641 """
1634 1642 Only files, no patterns. Check the history of each file.
1635 1643
1636 1644 Examines filelog entries within minrev, maxrev linkrev range
1637 1645 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1638 1646 tuples in backwards order
1639 1647 """
1640 1648 cl_count = len(repo)
1641 1649 revs = []
1642 1650 for j in xrange(0, last + 1):
1643 1651 linkrev = filelog.linkrev(j)
1644 1652 if linkrev < minrev:
1645 1653 continue
1646 1654 # only yield rev for which we have the changelog, it can
1647 1655 # happen while doing "hg log" during a pull or commit
1648 1656 if linkrev >= cl_count:
1649 1657 break
1650 1658
1651 1659 parentlinkrevs = []
1652 1660 for p in filelog.parentrevs(j):
1653 1661 if p != nullrev:
1654 1662 parentlinkrevs.append(filelog.linkrev(p))
1655 1663 n = filelog.node(j)
1656 1664 revs.append((linkrev, parentlinkrevs,
1657 1665 follow and filelog.renamed(n)))
1658 1666
1659 1667 return reversed(revs)
1660 1668 def iterfiles():
1661 1669 pctx = repo['.']
1662 1670 for filename in match.files():
1663 1671 if follow:
1664 1672 if filename not in pctx:
1665 1673 raise error.Abort(_('cannot follow file not in parent '
1666 1674 'revision: "%s"') % filename)
1667 1675 yield filename, pctx[filename].filenode()
1668 1676 else:
1669 1677 yield filename, None
1670 1678 for filename_node in copies:
1671 1679 yield filename_node
1672 1680
1673 1681 for file_, node in iterfiles():
1674 1682 filelog = repo.file(file_)
1675 1683 if not len(filelog):
1676 1684 if node is None:
1677 1685 # A zero count may be a directory or deleted file, so
1678 1686 # try to find matching entries on the slow path.
1679 1687 if follow:
1680 1688 raise error.Abort(
1681 1689 _('cannot follow nonexistent file: "%s"') % file_)
1682 1690 raise FileWalkError("Cannot walk via filelog")
1683 1691 else:
1684 1692 continue
1685 1693
1686 1694 if node is None:
1687 1695 last = len(filelog) - 1
1688 1696 else:
1689 1697 last = filelog.rev(node)
1690 1698
1691 1699 # keep track of all ancestors of the file
1692 1700 ancestors = set([filelog.linkrev(last)])
1693 1701
1694 1702 # iterate from latest to oldest revision
1695 1703 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1696 1704 if not follow:
1697 1705 if rev > maxrev:
1698 1706 continue
1699 1707 else:
1700 1708 # Note that last might not be the first interesting
1701 1709 # rev to us:
1702 1710 # if the file has been changed after maxrev, we'll
1703 1711 # have linkrev(last) > maxrev, and we still need
1704 1712 # to explore the file graph
1705 1713 if rev not in ancestors:
1706 1714 continue
1707 1715 # XXX insert 1327 fix here
1708 1716 if flparentlinkrevs:
1709 1717 ancestors.update(flparentlinkrevs)
1710 1718
1711 1719 fncache.setdefault(rev, []).append(file_)
1712 1720 wanted.add(rev)
1713 1721 if copied:
1714 1722 copies.append(copied)
1715 1723
1716 1724 return wanted
1717 1725
1718 1726 class _followfilter(object):
1719 1727 def __init__(self, repo, onlyfirst=False):
1720 1728 self.repo = repo
1721 1729 self.startrev = nullrev
1722 1730 self.roots = set()
1723 1731 self.onlyfirst = onlyfirst
1724 1732
1725 1733 def match(self, rev):
1726 1734 def realparents(rev):
1727 1735 if self.onlyfirst:
1728 1736 return self.repo.changelog.parentrevs(rev)[0:1]
1729 1737 else:
1730 1738 return filter(lambda x: x != nullrev,
1731 1739 self.repo.changelog.parentrevs(rev))
1732 1740
1733 1741 if self.startrev == nullrev:
1734 1742 self.startrev = rev
1735 1743 return True
1736 1744
1737 1745 if rev > self.startrev:
1738 1746 # forward: all descendants
1739 1747 if not self.roots:
1740 1748 self.roots.add(self.startrev)
1741 1749 for parent in realparents(rev):
1742 1750 if parent in self.roots:
1743 1751 self.roots.add(rev)
1744 1752 return True
1745 1753 else:
1746 1754 # backwards: all parents
1747 1755 if not self.roots:
1748 1756 self.roots.update(realparents(self.startrev))
1749 1757 if rev in self.roots:
1750 1758 self.roots.remove(rev)
1751 1759 self.roots.update(realparents(rev))
1752 1760 return True
1753 1761
1754 1762 return False
1755 1763
1756 1764 def walkchangerevs(repo, match, opts, prepare):
1757 1765 '''Iterate over files and the revs in which they changed.
1758 1766
1759 1767 Callers most commonly need to iterate backwards over the history
1760 1768 in which they are interested. Doing so has awful (quadratic-looking)
1761 1769 performance, so we use iterators in a "windowed" way.
1762 1770
1763 1771 We walk a window of revisions in the desired order. Within the
1764 1772 window, we first walk forwards to gather data, then in the desired
1765 1773 order (usually backwards) to display it.
1766 1774
1767 1775 This function returns an iterator yielding contexts. Before
1768 1776 yielding each context, the iterator will first call the prepare
1769 1777 function on each context in the window in forward order.'''
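# Usage sketch (modelled on finddate() above; names are illustrative only):
#   def prep(ctx, fns):
#       pass  # inspect the files 'fns' touched by 'ctx' before it is shown
#   for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
#       ui.status("%d\n" % ctx.rev())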
1770 1778
1771 1779 follow = opts.get('follow') or opts.get('follow_first')
1772 1780 revs = _logrevs(repo, opts)
1773 1781 if not revs:
1774 1782 return []
1775 1783 wanted = set()
1776 1784 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1777 1785 opts.get('removed'))
1778 1786 fncache = {}
1779 1787 change = repo.changectx
1780 1788
1781 1789 # First step is to fill wanted, the set of revisions that we want to yield.
1782 1790 # When it does not induce extra cost, we also fill fncache for revisions in
1783 1791 # wanted: a cache of filenames that were changed (ctx.files()) and that
1784 1792 # match the file filtering conditions.
1785 1793
1786 1794 if match.always():
1787 1795 # No files, no patterns. Display all revs.
1788 1796 wanted = revs
1789 1797 elif not slowpath:
1790 1798 # We only have to read through the filelog to find wanted revisions
1791 1799
1792 1800 try:
1793 1801 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1794 1802 except FileWalkError:
1795 1803 slowpath = True
1796 1804
1797 1805 # We decided to fall back to the slowpath because at least one
1798 1806 # of the paths was not a file. Check to see if at least one of them
1799 1807 # existed in history, otherwise simply return
1800 1808 for path in match.files():
1801 1809 if path == '.' or path in repo.store:
1802 1810 break
1803 1811 else:
1804 1812 return []
1805 1813
1806 1814 if slowpath:
1807 1815 # We have to read the changelog to match filenames against
1808 1816 # changed files
1809 1817
1810 1818 if follow:
1811 1819 raise error.Abort(_('can only follow copies/renames for explicit '
1812 1820 'filenames'))
1813 1821
1814 1822 # The slow path checks files modified in every changeset.
1815 1823 # This is really slow on large repos, so compute the set lazily.
1816 1824 class lazywantedset(object):
1817 1825 def __init__(self):
1818 1826 self.set = set()
1819 1827 self.revs = set(revs)
1820 1828
1821 1829 # No need to worry about locality here because it will be accessed
1822 1830 # in the same order as the increasing window below.
1823 1831 def __contains__(self, value):
1824 1832 if value in self.set:
1825 1833 return True
1826 1834 elif value not in self.revs:
1827 1835 return False
1828 1836 else:
1829 1837 self.revs.discard(value)
1830 1838 ctx = change(value)
1831 1839 matches = filter(match, ctx.files())
1832 1840 if matches:
1833 1841 fncache[value] = matches
1834 1842 self.set.add(value)
1835 1843 return True
1836 1844 return False
1837 1845
1838 1846 def discard(self, value):
1839 1847 self.revs.discard(value)
1840 1848 self.set.discard(value)
1841 1849
1842 1850 wanted = lazywantedset()
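# Note on the laziness above: a membership test such as "rev in wanted" reads
# ctx.files() for that revision at most once, caches a positive answer in
# self.set and drops the revision from self.revs, so repeated lookups become
# plain set checks.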
1843 1851
1844 1852 # it might be worthwhile to do this in the iterator if the rev range
1845 1853 # is descending and the prune args are all within that range
1846 1854 for rev in opts.get('prune', ()):
1847 1855 rev = repo[rev].rev()
1848 1856 ff = _followfilter(repo)
1849 1857 stop = min(revs[0], revs[-1])
1850 1858 for x in xrange(rev, stop - 1, -1):
1851 1859 if ff.match(x):
1852 1860 wanted = wanted - [x]
1853 1861
1854 1862 # Now that wanted is correctly initialized, we can iterate over the
1855 1863 # revision range, yielding only revisions in wanted.
1856 1864 def iterate():
1857 1865 if follow and match.always():
1858 1866 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1859 1867 def want(rev):
1860 1868 return ff.match(rev) and rev in wanted
1861 1869 else:
1862 1870 def want(rev):
1863 1871 return rev in wanted
1864 1872
1865 1873 it = iter(revs)
1866 1874 stopiteration = False
1867 1875 for windowsize in increasingwindows():
1868 1876 nrevs = []
1869 1877 for i in xrange(windowsize):
1870 1878 rev = next(it, None)
1871 1879 if rev is None:
1872 1880 stopiteration = True
1873 1881 break
1874 1882 elif want(rev):
1875 1883 nrevs.append(rev)
1876 1884 for rev in sorted(nrevs):
1877 1885 fns = fncache.get(rev)
1878 1886 ctx = change(rev)
1879 1887 if not fns:
1880 1888 def fns_generator():
1881 1889 for f in ctx.files():
1882 1890 if match(f):
1883 1891 yield f
1884 1892 fns = fns_generator()
1885 1893 prepare(ctx, fns)
1886 1894 for rev in nrevs:
1887 1895 yield change(rev)
1888 1896
1889 1897 if stopiteration:
1890 1898 break
1891 1899
1892 1900 return iterate()
1893 1901
1894 1902 def _makefollowlogfilematcher(repo, files, followfirst):
1895 1903 # When displaying a revision with --patch --follow FILE, we have
1896 1904 # to know which file of the revision must be diffed. With
1897 1905 # --follow, we want the names of the ancestors of FILE in the
1898 1906 # revision, stored in "fcache". "fcache" is populated by
1899 1907 # reproducing the graph traversal already done by --follow revset
1900 1908 # and relating revs to file names (which is not "correct" but
1901 1909 # good enough).
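# Shape of fcache after populate() (paths are hypothetical): a mapping from
# revision number to the ancestor paths of FILE introduced there, e.g.
#   {12: set(['dir/new-name']), 7: set(['dir/old-name'])}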
1902 1910 fcache = {}
1903 1911 fcacheready = [False]
1904 1912 pctx = repo['.']
1905 1913
1906 1914 def populate():
1907 1915 for fn in files:
1908 1916 fctx = pctx[fn]
1909 1917 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1910 1918 for c in fctx.ancestors(followfirst=followfirst):
1911 1919 fcache.setdefault(c.rev(), set()).add(c.path())
1912 1920
1913 1921 def filematcher(rev):
1914 1922 if not fcacheready[0]:
1915 1923 # Lazy initialization
1916 1924 fcacheready[0] = True
1917 1925 populate()
1918 1926 return scmutil.matchfiles(repo, fcache.get(rev, []))
1919 1927
1920 1928 return filematcher
1921 1929
1922 1930 def _makenofollowlogfilematcher(repo, pats, opts):
1923 1931 '''hook for extensions to override the filematcher for non-follow cases'''
1924 1932 return None
1925 1933
1926 1934 def _makelogrevset(repo, pats, opts, revs):
1927 1935 """Return (expr, filematcher) where expr is a revset string built
1928 1936 from log options and file patterns or None. If --stat or --patch
1929 1937 are not passed filematcher is None. Otherwise it is a callable
1930 1938 taking a revision number and returning a match object filtering
1931 1939 the files to be detailed when displaying the revision.
1932 1940 """
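# Rough illustration of the translation performed below (output format is
# approximate): a command such as "hg log -k bug --no-merges" becomes a revset
# along the lines of
#   ((keyword('bug')) and not merge())
# while file patterns turn into filelog()/follow()/_matchfiles() terms.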
1933 1941 opt2revset = {
1934 1942 'no_merges': ('not merge()', None),
1935 1943 'only_merges': ('merge()', None),
1936 1944 '_ancestors': ('ancestors(%(val)s)', None),
1937 1945 '_fancestors': ('_firstancestors(%(val)s)', None),
1938 1946 '_descendants': ('descendants(%(val)s)', None),
1939 1947 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1940 1948 '_matchfiles': ('_matchfiles(%(val)s)', None),
1941 1949 'date': ('date(%(val)r)', None),
1942 1950 'branch': ('branch(%(val)r)', ' or '),
1943 1951 '_patslog': ('filelog(%(val)r)', ' or '),
1944 1952 '_patsfollow': ('follow(%(val)r)', ' or '),
1945 1953 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1946 1954 'keyword': ('keyword(%(val)r)', ' or '),
1947 1955 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1948 1956 'user': ('user(%(val)r)', ' or '),
1949 1957 }
1950 1958
1951 1959 opts = dict(opts)
1952 1960 # follow or not follow?
1953 1961 follow = opts.get('follow') or opts.get('follow_first')
1954 1962 if opts.get('follow_first'):
1955 1963 followfirst = 1
1956 1964 else:
1957 1965 followfirst = 0
1958 1966 # --follow with FILE behavior depends on revs...
1959 1967 it = iter(revs)
1960 1968 startrev = next(it)
1961 1969 followdescendants = startrev < next(it, startrev)
1962 1970
1963 1971 # branch and only_branch are really aliases and must be handled at
1964 1972 # the same time
1965 1973 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1966 1974 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1967 1975 # pats/include/exclude are passed to match.match() directly in
1968 1976 # _matchfiles() revset but walkchangerevs() builds its matcher with
1969 1977 # scmutil.match(). The difference is input pats are globbed on
1970 1978 # platforms without shell expansion (windows).
1971 1979 wctx = repo[None]
1972 1980 match, pats = scmutil.matchandpats(wctx, pats, opts)
1973 1981 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1974 1982 opts.get('removed'))
1975 1983 if not slowpath:
1976 1984 for f in match.files():
1977 1985 if follow and f not in wctx:
1978 1986 # If the file exists, it may be a directory, so let it
1979 1987 # take the slow path.
1980 1988 if os.path.exists(repo.wjoin(f)):
1981 1989 slowpath = True
1982 1990 continue
1983 1991 else:
1984 1992 raise error.Abort(_('cannot follow file not in parent '
1985 1993 'revision: "%s"') % f)
1986 1994 filelog = repo.file(f)
1987 1995 if not filelog:
1988 1996 # A zero count may be a directory or deleted file, so
1989 1997 # try to find matching entries on the slow path.
1990 1998 if follow:
1991 1999 raise error.Abort(
1992 2000 _('cannot follow nonexistent file: "%s"') % f)
1993 2001 slowpath = True
1994 2002
1995 2003 # We decided to fall back to the slowpath because at least one
1996 2004 # of the paths was not a file. Check to see if at least one of them
1997 2005 # existed in history - in that case, we'll continue down the
1998 2006 # slowpath; otherwise, we can turn off the slowpath
1999 2007 if slowpath:
2000 2008 for path in match.files():
2001 2009 if path == '.' or path in repo.store:
2002 2010 break
2003 2011 else:
2004 2012 slowpath = False
2005 2013
2006 2014 fpats = ('_patsfollow', '_patsfollowfirst')
2007 2015 fnopats = (('_ancestors', '_fancestors'),
2008 2016 ('_descendants', '_fdescendants'))
2009 2017 if slowpath:
2010 2018 # See walkchangerevs() slow path.
2011 2019 #
2012 2020 # pats/include/exclude cannot be represented as separate
2013 2021 # revset expressions as their filtering logic applies at file
2014 2022 # level. For instance "-I a -X a" matches a revision touching
2015 2023 # "a" and "b" while "file(a) and not file(b)" does
2016 2024 # not. Besides, filesets are evaluated against the working
2017 2025 # directory.
2018 2026 matchargs = ['r:', 'd:relpath']
2019 2027 for p in pats:
2020 2028 matchargs.append('p:' + p)
2021 2029 for p in opts.get('include', []):
2022 2030 matchargs.append('i:' + p)
2023 2031 for p in opts.get('exclude', []):
2024 2032 matchargs.append('x:' + p)
2025 2033 matchargs = ','.join(('%r' % p) for p in matchargs)
2026 2034 opts['_matchfiles'] = matchargs
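# Approximate shape of the matchargs string built above (pattern hypothetical):
#   "'r:','d:relpath','p:src/'"
# i.e. a single comma-joined string of quoted arguments that the _matchfiles()
# revset predicate unpacks again.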
2027 2035 if follow:
2028 2036 opts[fnopats[0][followfirst]] = '.'
2029 2037 else:
2030 2038 if follow:
2031 2039 if pats:
2032 2040 # follow() revset interprets its file argument as a
2033 2041 # manifest entry, so use match.files(), not pats.
2034 2042 opts[fpats[followfirst]] = list(match.files())
2035 2043 else:
2036 2044 op = fnopats[followdescendants][followfirst]
2037 2045 opts[op] = 'rev(%d)' % startrev
2038 2046 else:
2039 2047 opts['_patslog'] = list(pats)
2040 2048
2041 2049 filematcher = None
2042 2050 if opts.get('patch') or opts.get('stat'):
2043 2051 # When following files, track renames via a special matcher.
2044 2052 # If we're forced to take the slowpath it means we're following
2045 2053 # at least one pattern/directory, so don't bother with rename tracking.
2046 2054 if follow and not match.always() and not slowpath:
2047 2055 # _makefollowlogfilematcher expects its files argument to be
2048 2056 # relative to the repo root, so use match.files(), not pats.
2049 2057 filematcher = _makefollowlogfilematcher(repo, match.files(),
2050 2058 followfirst)
2051 2059 else:
2052 2060 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2053 2061 if filematcher is None:
2054 2062 filematcher = lambda rev: match
2055 2063
2056 2064 expr = []
2057 2065 for op, val in sorted(opts.iteritems()):
2058 2066 if not val:
2059 2067 continue
2060 2068 if op not in opt2revset:
2061 2069 continue
2062 2070 revop, andor = opt2revset[op]
2063 2071 if '%(val)' not in revop:
2064 2072 expr.append(revop)
2065 2073 else:
2066 2074 if not isinstance(val, list):
2067 2075 e = revop % {'val': val}
2068 2076 else:
2069 2077 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2070 2078 expr.append(e)
2071 2079
2072 2080 if expr:
2073 2081 expr = '(' + ' and '.join(expr) + ')'
2074 2082 else:
2075 2083 expr = None
2076 2084 return expr, filematcher
2077 2085
2078 2086 def _logrevs(repo, opts):
2079 2087 # Default --rev value depends on --follow but --follow behavior
2080 2088 # depends on revisions resolved from --rev...
2081 2089 follow = opts.get('follow') or opts.get('follow_first')
2082 2090 if opts.get('rev'):
2083 2091 revs = scmutil.revrange(repo, opts['rev'])
2084 2092 elif follow and repo.dirstate.p1() == nullid:
2085 2093 revs = revset.baseset()
2086 2094 elif follow:
2087 2095 revs = repo.revs('reverse(:.)')
2088 2096 else:
2089 2097 revs = revset.spanset(repo)
2090 2098 revs.reverse()
2091 2099 return revs
2092 2100
2093 2101 def getgraphlogrevs(repo, pats, opts):
2094 2102 """Return (revs, expr, filematcher) where revs is an iterable of
2095 2103 revision numbers, expr is a revset string built from log options
2096 2104 and file patterns or None, and used to filter 'revs'. If --stat or
2097 2105 --patch are not passed filematcher is None. Otherwise it is a
2098 2106 callable taking a revision number and returning a match object
2099 2107 filtering the files to be detailed when displaying the revision.
2100 2108 """
2101 2109 limit = loglimit(opts)
2102 2110 revs = _logrevs(repo, opts)
2103 2111 if not revs:
2104 2112 return revset.baseset(), None, None
2105 2113 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2106 2114 if opts.get('rev'):
2107 2115 # User-specified revs might be unsorted, but don't sort before
2108 2116 # _makelogrevset because it might depend on the order of revs
2109 2117 if not (revs.isdescending() or revs.istopo()):
2110 2118 revs.sort(reverse=True)
2111 2119 if expr:
2112 2120 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2113 2121 revs = matcher(repo, revs)
2114 2122 if limit is not None:
2115 2123 limitedrevs = []
2116 2124 for idx, rev in enumerate(revs):
2117 2125 if idx >= limit:
2118 2126 break
2119 2127 limitedrevs.append(rev)
2120 2128 revs = revset.baseset(limitedrevs)
2121 2129
2122 2130 return revs, expr, filematcher
2123 2131
2124 2132 def getlogrevs(repo, pats, opts):
2125 2133 """Return (revs, expr, filematcher) where revs is an iterable of
2126 2134 revision numbers, expr is a revset string built from log options
2127 2135 and file patterns or None, and used to filter 'revs'. If --stat or
2128 2136 --patch are not passed filematcher is None. Otherwise it is a
2129 2137 callable taking a revision number and returning a match object
2130 2138 filtering the files to be detailed when displaying the revision.
2131 2139 """
2132 2140 limit = loglimit(opts)
2133 2141 revs = _logrevs(repo, opts)
2134 2142 if not revs:
2135 2143 return revset.baseset([]), None, None
2136 2144 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2137 2145 if expr:
2138 2146 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2139 2147 revs = matcher(repo, revs)
2140 2148 if limit is not None:
2141 2149 limitedrevs = []
2142 2150 for idx, r in enumerate(revs):
2143 2151 if limit <= idx:
2144 2152 break
2145 2153 limitedrevs.append(r)
2146 2154 revs = revset.baseset(limitedrevs)
2147 2155
2148 2156 return revs, expr, filematcher
2149 2157
2150 2158 def _graphnodeformatter(ui, displayer):
2151 2159 spec = ui.config('ui', 'graphnodetemplate')
2152 2160 if not spec:
2153 2161 return templatekw.showgraphnode # fast path for "{graphnode}"
2154 2162
2155 2163 templ = formatter.gettemplater(ui, 'graphnode', spec)
2156 2164 cache = {}
2157 2165 if isinstance(displayer, changeset_templater):
2158 2166 cache = displayer.cache # reuse cache of slow templates
2159 2167 props = templatekw.keywords.copy()
2160 2168 props['templ'] = templ
2161 2169 props['cache'] = cache
2162 2170 def formatnode(repo, ctx):
2163 2171 props['ctx'] = ctx
2164 2172 props['repo'] = repo
2165 2173 props['ui'] = repo.ui
2166 2174 props['revcache'] = {}
2167 2175 return templater.stringify(templ('graphnode', **props))
2168 2176 return formatnode
2169 2177
2170 2178 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2171 2179 filematcher=None):
2172 2180 formatnode = _graphnodeformatter(ui, displayer)
2173 2181 state = graphmod.asciistate()
2174 2182 styles = state['styles']
2175 2183
2176 2184 # only set graph styling if HGPLAIN is not set.
2177 2185 if ui.plain('graph'):
2178 2186 # set all edge styles to |, the default pre-3.8 behaviour
2179 2187 styles.update(dict.fromkeys(styles, '|'))
2180 2188 else:
2181 2189 edgetypes = {
2182 2190 'parent': graphmod.PARENT,
2183 2191 'grandparent': graphmod.GRANDPARENT,
2184 2192 'missing': graphmod.MISSINGPARENT
2185 2193 }
2186 2194 for name, key in edgetypes.items():
2187 2195 # experimental config: experimental.graphstyle.*
2188 2196 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2189 2197 styles[key])
2190 2198 if not styles[key]:
2191 2199 styles[key] = None
2192 2200
2193 2201 # experimental config: experimental.graphshorten
2194 2202 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
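# The settings read above map to hgrc entries such as (values illustrative):
#   [experimental]
#   graphstyle.grandparent = :
#   graphshorten = true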
2195 2203
2196 2204 for rev, type, ctx, parents in dag:
2197 2205 char = formatnode(repo, ctx)
2198 2206 copies = None
2199 2207 if getrenamed and ctx.rev():
2200 2208 copies = []
2201 2209 for fn in ctx.files():
2202 2210 rename = getrenamed(fn, ctx.rev())
2203 2211 if rename:
2204 2212 copies.append((fn, rename[0]))
2205 2213 revmatchfn = None
2206 2214 if filematcher is not None:
2207 2215 revmatchfn = filematcher(ctx.rev())
2208 2216 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2209 2217 lines = displayer.hunk.pop(rev).split('\n')
2210 2218 if not lines[-1]:
2211 2219 del lines[-1]
2212 2220 displayer.flush(ctx)
2213 2221 edges = edgefn(type, char, lines, state, rev, parents)
2214 2222 for type, char, lines, coldata in edges:
2215 2223 graphmod.ascii(ui, state, type, char, lines, coldata)
2216 2224 displayer.close()
2217 2225
2218 2226 def graphlog(ui, repo, *pats, **opts):
2219 2227 # Parameters are identical to log command ones
2220 2228 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2221 2229 revdag = graphmod.dagwalker(repo, revs)
2222 2230
2223 2231 getrenamed = None
2224 2232 if opts.get('copies'):
2225 2233 endrev = None
2226 2234 if opts.get('rev'):
2227 2235 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2228 2236 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2229 2237 displayer = show_changeset(ui, repo, opts, buffered=True)
2230 2238 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2231 2239 filematcher)
2232 2240
2233 2241 def checkunsupportedgraphflags(pats, opts):
2234 2242 for op in ["newest_first"]:
2235 2243 if op in opts and opts[op]:
2236 2244 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2237 2245 % op.replace("_", "-"))
2238 2246
2239 2247 def graphrevs(repo, nodes, opts):
2240 2248 limit = loglimit(opts)
2241 2249 nodes.reverse()
2242 2250 if limit is not None:
2243 2251 nodes = nodes[:limit]
2244 2252 return graphmod.nodes(repo, nodes)
2245 2253
2246 2254 def add(ui, repo, match, prefix, explicitonly, **opts):
2247 2255 join = lambda f: os.path.join(prefix, f)
2248 2256 bad = []
2249 2257
2250 2258 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2251 2259 names = []
2252 2260 wctx = repo[None]
2253 2261 cca = None
2254 2262 abort, warn = scmutil.checkportabilityalert(ui)
2255 2263 if abort or warn:
2256 2264 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2257 2265
2258 2266 badmatch = matchmod.badmatch(match, badfn)
2259 2267 dirstate = repo.dirstate
2260 2268 # We don't want to just call wctx.walk here, since it would return a lot of
2261 2269 # clean files, which we aren't interested in and takes time.
2262 2270 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2263 2271 True, False, full=False)):
2264 2272 exact = match.exact(f)
2265 2273 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2266 2274 if cca:
2267 2275 cca(f)
2268 2276 names.append(f)
2269 2277 if ui.verbose or not exact:
2270 2278 ui.status(_('adding %s\n') % match.rel(f))
2271 2279
2272 2280 for subpath in sorted(wctx.substate):
2273 2281 sub = wctx.sub(subpath)
2274 2282 try:
2275 2283 submatch = matchmod.subdirmatcher(subpath, match)
2276 2284 if opts.get('subrepos'):
2277 2285 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2278 2286 else:
2279 2287 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2280 2288 except error.LookupError:
2281 2289 ui.status(_("skipping missing subrepository: %s\n")
2282 2290 % join(subpath))
2283 2291
2284 2292 if not opts.get('dry_run'):
2285 2293 rejected = wctx.add(names, prefix)
2286 2294 bad.extend(f for f in rejected if f in match.files())
2287 2295 return bad
2288 2296
2289 2297 def forget(ui, repo, match, prefix, explicitonly):
2290 2298 join = lambda f: os.path.join(prefix, f)
2291 2299 bad = []
2292 2300 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2293 2301 wctx = repo[None]
2294 2302 forgot = []
2295 2303
2296 2304 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2297 2305 forget = sorted(s[0] + s[1] + s[3] + s[6])
2298 2306 if explicitonly:
2299 2307 forget = [f for f in forget if match.exact(f)]
2300 2308
2301 2309 for subpath in sorted(wctx.substate):
2302 2310 sub = wctx.sub(subpath)
2303 2311 try:
2304 2312 submatch = matchmod.subdirmatcher(subpath, match)
2305 2313 subbad, subforgot = sub.forget(submatch, prefix)
2306 2314 bad.extend([subpath + '/' + f for f in subbad])
2307 2315 forgot.extend([subpath + '/' + f for f in subforgot])
2308 2316 except error.LookupError:
2309 2317 ui.status(_("skipping missing subrepository: %s\n")
2310 2318 % join(subpath))
2311 2319
2312 2320 if not explicitonly:
2313 2321 for f in match.files():
2314 2322 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2315 2323 if f not in forgot:
2316 2324 if repo.wvfs.exists(f):
2317 2325 # Don't complain if the exact case match wasn't given.
2318 2326 # But don't do this until after checking 'forgot', so
2319 2327 # that subrepo files aren't normalized, and this op is
2320 2328 # purely from data cached by the status walk above.
2321 2329 if repo.dirstate.normalize(f) in repo.dirstate:
2322 2330 continue
2323 2331 ui.warn(_('not removing %s: '
2324 2332 'file is already untracked\n')
2325 2333 % match.rel(f))
2326 2334 bad.append(f)
2327 2335
2328 2336 for f in forget:
2329 2337 if ui.verbose or not match.exact(f):
2330 2338 ui.status(_('removing %s\n') % match.rel(f))
2331 2339
2332 2340 rejected = wctx.forget(forget, prefix)
2333 2341 bad.extend(f for f in rejected if f in match.files())
2334 2342 forgot.extend(f for f in forget if f not in rejected)
2335 2343 return bad, forgot
2336 2344
2337 2345 def files(ui, ctx, m, fm, fmt, subrepos):
2338 2346 rev = ctx.rev()
2339 2347 ret = 1
2340 2348 ds = ctx.repo().dirstate
2341 2349
2342 2350 for f in ctx.matches(m):
2343 2351 if rev is None and ds[f] == 'r':
2344 2352 continue
2345 2353 fm.startitem()
2346 2354 if ui.verbose:
2347 2355 fc = ctx[f]
2348 2356 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2349 2357 fm.data(abspath=f)
2350 2358 fm.write('path', fmt, m.rel(f))
2351 2359 ret = 0
2352 2360
2353 2361 for subpath in sorted(ctx.substate):
2354 2362 submatch = matchmod.subdirmatcher(subpath, m)
2355 2363 if (subrepos or m.exact(subpath) or any(submatch.files())):
2356 2364 sub = ctx.sub(subpath)
2357 2365 try:
2358 2366 recurse = m.exact(subpath) or subrepos
2359 2367 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2360 2368 ret = 0
2361 2369 except error.LookupError:
2362 2370 ui.status(_("skipping missing subrepository: %s\n")
2363 2371 % m.abs(subpath))
2364 2372
2365 2373 return ret
2366 2374
2367 2375 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2368 2376 join = lambda f: os.path.join(prefix, f)
2369 2377 ret = 0
2370 2378 s = repo.status(match=m, clean=True)
2371 2379 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2372 2380
2373 2381 wctx = repo[None]
2374 2382
2375 2383 if warnings is None:
2376 2384 warnings = []
2377 2385 warn = True
2378 2386 else:
2379 2387 warn = False
2380 2388
2381 2389 subs = sorted(wctx.substate)
2382 2390 total = len(subs)
2383 2391 count = 0
2384 2392 for subpath in subs:
2385 2393 count += 1
2386 2394 submatch = matchmod.subdirmatcher(subpath, m)
2387 2395 if subrepos or m.exact(subpath) or any(submatch.files()):
2388 2396 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2389 2397 sub = wctx.sub(subpath)
2390 2398 try:
2391 2399 if sub.removefiles(submatch, prefix, after, force, subrepos,
2392 2400 warnings):
2393 2401 ret = 1
2394 2402 except error.LookupError:
2395 2403 warnings.append(_("skipping missing subrepository: %s\n")
2396 2404 % join(subpath))
2397 2405 ui.progress(_('searching'), None)
2398 2406
2399 2407 # warn about failure to delete explicit files/dirs
2400 2408 deleteddirs = util.dirs(deleted)
2401 2409 files = m.files()
2402 2410 total = len(files)
2403 2411 count = 0
2404 2412 for f in files:
2405 2413 def insubrepo():
2406 2414 for subpath in wctx.substate:
2407 2415 if f.startswith(subpath + '/'):
2408 2416 return True
2409 2417 return False
2410 2418
2411 2419 count += 1
2412 2420 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2413 2421 isdir = f in deleteddirs or wctx.hasdir(f)
2414 2422 if (f in repo.dirstate or isdir or f == '.'
2415 2423 or insubrepo() or f in subs):
2416 2424 continue
2417 2425
2418 2426 if repo.wvfs.exists(f):
2419 2427 if repo.wvfs.isdir(f):
2420 2428 warnings.append(_('not removing %s: no tracked files\n')
2421 2429 % m.rel(f))
2422 2430 else:
2423 2431 warnings.append(_('not removing %s: file is untracked\n')
2424 2432 % m.rel(f))
2425 2433 # missing files will generate a warning elsewhere
2426 2434 ret = 1
2427 2435 ui.progress(_('deleting'), None)
2428 2436
2429 2437 if force:
2430 2438 list = modified + deleted + clean + added
2431 2439 elif after:
2432 2440 list = deleted
2433 2441 remaining = modified + added + clean
2434 2442 total = len(remaining)
2435 2443 count = 0
2436 2444 for f in remaining:
2437 2445 count += 1
2438 2446 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2439 2447 warnings.append(_('not removing %s: file still exists\n')
2440 2448 % m.rel(f))
2441 2449 ret = 1
2442 2450 ui.progress(_('skipping'), None)
2443 2451 else:
2444 2452 list = deleted + clean
2445 2453 total = len(modified) + len(added)
2446 2454 count = 0
2447 2455 for f in modified:
2448 2456 count += 1
2449 2457 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2450 2458 warnings.append(_('not removing %s: file is modified (use -f'
2451 2459 ' to force removal)\n') % m.rel(f))
2452 2460 ret = 1
2453 2461 for f in added:
2454 2462 count += 1
2455 2463 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2456 2464 warnings.append(_("not removing %s: file has been marked for add"
2457 2465 " (use 'hg forget' to undo add)\n") % m.rel(f))
2458 2466 ret = 1
2459 2467 ui.progress(_('skipping'), None)
2460 2468
2461 2469 list = sorted(list)
2462 2470 total = len(list)
2463 2471 count = 0
2464 2472 for f in list:
2465 2473 count += 1
2466 2474 if ui.verbose or not m.exact(f):
2467 2475 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2468 2476 ui.status(_('removing %s\n') % m.rel(f))
2469 2477 ui.progress(_('deleting'), None)
2470 2478
2471 2479 with repo.wlock():
2472 2480 if not after:
2473 2481 for f in list:
2474 2482 if f in added:
2475 2483 continue # we never unlink added files on remove
2476 2484 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2477 2485 repo[None].forget(list)
2478 2486
2479 2487 if warn:
2480 2488 for warning in warnings:
2481 2489 ui.warn(warning)
2482 2490
2483 2491 return ret
2484 2492
2485 2493 def cat(ui, repo, ctx, matcher, prefix, **opts):
2486 2494 err = 1
2487 2495
2488 2496 def write(path):
2489 2497 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2490 2498 pathname=os.path.join(prefix, path))
2491 2499 data = ctx[path].data()
2492 2500 if opts.get('decode'):
2493 2501 data = repo.wwritedata(path, data)
2494 2502 fp.write(data)
2495 2503 fp.close()
2496 2504
2497 2505 # Automation often uses hg cat on single files, so special case it
2498 2506 # for performance to avoid the cost of parsing the manifest.
2499 2507 if len(matcher.files()) == 1 and not matcher.anypats():
2500 2508 file = matcher.files()[0]
2501 2509 mfl = repo.manifestlog
2502 2510 mfnode = ctx.manifestnode()
2503 2511 try:
2504 2512 if mfnode and mfl[mfnode].find(file)[0]:
2505 2513 write(file)
2506 2514 return 0
2507 2515 except KeyError:
2508 2516 pass
2509 2517
2510 2518 for abs in ctx.walk(matcher):
2511 2519 write(abs)
2512 2520 err = 0
2513 2521
2514 2522 for subpath in sorted(ctx.substate):
2515 2523 sub = ctx.sub(subpath)
2516 2524 try:
2517 2525 submatch = matchmod.subdirmatcher(subpath, matcher)
2518 2526
2519 2527 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2520 2528 **opts):
2521 2529 err = 0
2522 2530 except error.RepoLookupError:
2523 2531 ui.status(_("skipping missing subrepository: %s\n")
2524 2532 % os.path.join(prefix, subpath))
2525 2533
2526 2534 return err
2527 2535
2528 2536 def commit(ui, repo, commitfunc, pats, opts):
2529 2537 '''commit the specified files or all outstanding changes'''
2530 2538 date = opts.get('date')
2531 2539 if date:
2532 2540 opts['date'] = util.parsedate(date)
2533 2541 message = logmessage(ui, opts)
2534 2542 matcher = scmutil.match(repo[None], pats, opts)
2535 2543
2536 2544 # extract addremove carefully -- this function can be called from a command
2537 2545 # that doesn't support addremove
2538 2546 if opts.get('addremove'):
2539 2547 if scmutil.addremove(repo, matcher, "", opts) != 0:
2540 2548 raise error.Abort(
2541 2549 _("failed to mark all new/missing files as added/removed"))
2542 2550
2543 2551 return commitfunc(ui, repo, message, matcher, opts)
2544 2552
2545 2553 def samefile(f, ctx1, ctx2):
2546 2554 if f in ctx1.manifest():
2547 2555 a = ctx1.filectx(f)
2548 2556 if f in ctx2.manifest():
2549 2557 b = ctx2.filectx(f)
2550 2558 return (not a.cmp(b)
2551 2559 and a.flags() == b.flags())
2552 2560 else:
2553 2561 return False
2554 2562 else:
2555 2563 return f not in ctx2.manifest()
2556 2564
2557 2565 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2558 2566 # avoid cycle context -> subrepo -> cmdutil
2559 2567 from . import context
2560 2568
2561 2569 # amend will reuse the existing user if not specified, but the obsolete
2562 2570 # marker creation requires that the current user's name is specified.
2563 2571 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2564 2572 ui.username() # raise exception if username not set
2565 2573
2566 2574 ui.note(_('amending changeset %s\n') % old)
2567 2575 base = old.p1()
2568 2576 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2569 2577
2570 2578 wlock = lock = newid = None
2571 2579 try:
2572 2580 wlock = repo.wlock()
2573 2581 lock = repo.lock()
2574 2582 with repo.transaction('amend') as tr:
2575 2583 # See if we got a message from -m or -l, if not, open the editor
2576 2584 # with the message of the changeset to amend
2577 2585 message = logmessage(ui, opts)
2578 2586 # ensure logfile does not conflict with later enforcement of the
2579 2587 # message. potential logfile content has been processed by
2580 2588 # `logmessage` anyway.
2581 2589 opts.pop('logfile')
2582 2590 # First, do a regular commit to record all changes in the working
2583 2591 # directory (if there are any)
2584 2592 ui.callhooks = False
2585 2593 activebookmark = repo._bookmarks.active
2586 2594 try:
2587 2595 repo._bookmarks.active = None
2588 2596 opts['message'] = 'temporary amend commit for %s' % old
2589 2597 node = commit(ui, repo, commitfunc, pats, opts)
2590 2598 finally:
2591 2599 repo._bookmarks.active = activebookmark
2592 2600 repo._bookmarks.recordchange(tr)
2593 2601 ui.callhooks = True
2594 2602 ctx = repo[node]
2595 2603
2596 2604 # Participating changesets:
2597 2605 #
2598 2606 # node/ctx o - new (intermediate) commit that contains changes
2599 2607 # | from working dir to go into amending commit
2600 2608 # | (or a workingctx if there were no changes)
2601 2609 # |
2602 2610 # old o - changeset to amend
2603 2611 # |
2604 2612 # base o - parent of amending changeset
2605 2613
2606 2614 # Update extra dict from amended commit (e.g. to preserve graft
2607 2615 # source)
2608 2616 extra.update(old.extra())
2609 2617
2610 2618 # Also update it from the intermediate commit or from the wctx
2611 2619 extra.update(ctx.extra())
2612 2620
2613 2621 if len(old.parents()) > 1:
2614 2622 # ctx.files() isn't reliable for merges, so fall back to the
2615 2623 # slower repo.status() method
2616 2624 files = set([fn for st in repo.status(base, old)[:3]
2617 2625 for fn in st])
2618 2626 else:
2619 2627 files = set(old.files())
2620 2628
2621 2629 # Second, we use either the commit we just did, or if there were no
2622 2630 # changes the parent of the working directory as the version of the
2623 2631 # files in the final amend commit
2624 2632 if node:
2625 2633 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2626 2634
2627 2635 user = ctx.user()
2628 2636 date = ctx.date()
2629 2637 # Recompute copies (avoid recording a -> b -> a)
2630 2638 copied = copies.pathcopies(base, ctx)
2631 2639 if old.p2(): # call it: the bound method itself is always truthy
2632 2640 copied.update(copies.pathcopies(old.p2(), ctx))
2633 2641
2634 2642 # Prune files which were reverted by the updates: if old
2635 2643 # introduced file X and our intermediate commit, node,
2636 2644 # renamed that file, then those two files are the same and
2637 2645 # we can discard X from our list of files. Likewise if X
2638 2646 # was deleted, it's no longer relevant
2639 2647 files.update(ctx.files())
2640 2648 files = [f for f in files if not samefile(f, ctx, base)]
2641 2649
2642 2650 def filectxfn(repo, ctx_, path):
2643 2651 try:
2644 2652 fctx = ctx[path]
2645 2653 flags = fctx.flags()
2646 2654 mctx = context.memfilectx(repo,
2647 2655 fctx.path(), fctx.data(),
2648 2656 islink='l' in flags,
2649 2657 isexec='x' in flags,
2650 2658 copied=copied.get(path))
2651 2659 return mctx
2652 2660 except KeyError:
2653 2661 return None
2654 2662 else:
2655 2663 ui.note(_('copying changeset %s to %s\n') % (old, base))
2656 2664
2657 2665 # Use version of files as in the old cset
2658 2666 def filectxfn(repo, ctx_, path):
2659 2667 try:
2660 2668 return old.filectx(path)
2661 2669 except KeyError:
2662 2670 return None
2663 2671
2664 2672 user = opts.get('user') or old.user()
2665 2673 date = opts.get('date') or old.date()
2666 2674 editform = mergeeditform(old, 'commit.amend')
2667 2675 editor = getcommiteditor(editform=editform, **opts)
2668 2676 if not message:
2669 2677 editor = getcommiteditor(edit=True, editform=editform)
2670 2678 message = old.description()
2671 2679
2672 2680 pureextra = extra.copy()
2673 2681 extra['amend_source'] = old.hex()
2674 2682
2675 2683 new = context.memctx(repo,
2676 2684 parents=[base.node(), old.p2().node()],
2677 2685 text=message,
2678 2686 files=files,
2679 2687 filectxfn=filectxfn,
2680 2688 user=user,
2681 2689 date=date,
2682 2690 extra=extra,
2683 2691 editor=editor)
2684 2692
2685 2693 newdesc = changelog.stripdesc(new.description())
2686 2694 if ((not node)
2687 2695 and newdesc == old.description()
2688 2696 and user == old.user()
2689 2697 and date == old.date()
2690 2698 and pureextra == old.extra()):
2691 2699 # nothing changed. continuing here would create a new node
2692 2700 # anyway because of the amend_source noise.
2693 2701 #
2694 2702 # This is not what we expect from amend.
2695 2703 return old.node()
2696 2704
2697 2705 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2698 2706 try:
2699 2707 if opts.get('secret'):
2700 2708 commitphase = 'secret'
2701 2709 else:
2702 2710 commitphase = old.phase()
2703 2711 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2704 2712 newid = repo.commitctx(new)
2705 2713 finally:
2706 2714 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2707 2715 if newid != old.node():
2708 2716 # Reroute the working copy parent to the new changeset
2709 2717 repo.setparents(newid, nullid)
2710 2718
2711 2719 # Move bookmarks from old parent to amend commit
2712 2720 bms = repo.nodebookmarks(old.node())
2713 2721 if bms:
2714 2722 marks = repo._bookmarks
2715 2723 for bm in bms:
2716 2724 ui.debug('moving bookmarks %r from %s to %s\n' %
2717 2725 (marks, old.hex(), hex(newid)))
2718 2726 marks[bm] = newid
2719 2727 marks.recordchange(tr)
2720 2728 # commit the whole amend process
2721 2729 if createmarkers:
2722 2730 # mark the new changeset as successor of the rewritten one
2723 2731 new = repo[newid]
2724 2732 obs = [(old, (new,))]
2725 2733 if node:
2726 2734 obs.append((ctx, ()))
2727 2735
2728 2736 obsolete.createmarkers(repo, obs)
2729 2737 if not createmarkers and newid != old.node():
2730 2738 # Strip the intermediate commit (if there was one) and the amended
2731 2739 # commit
2732 2740 if node:
2733 2741 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2734 2742 ui.note(_('stripping amended changeset %s\n') % old)
2735 2743 repair.strip(ui, repo, old.node(), topic='amend-backup')
2736 2744 finally:
2737 2745 lockmod.release(lock, wlock)
2738 2746 return newid
2739 2747
2740 2748 def commiteditor(repo, ctx, subs, editform=''):
2741 2749 if ctx.description():
2742 2750 return ctx.description()
2743 2751 return commitforceeditor(repo, ctx, subs, editform=editform,
2744 2752 unchangedmessagedetection=True)
2745 2753
2746 2754 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2747 2755 editform='', unchangedmessagedetection=False):
2748 2756 if not extramsg:
2749 2757 extramsg = _("Leave message empty to abort commit.")
2750 2758
2751 2759 forms = [e for e in editform.split('.') if e]
2752 2760 forms.insert(0, 'changeset')
2753 2761 templatetext = None
2754 2762 while forms:
2755 2763 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2756 2764 if tmpl:
2757 2765 templatetext = committext = buildcommittemplate(
2758 2766 repo, ctx, subs, extramsg, tmpl)
2759 2767 break
2760 2768 forms.pop()
2761 2769 else:
2762 2770 committext = buildcommittext(repo, ctx, subs, extramsg)
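# The lookup above walks from the most specific key to the least specific one,
# e.g. for editform "commit.amend" it tries (hypothetical hgrc shown):
#   [committemplate]
#   changeset.commit.amend = {desc}\n\nHG: amending {node|short}\n
# then "changeset.commit", then "changeset", and finally falls back to
# buildcommittext().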
2763 2771
2764 2772 # run editor in the repository root
2765 2773 olddir = pycompat.getcwd()
2766 2774 os.chdir(repo.root)
2767 2775
2768 2776 # make in-memory changes visible to external process
2769 2777 tr = repo.currenttransaction()
2770 2778 repo.dirstate.write(tr)
2771 2779 pending = tr and tr.writepending() and repo.root
2772 2780
2773 2781 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2774 2782 editform=editform, pending=pending)
2775 2783 text = editortext
2776 2784
2777 2785 # strip away anything below this special string (used for editors that want
2778 2786 # to display the diff)
2779 2787 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2780 2788 if stripbelow:
2781 2789 text = text[:stripbelow.start()]
2782 2790
2783 2791 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2784 2792 os.chdir(olddir)
2785 2793
2786 2794 if finishdesc:
2787 2795 text = finishdesc(text)
2788 2796 if not text.strip():
2789 2797 raise error.Abort(_("empty commit message"))
2790 2798 if unchangedmessagedetection and editortext == templatetext:
2791 2799 raise error.Abort(_("commit message unchanged"))
2792 2800
2793 2801 return text
2794 2802
2795 2803 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2796 2804 ui = repo.ui
2797 2805 tmpl, mapfile = gettemplate(ui, tmpl, None)
2798 2806
2799 2807 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2800 2808
2801 2809 for k, v in repo.ui.configitems('committemplate'):
2802 2810 if k != 'changeset':
2803 2811 t.t.cache[k] = v
2804 2812
2805 2813 if not extramsg:
2806 2814 extramsg = '' # ensure that extramsg is string
2807 2815
2808 2816 ui.pushbuffer()
2809 2817 t.show(ctx, extramsg=extramsg)
2810 2818 return ui.popbuffer()
2811 2819
2812 2820 def hgprefix(msg):
2813 2821 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2814 2822
2815 2823 def buildcommittext(repo, ctx, subs, extramsg):
2816 2824 edittext = []
2817 2825 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2818 2826 if ctx.description():
2819 2827 edittext.append(ctx.description())
2820 2828 edittext.append("")
2821 2829 edittext.append("") # Empty line between message and comments.
2822 2830 edittext.append(hgprefix(_("Enter commit message."
2823 2831 " Lines beginning with 'HG:' are removed.")))
2824 2832 edittext.append(hgprefix(extramsg))
2825 2833 edittext.append("HG: --")
2826 2834 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2827 2835 if ctx.p2():
2828 2836 edittext.append(hgprefix(_("branch merge")))
2829 2837 if ctx.branch():
2830 2838 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2831 2839 if bookmarks.isactivewdirparent(repo):
2832 2840 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2833 2841 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2834 2842 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2835 2843 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2836 2844 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2837 2845 if not added and not modified and not removed:
2838 2846 edittext.append(hgprefix(_("no files changed")))
2839 2847 edittext.append("")
2840 2848
2841 2849 return "\n".join(edittext)
2842 2850
2843 2851 def commitstatus(repo, node, branch, bheads=None, opts=None):
2844 2852 if opts is None:
2845 2853 opts = {}
2846 2854 ctx = repo[node]
2847 2855 parents = ctx.parents()
2848 2856
2849 2857 if (not opts.get('amend') and bheads and node not in bheads and not
2850 2858 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2851 2859 repo.ui.status(_('created new head\n'))
2852 2860 # The message is not printed for initial roots. For the other
2853 2861 # changesets, it is printed in the following situations:
2854 2862 #
2855 2863 # Par column: for the 2 parents with ...
2856 2864 # N: null or no parent
2857 2865 # B: parent is on another named branch
2858 2866 # C: parent is a regular non head changeset
2859 2867 # H: parent was a branch head of the current branch
2860 2868 # Msg column: whether we print "created new head" message
2861 2869 # In the following, it is assumed that there already exists some
2862 2870 # initial branch heads of the current branch, otherwise nothing is
2863 2871 # printed anyway.
2864 2872 #
2865 2873 # Par Msg Comment
2866 2874 # N N y additional topo root
2867 2875 #
2868 2876 # B N y additional branch root
2869 2877 # C N y additional topo head
2870 2878 # H N n usual case
2871 2879 #
2872 2880 # B B y weird additional branch root
2873 2881 # C B y branch merge
2874 2882 # H B n merge with named branch
2875 2883 #
2876 2884 # C C y additional head from merge
2877 2885 # C H n merge with a head
2878 2886 #
2879 2887 # H H n head merge: head count decreases
2880 2888
2881 2889 if not opts.get('close_branch'):
2882 2890 for r in parents:
2883 2891 if r.closesbranch() and r.branch() == branch:
2884 2892 repo.ui.status(_('reopening closed branch head %d\n') % r)
2885 2893
2886 2894 if repo.ui.debugflag:
2887 2895 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2888 2896 elif repo.ui.verbose:
2889 2897 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2890 2898
2891 2899 def postcommitstatus(repo, pats, opts):
2892 2900 return repo.status(match=scmutil.match(repo[None], pats, opts))
2893 2901
2894 2902 def revert(ui, repo, ctx, parents, *pats, **opts):
2895 2903 parent, p2 = parents
2896 2904 node = ctx.node()
2897 2905
2898 2906 mf = ctx.manifest()
2899 2907 if node == p2:
2900 2908 parent = p2
2901 2909
2902 2910 # need all matching names in dirstate and manifest of target rev,
2903 2911 # so have to walk both. do not print errors if files exist in one
2904 2912 # but not the other. in both cases, filesets should be evaluated against
2905 2913 # workingctx to get consistent result (issue4497). this means 'set:**'
2906 2914 # cannot be used to select missing files from target rev.
2907 2915
2908 2916 # `names` is a mapping for all elements in working copy and target revision
2909 2917 # The mapping is in the form:
2910 2918 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
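# Hypothetical entry for illustration:
#   names['sub/file.py'] == ('../sub/file.py', True)
# i.e. repo-relative key, cwd-relative display path, and whether the matcher
# named the file exactly.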
2911 2919 names = {}
2912 2920
2913 2921 with repo.wlock():
2914 2922 ## filling of the `names` mapping
2915 2923 # walk dirstate to fill `names`
2916 2924
2917 2925 interactive = opts.get('interactive', False)
2918 2926 wctx = repo[None]
2919 2927 m = scmutil.match(wctx, pats, opts)
2920 2928
2921 2929 # we'll need this later
2922 2930 targetsubs = sorted(s for s in wctx.substate if m(s))
2923 2931
2924 2932 if not m.always():
2925 2933 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2926 2934 names[abs] = m.rel(abs), m.exact(abs)
2927 2935
2928 2936 # walk target manifest to fill `names`
2929 2937
2930 2938 def badfn(path, msg):
2931 2939 if path in names:
2932 2940 return
2933 2941 if path in ctx.substate:
2934 2942 return
2935 2943 path_ = path + '/'
2936 2944 for f in names:
2937 2945 if f.startswith(path_):
2938 2946 return
2939 2947 ui.warn("%s: %s\n" % (m.rel(path), msg))
2940 2948
2941 2949 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2942 2950 if abs not in names:
2943 2951 names[abs] = m.rel(abs), m.exact(abs)
2944 2952
2945 2953 # Find status of all file in `names`.
2946 2954 m = scmutil.matchfiles(repo, names)
2947 2955
2948 2956 changes = repo.status(node1=node, match=m,
2949 2957 unknown=True, ignored=True, clean=True)
2950 2958 else:
2951 2959 changes = repo.status(node1=node, match=m)
2952 2960 for kind in changes:
2953 2961 for abs in kind:
2954 2962 names[abs] = m.rel(abs), m.exact(abs)
2955 2963
2956 2964 m = scmutil.matchfiles(repo, names)
2957 2965
2958 2966 modified = set(changes.modified)
2959 2967 added = set(changes.added)
2960 2968 removed = set(changes.removed)
2961 2969 _deleted = set(changes.deleted)
2962 2970 unknown = set(changes.unknown)
2963 2971 unknown.update(changes.ignored)
2964 2972 clean = set(changes.clean)
2965 2973 modadded = set()
2966 2974
2967 2975 # split between files known in target manifest and the others
2968 2976 smf = set(mf)
2969 2977
2970 2978 # determine the exact nature of the deleted files
2971 2979 deladded = _deleted - smf
2972 2980 deleted = _deleted - deladded
2973 2981
2974 2982 # We need to account for the state of the file in the dirstate,
2975 2983 # even when we revert against something else than parent. This will
2976 2984 # slightly alter the behavior of revert (doing back up or not, delete
2977 2985 # or just forget etc).
2978 2986 if parent == node:
2979 2987 dsmodified = modified
2980 2988 dsadded = added
2981 2989 dsremoved = removed
2982 2990 # store all local modifications, useful later for rename detection
2983 2991 localchanges = dsmodified | dsadded
2984 2992 modified, added, removed = set(), set(), set()
2985 2993 else:
2986 2994 changes = repo.status(node1=parent, match=m)
2987 2995 dsmodified = set(changes.modified)
2988 2996 dsadded = set(changes.added)
2989 2997 dsremoved = set(changes.removed)
2990 2998 # store all local modifications, useful later for rename detection
2991 2999 localchanges = dsmodified | dsadded
2992 3000
2993 3001 # only take into account for removes between wc and target
2994 3002 clean |= dsremoved - removed
2995 3003 dsremoved &= removed
2997 3005 # distinguish between dirstate removes and the others
2997 3005 removed -= dsremoved
2998 3006
2999 3007 modadded = added & dsmodified
3000 3008 added -= modadded
3001 3009
3003 3011 # tell newly modified files apart.
3003 3011 dsmodified &= modified
3004 3012 dsmodified |= modified & dsadded # dirstate added may need backup
3005 3013 modified -= dsmodified
3006 3014
3007 3015 # We need to wait for some post-processing to update this set
3008 3016 # before making the distinction. The dirstate will be used for
3009 3017 # that purpose.
3010 3018 dsadded = added
3011 3019
3012 3020 # in case of merge, files that are actually added can be reported as
3013 3021 # modified, we need to post process the result
3014 3022 if p2 != nullid:
3015 3023 mergeadd = dsmodified - smf
3016 3024 dsadded |= mergeadd
3017 3025 dsmodified -= mergeadd
3018 3026
3019 3027 # if f is a rename, update `names` to also revert the source
3020 3028 cwd = repo.getcwd()
3021 3029 for f in localchanges:
3022 3030 src = repo.dirstate.copied(f)
3023 3031 # XXX should we check for rename down to target node?
3024 3032 if src and src not in names and repo.dirstate[src] == 'r':
3025 3033 dsremoved.add(src)
3026 3034 names[src] = (repo.pathto(src, cwd), True)
3027 3035
3028 3036 # distinguish between file to forget and the other
3029 3037 added = set()
3030 3038 for abs in dsadded:
3031 3039 if repo.dirstate[abs] != 'a':
3032 3040 added.add(abs)
3033 3041 dsadded -= added
3034 3042
3035 3043 for abs in deladded:
3036 3044 if repo.dirstate[abs] == 'a':
3037 3045 dsadded.add(abs)
3038 3046 deladded -= dsadded
3039 3047
3040 3048 # For files marked as removed, we check if an unknown file is present at
3041 3049 # the same path. If such a file exists it may need to be backed up.
3042 3050 # Making the distinction at this stage helps have simpler backup
3043 3051 # logic.
3044 3052 removunk = set()
3045 3053 for abs in removed:
3046 3054 target = repo.wjoin(abs)
3047 3055 if os.path.lexists(target):
3048 3056 removunk.add(abs)
3049 3057 removed -= removunk
3050 3058
3051 3059 dsremovunk = set()
3052 3060 for abs in dsremoved:
3053 3061 target = repo.wjoin(abs)
3054 3062 if os.path.lexists(target):
3055 3063 dsremovunk.add(abs)
3056 3064 dsremoved -= dsremovunk
3057 3065
3058 3066 # action to be actually performed by revert
3059 3067 # (<list of files>, <message>) tuple
3060 3068 actions = {'revert': ([], _('reverting %s\n')),
3061 3069 'add': ([], _('adding %s\n')),
3062 3070 'remove': ([], _('removing %s\n')),
3063 3071 'drop': ([], _('removing %s\n')),
3064 3072 'forget': ([], _('forgetting %s\n')),
3065 3073 'undelete': ([], _('undeleting %s\n')),
3066 3074 'noop': (None, _('no changes needed to %s\n')),
3067 3075 'unknown': (None, _('file not managed: %s\n')),
3068 3076 }
3069 3077
3070 3078 # "constant" that convey the backup strategy.
3071 3079 # All set to `discard` if `no-backup` is set do avoid checking
3072 3080 # no_backup lower in the code.
3073 3081 # These values are ordered for comparison purposes
3074 3082 backupinteractive = 3 # do backup if interactively modified
3075 3083 backup = 2 # unconditionally do backup
3076 3084 check = 1 # check if the existing file differs from target
3077 3085 discard = 0 # never do backup
3078 3086 if opts.get('no_backup'):
3079 3087 backupinteractive = backup = check = discard
3080 3088 if interactive:
3081 3089 dsmodifiedbackup = backupinteractive
3082 3090 else:
3083 3091 dsmodifiedbackup = backup
3084 3092 tobackup = set()
3085 3093
3086 3094 backupanddel = actions['remove']
3087 3095 if not opts.get('no_backup'):
3088 3096 backupanddel = actions['drop']
3089 3097
3090 3098 disptable = (
3091 3099 # dispatch table:
3092 3100 # file state
3093 3101 # action
3094 3102 # make backup
3095 3103
3096 3104 ## Sets that result in changes to files on disk
3097 3105 # Modified compared to target, no local change
3098 3106 (modified, actions['revert'], discard),
3099 3107 # Modified compared to target, but local file is deleted
3100 3108 (deleted, actions['revert'], discard),
3101 3109 # Modified compared to target, local change
3102 3110 (dsmodified, actions['revert'], dsmodifiedbackup),
3103 3111 # Added since target
3104 3112 (added, actions['remove'], discard),
3105 3113 # Added in working directory
3106 3114 (dsadded, actions['forget'], discard),
3107 3115 # Added since target, have local modification
3108 3116 (modadded, backupanddel, backup),
3109 3117 # Added since target but file is missing in working directory
3110 3118 (deladded, actions['drop'], discard),
3111 3119 # Removed since target, before working copy parent
3112 3120 (removed, actions['add'], discard),
3113 3121 # Same as `removed` but an unknown file exists at the same path
3114 3122 (removunk, actions['add'], check),
3115 3123 # Removed since target, marked as such in working copy parent
3116 3124 (dsremoved, actions['undelete'], discard),
3117 3125 # Same as `dsremoved` but an unknown file exists at the same path
3118 3126 (dsremovunk, actions['undelete'], check),
3119 3127 ## the following sets do not result in any file changes
3120 3128 # File with no modification
3121 3129 (clean, actions['noop'], discard),
3122 3130 # Existing file, not tracked anywhere
3123 3131 (unknown, actions['unknown'], discard),
3124 3132 )
3125 3133
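To make the dispatch-table pattern above concrete, here is a minimal standalone sketch; the sets, file names and actions are invented for illustration, and the real table also records a per-entry backup strategy.

# Minimal sketch of the dispatch-table classification above. The sets, file
# names and actions are illustrative only.
modified = {'a.txt'}
added = {'b.txt'}
unknown = {'c.txt'}

actions = {'revert': [], 'remove': [], 'unknown': None}
disptable = (
    (modified, 'revert'),
    (added, 'remove'),
    (unknown, 'unknown'),
)

for name in sorted(modified | added | unknown):
    for table, action in disptable:
        if name not in table:
            continue
        if actions[action] is not None:   # 'unknown' is report-only
            actions[action].append(name)
        break

assert actions == {'revert': ['a.txt'], 'remove': ['b.txt'], 'unknown': None}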
3126 3134 for abs, (rel, exact) in sorted(names.items()):
3127 3135 # target file to be touched on disk (relative to cwd)
3128 3136 target = repo.wjoin(abs)
3129 3137 # search the entry in the dispatch table.
3130 3138 # if the file is in any of these sets, it was touched in the working
3131 3139 # directory parent and we are sure it needs to be reverted.
3132 3140 for table, (xlist, msg), dobackup in disptable:
3133 3141 if abs not in table:
3134 3142 continue
3135 3143 if xlist is not None:
3136 3144 xlist.append(abs)
3137 3145 if dobackup:
3138 3146 # If in interactive mode, don't automatically create
3139 3147 # .orig files (issue4793)
3140 3148 if dobackup == backupinteractive:
3141 3149 tobackup.add(abs)
3142 3150 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3143 3151 bakname = scmutil.origpath(ui, repo, rel)
3144 3152 ui.note(_('saving current version of %s as %s\n') %
3145 3153 (rel, bakname))
3146 3154 if not opts.get('dry_run'):
3147 3155 if interactive:
3148 3156 util.copyfile(target, bakname)
3149 3157 else:
3150 3158 util.rename(target, bakname)
3151 3159 if ui.verbose or not exact:
3152 3160 if not isinstance(msg, basestring):
3153 3161 msg = msg(abs)
3154 3162 ui.status(msg % rel)
3155 3163 elif exact:
3156 3164 ui.warn(msg % rel)
3157 3165 break
3158 3166
3159 3167 if not opts.get('dry_run'):
3160 3168 needdata = ('revert', 'add', 'undelete')
3161 3169 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3162 3170 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3163 3171
3164 3172 if targetsubs:
3165 3173 # Revert the subrepos on the revert list
3166 3174 for sub in targetsubs:
3167 3175 try:
3168 3176 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3169 3177 except KeyError:
3170 3178 raise error.Abort("subrepository '%s' does not exist in %s!"
3171 3179 % (sub, short(ctx.node())))
3172 3180
3173 3181 def _revertprefetch(repo, ctx, *files):
3174 3182 """Let extension changing the storage layer prefetch content"""
3175 3183 pass
3176 3184
3177 3185 def _performrevert(repo, parents, ctx, actions, interactive=False,
3178 3186 tobackup=None):
3179 3187 """function that actually perform all the actions computed for revert
3180 3188
3181 3189 This is an independent function to let extensions plug in and react to
3182 3190 the imminent revert.
3183 3191
3184 3192 Make sure you have the working directory locked when calling this function.
3185 3193 """
3186 3194 parent, p2 = parents
3187 3195 node = ctx.node()
3188 3196 excluded_files = []
3189 3197 matcher_opts = {"exclude": excluded_files}
3190 3198
3191 3199 def checkout(f):
3192 3200 fc = ctx[f]
3193 3201 repo.wwrite(f, fc.data(), fc.flags())
3194 3202
3195 3203 def doremove(f):
3196 3204 try:
3197 3205 util.unlinkpath(repo.wjoin(f))
3198 3206 except OSError:
3199 3207 pass
3200 3208 repo.dirstate.remove(f)
3201 3209
3202 3210 audit_path = pathutil.pathauditor(repo.root)
3203 3211 for f in actions['forget'][0]:
3204 3212 if interactive:
3205 3213 choice = repo.ui.promptchoice(
3206 3214 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3207 3215 if choice == 0:
3208 3216 repo.dirstate.drop(f)
3209 3217 else:
3210 3218 excluded_files.append(repo.wjoin(f))
3211 3219 else:
3212 3220 repo.dirstate.drop(f)
3213 3221 for f in actions['remove'][0]:
3214 3222 audit_path(f)
3215 3223 if interactive:
3216 3224 choice = repo.ui.promptchoice(
3217 3225 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3218 3226 if choice == 0:
3219 3227 doremove(f)
3220 3228 else:
3221 3229 excluded_files.append(repo.wjoin(f))
3222 3230 else:
3223 3231 doremove(f)
3224 3232 for f in actions['drop'][0]:
3225 3233 audit_path(f)
3226 3234 repo.dirstate.remove(f)
3227 3235
3228 3236 normal = None
3229 3237 if node == parent:
3230 3238 # We're reverting to our parent. If possible, we'd like status
3231 3239 # to report the file as clean. We have to use normallookup for
3232 3240 # merges to avoid losing information about merged/dirty files.
3233 3241 if p2 != nullid:
3234 3242 normal = repo.dirstate.normallookup
3235 3243 else:
3236 3244 normal = repo.dirstate.normal
3237 3245
3238 3246 newlyaddedandmodifiedfiles = set()
3239 3247 if interactive:
3240 3248 # Prompt the user for changes to revert
3241 3249 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3242 3250 m = scmutil.match(ctx, torevert, matcher_opts)
3243 3251 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3244 3252 diffopts.nodates = True
3245 3253 diffopts.git = True
3246 3254 reversehunks = repo.ui.configbool('experimental',
3247 3255 'revertalternateinteractivemode',
3248 3256 True)
3249 3257 if reversehunks:
3250 3258 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3251 3259 else:
3252 3260 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3253 3261 originalchunks = patch.parsepatch(diff)
3254 3262 operation = 'discard' if node == parent else 'revert'
3255 3263
3256 3264 try:
3257 3265
3258 3266 chunks, opts = recordfilter(repo.ui, originalchunks,
3259 3267 operation=operation)
3260 3268 if reversehunks:
3261 3269 chunks = patch.reversehunks(chunks)
3262 3270
3263 3271 except patch.PatchError as err:
3264 3272 raise error.Abort(_('error parsing patch: %s') % err)
3265 3273
3266 3274 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3267 3275 if tobackup is None:
3268 3276 tobackup = set()
3269 3277 # Apply changes
3270 3278 fp = stringio()
3271 3279 for c in chunks:
3272 3280 # Create a backup file only if this hunk should be backed up
3273 3281 if ishunk(c) and c.header.filename() in tobackup:
3274 3282 abs = c.header.filename()
3275 3283 target = repo.wjoin(abs)
3276 3284 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3277 3285 util.copyfile(target, bakname)
3278 3286 tobackup.remove(abs)
3279 3287 c.write(fp)
3280 3288 dopatch = fp.tell()
3281 3289 fp.seek(0)
3282 3290 if dopatch:
3283 3291 try:
3284 3292 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3285 3293 except patch.PatchError as err:
3286 3294 raise error.Abort(str(err))
3287 3295 del fp
3288 3296 else:
3289 3297 for f in actions['revert'][0]:
3290 3298 checkout(f)
3291 3299 if normal:
3292 3300 normal(f)
3293 3301
3294 3302 for f in actions['add'][0]:
3295 3303 # Don't checkout modified files, they are already created by the diff
3296 3304 if f not in newlyaddedandmodifiedfiles:
3297 3305 checkout(f)
3298 3306 repo.dirstate.add(f)
3299 3307
3300 3308 normal = repo.dirstate.normallookup
3301 3309 if node == parent and p2 == nullid:
3302 3310 normal = repo.dirstate.normal
3303 3311 for f in actions['undelete'][0]:
3304 3312 checkout(f)
3305 3313 normal(f)
3306 3314
3307 3315 copied = copies.pathcopies(repo[parent], ctx)
3308 3316
3309 3317 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3310 3318 if f in copied:
3311 3319 repo.dirstate.copy(copied[f], f)
3312 3320
3313 3321 def command(table):
3314 3322 """Returns a function object to be used as a decorator for making commands.
3315 3323
3316 3324 This function receives a command table as its argument. The table should
3317 3325 be a dict.
3318 3326
3319 3327 The returned function can be used as a decorator for adding commands
3320 3328 to that command table. This function accepts multiple arguments to define
3321 3329 a command.
3322 3330
3323 3331 The first argument is the command name.
3324 3332
3325 3333 The options argument is an iterable of tuples defining command arguments.
3326 3334 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3327 3335
3328 3336 The synopsis argument defines a short, one line summary of how to use the
3329 3337 command. This shows up in the help output.
3330 3338
3331 3339 The norepo argument defines whether the command does not require a
3332 3340 local repository. Most commands operate against a repository, thus the
3333 3341 default is False.
3334 3342
3335 3343 The optionalrepo argument defines whether the command optionally requires
3336 3344 a local repository.
3337 3345
3338 3346 The inferrepo argument defines whether to try to find a repository from the
3339 3347 command line arguments. If True, arguments will be examined for potential
3340 3348 repository locations. See ``findrepo()``. If a repository is found, it
3341 3349 will be used.
3342 3350 """
3343 3351 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3344 3352 inferrepo=False):
3345 3353 def decorator(func):
3346 3354 func.norepo = norepo
3347 3355 func.optionalrepo = optionalrepo
3348 3356 func.inferrepo = inferrepo
3349 3357 if synopsis:
3350 3358 table[name] = func, list(options), synopsis
3351 3359 else:
3352 3360 table[name] = func, list(options)
3353 3361 return func
3354 3362 return decorator
3355 3363
3356 3364 return cmd
3357 3365
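The docstring above describes how the returned decorator is meant to be used; the following hypothetical usage sketch shows the mechanics (the command name, option and synopsis are made up, not real Mercurial commands).

# Hypothetical usage sketch of the command() decorator defined above.
table = {}
examplecommand = command(table)

@examplecommand('example-hello',
                [('g', 'greeting', 'Hello', 'greeting to print')],
                'hg example-hello [-g TEXT]', norepo=True)
def hello(ui, **opts):
    ui.write('%s\n' % opts.get('greeting'))

# After decoration, the table maps the name to (func, options, synopsis) and
# the function carries the norepo/optionalrepo/inferrepo flags.
assert table['example-hello'][0] is hello
assert hello.norepo and not hello.optionalrepo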
3358 3366 def checkunresolved(ms):
3359 3367 ms._repo.ui.deprecwarn('checkunresolved moved from cmdutil to mergeutil',
3360 3368 '4.1')
3361 3369 return mergeutil.checkunresolved(ms)
3362 3370
3363 3371 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3364 3372 # commands.outgoing. "missing" is "missing" of the result of
3365 3373 # "findcommonoutgoing()"
3366 3374 outgoinghooks = util.hooks()
3367 3375
3368 3376 # a list of (ui, repo) functions called by commands.summary
3369 3377 summaryhooks = util.hooks()
3370 3378
3371 3379 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3372 3380 #
3373 3381 # functions should return tuple of booleans below, if 'changes' is None:
3374 3382 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3375 3383 #
3376 3384 # otherwise, 'changes' is a tuple of tuples below:
3377 3385 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3378 3386 # - (desturl, destbranch, destpeer, outgoing)
3379 3387 summaryremotehooks = util.hooks()
3380 3388
3381 3389 # A list of state files kept by multistep operations like graft.
3382 3390 # Since graft cannot be aborted, it is considered 'clearable' by update.
3383 3391 # note: bisect is intentionally excluded
3384 3392 # (state file, clearable, allowcommit, error, hint)
3385 3393 unfinishedstates = [
3386 3394 ('graftstate', True, False, _('graft in progress'),
3387 3395 _("use 'hg graft --continue' or 'hg update' to abort")),
3388 3396 ('updatestate', True, False, _('last update was interrupted'),
3389 3397 _("use 'hg update' to get a consistent checkout"))
3390 3398 ]
3391 3399
3392 3400 def checkunfinished(repo, commit=False):
3393 3401 '''Look for an unfinished multistep operation, like graft, and abort
3394 3402 if found. It's probably good to check this right before
3395 3403 bailifchanged().
3396 3404 '''
3397 3405 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3398 3406 if commit and allowcommit:
3399 3407 continue
3400 3408 if repo.vfs.exists(f):
3401 3409 raise error.Abort(msg, hint=hint)
3402 3410
3403 3411 def clearunfinished(repo):
3404 3412 '''Check for unfinished operations (as above), and clear the ones
3405 3413 that are clearable.
3406 3414 '''
3407 3415 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3408 3416 if not clearable and repo.vfs.exists(f):
3409 3417 raise error.Abort(msg, hint=hint)
3410 3418 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3411 3419 if clearable and repo.vfs.exists(f):
3412 3420 util.unlink(repo.join(f))
3413 3421
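The unfinishedstates tuples above drive both checkunfinished() and clearunfinished(). The following standalone sketch mimics the check; the state file name, messages and the 'exists' callback (standing in for repo.vfs.exists) are all illustrative.

# Standalone sketch of the unfinished-state check above.
samplestates = [
    ('samplestate', True, False, 'sample operation in progress',
     "use 'hg sample --continue' to resume"),
]

def _checkunfinished(exists, commit=False):
    for f, clearable, allowcommit, msg, hint in samplestates:
        if commit and allowcommit:
            continue
        if exists(f):
            raise RuntimeError('%s (hint: %s)' % (msg, hint))

_checkunfinished(lambda f: False)                    # nothing in progress
try:
    _checkunfinished(lambda f: f == 'samplestate')   # state file present
except RuntimeError as err:
    assert 'sample operation in progress' in str(err)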
3414 3422 afterresolvedstates = [
3415 3423 ('graftstate',
3416 3424 _('hg graft --continue')),
3417 3425 ]
3418 3426
3419 3427 def howtocontinue(repo):
3420 3428 '''Check for an unfinished operation and return the command to finish
3421 3429 it.
3422 3430
3423 3431 afterresolvedstates tuples define a .hg/{file} and the corresponding
3424 3432 command needed to finish it.
3425 3433
3426 3434 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3427 3435 a boolean.
3428 3436 '''
3429 3437 contmsg = _("continue: %s")
3430 3438 for f, msg in afterresolvedstates:
3431 3439 if repo.vfs.exists(f):
3432 3440 return contmsg % msg, True
3433 3441 workingctx = repo[None]
3434 3442 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3435 3443 for s in workingctx.substate)
3436 3444 if dirty:
3437 3445 return contmsg % _("hg commit"), False
3438 3446 return None, None
3439 3447
3440 3448 def checkafterresolved(repo):
3441 3449 '''Inform the user about the next action after completing hg resolve
3442 3450
3443 3451 If there's a matching afterresolvedstates, howtocontinue will yield
3444 3452 repo.ui.warn as the reporter.
3445 3453
3446 3454 Otherwise, it will yield repo.ui.note.
3447 3455 '''
3448 3456 msg, warning = howtocontinue(repo)
3449 3457 if msg is not None:
3450 3458 if warning:
3451 3459 repo.ui.warn("%s\n" % msg)
3452 3460 else:
3453 3461 repo.ui.note("%s\n" % msg)
3454 3462
3455 3463 def wrongtooltocontinue(repo, task):
3456 3464 '''Raise an abort suggesting how to properly continue if there is an
3457 3465 active task.
3458 3466
3459 3467 Uses howtocontinue() to find the active task.
3460 3468
3461 3469 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3462 3470 a hint.
3463 3471 '''
3464 3472 after = howtocontinue(repo)
3465 3473 hint = None
3466 3474 if after[1]:
3467 3475 hint = after[0]
3468 3476 raise error.Abort(_('no %s in progress') % task, hint=hint)
3469 3477
3470 3478 class dirstateguard(dirstateguardmod.dirstateguard):
3471 3479 def __init__(self, repo, name):
3472 3480 dirstateguardmod.dirstateguard.__init__(self, repo, name)
3473 3481 repo.ui.deprecwarn(
3474 3482 'dirstateguard has moved from cmdutil to dirstateguard',
3475 3483 '4.1')
@@ -1,1960 +1,1960 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 cmdutil,
26 26 config,
27 27 encoding,
28 28 error,
29 29 exchange,
30 30 filemerge,
31 31 match as matchmod,
32 32 node,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 scmutil,
37 37 util,
38 38 )
39 39
40 40 hg = None
41 41 propertycache = util.propertycache
42 42
43 43 nullstate = ('', '', 'empty')
44 44
45 45 def _expandedabspath(path):
46 46 '''
47 47 get a path or url and if it is a path expand it and return an absolute path
48 48 '''
49 49 expandedpath = util.urllocalpath(util.expandpath(path))
50 50 u = util.url(expandedpath)
51 51 if not u.scheme:
52 52 path = util.normpath(os.path.abspath(u.path))
53 53 return path
54 54
55 55 def _getstorehashcachename(remotepath):
56 56 '''get a unique filename for the store hash cache of a remote repository'''
57 57 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
58 58
59 59 class SubrepoAbort(error.Abort):
60 60 """Exception class used to avoid handling a subrepo error more than once"""
61 61 def __init__(self, *args, **kw):
62 62 self.subrepo = kw.pop('subrepo', None)
63 63 self.cause = kw.pop('cause', None)
64 64 error.Abort.__init__(self, *args, **kw)
65 65
66 66 def annotatesubrepoerror(func):
67 67 def decoratedmethod(self, *args, **kargs):
68 68 try:
69 69 res = func(self, *args, **kargs)
70 70 except SubrepoAbort as ex:
71 71 # This exception has already been handled
72 72 raise ex
73 73 except error.Abort as ex:
74 74 subrepo = subrelpath(self)
75 75 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
76 76 # avoid handling this exception by raising a SubrepoAbort exception
77 77 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
78 78 cause=sys.exc_info())
79 79 return res
80 80 return decoratedmethod
81 81
82 82 def state(ctx, ui):
83 83 """return a state dict, mapping subrepo paths configured in .hgsub
84 84 to tuple: (source from .hgsub, revision from .hgsubstate, kind
85 85 (key in types dict))
86 86 """
87 87 p = config.config()
88 88 repo = ctx.repo()
89 89 def read(f, sections=None, remap=None):
90 90 if f in ctx:
91 91 try:
92 92 data = ctx[f].data()
93 93 except IOError as err:
94 94 if err.errno != errno.ENOENT:
95 95 raise
96 96 # handle missing subrepo spec files as removed
97 97 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
98 98 repo.pathto(f))
99 99 return
100 100 p.parse(f, data, sections, remap, read)
101 101 else:
102 102 raise error.Abort(_("subrepo spec file \'%s\' not found") %
103 103 repo.pathto(f))
104 104 if '.hgsub' in ctx:
105 105 read('.hgsub')
106 106
107 107 for path, src in ui.configitems('subpaths'):
108 108 p.set('subpaths', path, src, ui.configsource('subpaths', path))
109 109
110 110 rev = {}
111 111 if '.hgsubstate' in ctx:
112 112 try:
113 113 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
114 114 l = l.lstrip()
115 115 if not l:
116 116 continue
117 117 try:
118 118 revision, path = l.split(" ", 1)
119 119 except ValueError:
120 120 raise error.Abort(_("invalid subrepository revision "
121 121 "specifier in \'%s\' line %d")
122 122 % (repo.pathto('.hgsubstate'), (i + 1)))
123 123 rev[path] = revision
124 124 except IOError as err:
125 125 if err.errno != errno.ENOENT:
126 126 raise
127 127
128 128 def remap(src):
129 129 for pattern, repl in p.items('subpaths'):
130 130 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
131 131 # does a string decode.
132 132 repl = repl.encode('string-escape')
133 133 # However, we still want to allow back references to go
134 134 # through unharmed, so we turn r'\\1' into r'\1'. Again,
135 135 # extra escapes are needed because re.sub string decodes.
136 136 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
137 137 try:
138 138 src = re.sub(pattern, repl, src, 1)
139 139 except re.error as e:
140 140 raise error.Abort(_("bad subrepository pattern in %s: %s")
141 141 % (p.source('subpaths', pattern), e))
142 142 return src
143 143
144 144 state = {}
145 145 for path, src in p[''].items():
146 146 kind = 'hg'
147 147 if src.startswith('['):
148 148 if ']' not in src:
149 149 raise error.Abort(_('missing ] in subrepo source'))
150 150 kind, src = src.split(']', 1)
151 151 kind = kind[1:]
152 152 src = src.lstrip() # strip any extra whitespace after ']'
153 153
154 154 if not util.url(src).isabs():
155 155 parent = _abssource(repo, abort=False)
156 156 if parent:
157 157 parent = util.url(parent)
158 158 parent.path = posixpath.join(parent.path or '', src)
159 159 parent.path = posixpath.normpath(parent.path)
160 160 joined = str(parent)
161 161 # Remap the full joined path and use it if it changes,
162 162 # else remap the original source.
163 163 remapped = remap(joined)
164 164 if remapped == joined:
165 165 src = remap(src)
166 166 else:
167 167 src = remapped
168 168
169 169 src = remap(src)
170 170 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
171 171
172 172 return state
173 173
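As a concrete illustration of the formats parsed by state() above, this sketch splits a toy .hgsub source and a toy .hgsubstate line the same way the code does; all URLs, paths and revision hashes are invented.

# Toy parsing sketch for the .hgsub / .hgsubstate formats handled above.
hgsub_src = '[git]https://example.com/snippets.git'
kind = 'hg'
if hgsub_src.startswith('['):
    kind, hgsub_src = hgsub_src.split(']', 1)
    kind = kind[1:]
    hgsub_src = hgsub_src.lstrip()
assert (kind, hgsub_src) == ('git', 'https://example.com/snippets.git')

substate_line = '1234567890abcdef1234567890abcdef12345678 vendor/lib'
revision, path = substate_line.split(' ', 1)
assert len(revision) == 40 and path == 'vendor/lib'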
174 174 def writestate(repo, state):
175 175 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
176 176 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
177 177 if state[s][1] != nullstate[1]]
178 178 repo.wwrite('.hgsubstate', ''.join(lines), '')
179 179
180 180 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
181 181 """delegated from merge.applyupdates: merging of .hgsubstate file
182 182 in working context, merging context and ancestor context"""
183 183 if mctx == actx: # backwards?
184 184 actx = wctx.p1()
185 185 s1 = wctx.substate
186 186 s2 = mctx.substate
187 187 sa = actx.substate
188 188 sm = {}
189 189
190 190 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
191 191
192 192 def debug(s, msg, r=""):
193 193 if r:
194 194 r = "%s:%s:%s" % r
195 195 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
196 196
197 197 for s, l in sorted(s1.iteritems()):
198 198 a = sa.get(s, nullstate)
199 199 ld = l # local state with possible dirty flag for compares
200 200 if wctx.sub(s).dirty():
201 201 ld = (l[0], l[1] + "+")
202 202 if wctx == actx: # overwrite
203 203 a = ld
204 204
205 205 if s in s2:
206 206 prompts = filemerge.partextras(labels)
207 207 prompts['s'] = s
208 208 r = s2[s]
209 209 if ld == r or r == a: # no change or local is newer
210 210 sm[s] = l
211 211 continue
212 212 elif ld == a: # other side changed
213 213 debug(s, "other changed, get", r)
214 214 wctx.sub(s).get(r, overwrite)
215 215 sm[s] = r
216 216 elif ld[0] != r[0]: # sources differ
217 217 prompts['lo'] = l[0]
218 218 prompts['ro'] = r[0]
219 219 if repo.ui.promptchoice(
220 220 _(' subrepository sources for %(s)s differ\n'
221 221 'use (l)ocal%(l)s source (%(lo)s)'
222 222 ' or (r)emote%(o)s source (%(ro)s)?'
223 223 '$$ &Local $$ &Remote') % prompts, 0):
224 224 debug(s, "prompt changed, get", r)
225 225 wctx.sub(s).get(r, overwrite)
226 226 sm[s] = r
227 227 elif ld[1] == a[1]: # local side is unchanged
228 228 debug(s, "other side changed, get", r)
229 229 wctx.sub(s).get(r, overwrite)
230 230 sm[s] = r
231 231 else:
232 232 debug(s, "both sides changed")
233 233 srepo = wctx.sub(s)
234 234 prompts['sl'] = srepo.shortid(l[1])
235 235 prompts['sr'] = srepo.shortid(r[1])
236 236 option = repo.ui.promptchoice(
237 237 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
238 238 'remote revision: %(sr)s)\n'
239 239 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
240 240 '$$ &Merge $$ &Local $$ &Remote')
241 241 % prompts, 0)
242 242 if option == 0:
243 243 wctx.sub(s).merge(r)
244 244 sm[s] = l
245 245 debug(s, "merge with", r)
246 246 elif option == 1:
247 247 sm[s] = l
248 248 debug(s, "keep local subrepo revision", l)
249 249 else:
250 250 wctx.sub(s).get(r, overwrite)
251 251 sm[s] = r
252 252 debug(s, "get remote subrepo revision", r)
253 253 elif ld == a: # remote removed, local unchanged
254 254 debug(s, "remote removed, remove")
255 255 wctx.sub(s).remove()
256 256 elif a == nullstate: # not present in remote or ancestor
257 257 debug(s, "local added, keep")
258 258 sm[s] = l
259 259 continue
260 260 else:
261 261 if repo.ui.promptchoice(
262 262 _(' local%(l)s changed subrepository %(s)s'
263 263 ' which remote%(o)s removed\n'
264 264 'use (c)hanged version or (d)elete?'
265 265 '$$ &Changed $$ &Delete') % prompts, 0):
266 266 debug(s, "prompt remove")
267 267 wctx.sub(s).remove()
268 268
269 269 for s, r in sorted(s2.items()):
270 270 if s in s1:
271 271 continue
272 272 elif s not in sa:
273 273 debug(s, "remote added, get", r)
274 274 mctx.sub(s).get(r)
275 275 sm[s] = r
276 276 elif r != sa[s]:
277 277 if repo.ui.promptchoice(
278 278 _(' remote%(o)s changed subrepository %(s)s'
279 279 ' which local%(l)s removed\n'
280 280 'use (c)hanged version or (d)elete?'
281 281 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
282 282 debug(s, "prompt recreate", r)
283 283 mctx.sub(s).get(r)
284 284 sm[s] = r
285 285
286 286 # record merged .hgsubstate
287 287 writestate(repo, sm)
288 288 return sm
289 289
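The core of submerge() above is a three-way comparison of local, remote and ancestor subrepo states. This toy sketch captures that decision only; states are opaque strings here, and the real code also handles dirty working copies, prompting and removals.

# Toy sketch of the three-way .hgsubstate decision implemented above.
def choosesubstate(l, r, a):
    if l == r or r == a:          # no change, or local is newer
        return l
    if l == a:                    # only the other side changed
        return r
    return 'needs merge/prompt'   # both sides changed

assert choosesubstate('x', 'x', 'x') == 'x'
assert choosesubstate('x', 'y', 'x') == 'y'              # take remote change
assert choosesubstate('y', 'x', 'x') == 'y'              # keep local change
assert choosesubstate('y', 'z', 'x') == 'needs merge/prompt'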
290 290 def _updateprompt(ui, sub, dirty, local, remote):
291 291 if dirty:
292 292 msg = (_(' subrepository sources for %s differ\n'
293 293 'use (l)ocal source (%s) or (r)emote source (%s)?'
294 294 '$$ &Local $$ &Remote')
295 295 % (subrelpath(sub), local, remote))
296 296 else:
297 297 msg = (_(' subrepository sources for %s differ (in checked out '
298 298 'version)\n'
299 299 'use (l)ocal source (%s) or (r)emote source (%s)?'
300 300 '$$ &Local $$ &Remote')
301 301 % (subrelpath(sub), local, remote))
302 302 return ui.promptchoice(msg, 0)
303 303
304 304 def reporelpath(repo):
305 305 """return path to this (sub)repo as seen from outermost repo"""
306 306 parent = repo
307 307 while util.safehasattr(parent, '_subparent'):
308 308 parent = parent._subparent
309 309 return repo.root[len(pathutil.normasprefix(parent.root)):]
310 310
311 311 def subrelpath(sub):
312 312 """return path to this subrepo as seen from outermost repo"""
313 313 return sub._relpath
314 314
315 315 def _abssource(repo, push=False, abort=True):
316 316 """return pull/push path of repo - either based on parent repo .hgsub info
317 317 or on the top repo config. Abort or return None if no source found."""
318 318 if util.safehasattr(repo, '_subparent'):
319 319 source = util.url(repo._subsource)
320 320 if source.isabs():
321 321 return str(source)
322 322 source.path = posixpath.normpath(source.path)
323 323 parent = _abssource(repo._subparent, push, abort=False)
324 324 if parent:
325 325 parent = util.url(util.pconvert(parent))
326 326 parent.path = posixpath.join(parent.path or '', source.path)
327 327 parent.path = posixpath.normpath(parent.path)
328 328 return str(parent)
329 329 else: # recursion reached top repo
330 330 if util.safehasattr(repo, '_subtoppath'):
331 331 return repo._subtoppath
332 332 if push and repo.ui.config('paths', 'default-push'):
333 333 return repo.ui.config('paths', 'default-push')
334 334 if repo.ui.config('paths', 'default'):
335 335 return repo.ui.config('paths', 'default')
336 336 if repo.shared():
337 337 # chop off the .hg component to get the default path form
338 338 return os.path.dirname(repo.sharedpath)
339 339 if abort:
340 340 raise error.Abort(_("default path for subrepository not found"))
341 341
342 342 def _sanitize(ui, vfs, ignore):
343 343 for dirname, dirs, names in vfs.walk():
344 344 for i, d in enumerate(dirs):
345 345 if d.lower() == ignore:
346 346 del dirs[i]
347 347 break
348 348 if vfs.basename(dirname).lower() != '.hg':
349 349 continue
350 350 for f in names:
351 351 if f.lower() == 'hgrc':
352 352 ui.warn(_("warning: removing potentially hostile 'hgrc' "
353 353 "in '%s'\n") % vfs.join(dirname))
354 354 vfs.unlink(vfs.reljoin(dirname, f))
355 355
356 356 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
357 357 """return instance of the right subrepo class for subrepo in path"""
358 358 # subrepo inherently violates our import layering rules
359 359 # because it wants to make repo objects from deep inside the stack
360 360 # so we manually delay the circular imports to not break
361 361 # scripts that don't use our demand-loading
362 362 global hg
363 363 from . import hg as h
364 364 hg = h
365 365
366 366 pathutil.pathauditor(ctx.repo().root)(path)
367 367 state = ctx.substate[path]
368 368 if state[2] not in types:
369 369 raise error.Abort(_('unknown subrepo type %s') % state[2])
370 370 if allowwdir:
371 371 state = (state[0], ctx.subrev(path), state[2])
372 372 return types[state[2]](ctx, path, state[:2], allowcreate)
373 373
374 374 def nullsubrepo(ctx, path, pctx):
375 375 """return an empty subrepo in pctx for the extant subrepo in ctx"""
376 376 # subrepo inherently violates our import layering rules
377 377 # because it wants to make repo objects from deep inside the stack
378 378 # so we manually delay the circular imports to not break
379 379 # scripts that don't use our demand-loading
380 380 global hg
381 381 from . import hg as h
382 382 hg = h
383 383
384 384 pathutil.pathauditor(ctx.repo().root)(path)
385 385 state = ctx.substate[path]
386 386 if state[2] not in types:
387 387 raise error.Abort(_('unknown subrepo type %s') % state[2])
388 388 subrev = ''
389 389 if state[2] == 'hg':
390 390 subrev = "0" * 40
391 391 return types[state[2]](pctx, path, (state[0], subrev), True)
392 392
393 393 def newcommitphase(ui, ctx):
394 394 commitphase = phases.newcommitphase(ui)
395 395 substate = getattr(ctx, "substate", None)
396 396 if not substate:
397 397 return commitphase
398 398 check = ui.config('phases', 'checksubrepos', 'follow')
399 399 if check not in ('ignore', 'follow', 'abort'):
400 400 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
401 401 % (check))
402 402 if check == 'ignore':
403 403 return commitphase
404 404 maxphase = phases.public
405 405 maxsub = None
406 406 for s in sorted(substate):
407 407 sub = ctx.sub(s)
408 408 subphase = sub.phase(substate[s][1])
409 409 if maxphase < subphase:
410 410 maxphase = subphase
411 411 maxsub = s
412 412 if commitphase < maxphase:
413 413 if check == 'abort':
414 414 raise error.Abort(_("can't commit in %s phase"
415 415 " conflicting %s from subrepository %s") %
416 416 (phases.phasenames[commitphase],
417 417 phases.phasenames[maxphase], maxsub))
418 418 ui.warn(_("warning: changes are committed in"
419 419 " %s phase from subrepository %s\n") %
420 420 (phases.phasenames[maxphase], maxsub))
421 421 return maxphase
422 422 return commitphase
423 423
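A small sketch of the phase comparison in newcommitphase() above. Phase numbers mirror Mercurial's ordering (public=0, draft=1, secret=2) and the 'check' values are the ones the code accepts; everything else here is illustrative.

# Toy sketch of the commit-phase vs. subrepo-phase check above.
def effectivephase(commitphase, subphases, check='follow'):
    if check == 'ignore' or not subphases:
        return commitphase
    maxphase = max(subphases)
    if commitphase < maxphase:
        if check == 'abort':
            raise ValueError('commit phase conflicts with a subrepo phase')
        return maxphase           # 'follow': adopt the highest subrepo phase
    return commitphase

assert effectivephase(1, [0, 1]) == 1        # draft commit, draft subrepo
assert effectivephase(0, [2]) == 2           # follow the secret subrepo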
424 424 # subrepo classes need to implement the following abstract class:
425 425
426 426 class abstractsubrepo(object):
427 427
428 428 def __init__(self, ctx, path):
429 429 """Initialize abstractsubrepo part
430 430
431 431 ``ctx`` is the context referring this subrepository in the
432 432 parent repository.
433 433
434 434 ``path`` is the path to this subrepository as seen from
435 435 innermost repository.
436 436 """
437 437 self.ui = ctx.repo().ui
438 438 self._ctx = ctx
439 439 self._path = path
440 440
441 441 def storeclean(self, path):
442 442 """
443 443 returns true if the repository has not changed since it was last
444 444 cloned from or pushed to a given repository.
445 445 """
446 446 return False
447 447
448 448 def dirty(self, ignoreupdate=False):
449 449 """returns true if the dirstate of the subrepo is dirty or does not
450 450 match current stored state. If ignoreupdate is true, only check
451 451 whether the subrepo has uncommitted changes in its dirstate.
452 452 """
453 453 raise NotImplementedError
454 454
455 455 def dirtyreason(self, ignoreupdate=False):
456 456 """return reason string if it is ``dirty()``
457 457
458 458 Returned string should have enough information for the message
459 459 of exception.
460 460
461 461 This returns None, otherwise.
462 462 """
463 463 if self.dirty(ignoreupdate=ignoreupdate):
464 464 return _("uncommitted changes in subrepository '%s'"
465 465 ) % subrelpath(self)
466 466
467 def bailifchanged(self, ignoreupdate=False):
467 def bailifchanged(self, ignoreupdate=False, hint=None):
468 468 """raise Abort if subrepository is ``dirty()``
469 469 """
470 470 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
471 471 if dirtyreason:
472 raise error.Abort(dirtyreason)
472 raise error.Abort(dirtyreason, hint=hint)
473 473
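The change above adds an optional hint that is forwarded to the abort. The following self-contained sketch shows the dirty-check/hint pattern; AbortLike stands in for error.Abort and all message strings are illustrative, not the wording rebase actually uses.

# Self-contained sketch of the bailifchanged()/hint pattern above.
class AbortLike(Exception):
    def __init__(self, message, hint=None):
        super(AbortLike, self).__init__(message)
        self.hint = hint

def bailifchanged(dirtyreason, hint=None):
    if dirtyreason:
        raise AbortLike(dirtyreason, hint=hint)

try:
    bailifchanged("uncommitted changes in subrepository 'lib'",
                  hint='commit or revert the changes first')
except AbortLike as err:
    assert err.hint == 'commit or revert the changes first'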
474 474 def basestate(self):
475 475 """current working directory base state, disregarding .hgsubstate
476 476 state and working directory modifications"""
477 477 raise NotImplementedError
478 478
479 479 def checknested(self, path):
480 480 """check if path is a subrepository within this repository"""
481 481 return False
482 482
483 483 def commit(self, text, user, date):
484 484 """commit the current changes to the subrepo with the given
485 485 log message. Use given user and date if possible. Return the
486 486 new state of the subrepo.
487 487 """
488 488 raise NotImplementedError
489 489
490 490 def phase(self, state):
491 491 """returns phase of specified state in the subrepository.
492 492 """
493 493 return phases.public
494 494
495 495 def remove(self):
496 496 """remove the subrepo
497 497
498 498 (should verify the dirstate is not dirty first)
499 499 """
500 500 raise NotImplementedError
501 501
502 502 def get(self, state, overwrite=False):
503 503 """run whatever commands are needed to put the subrepo into
504 504 this state
505 505 """
506 506 raise NotImplementedError
507 507
508 508 def merge(self, state):
509 509 """merge currently-saved state with the new state."""
510 510 raise NotImplementedError
511 511
512 512 def push(self, opts):
513 513 """perform whatever action is analogous to 'hg push'
514 514
515 515 This may be a no-op on some systems.
516 516 """
517 517 raise NotImplementedError
518 518
519 519 def add(self, ui, match, prefix, explicitonly, **opts):
520 520 return []
521 521
522 522 def addremove(self, matcher, prefix, opts, dry_run, similarity):
523 523 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
524 524 return 1
525 525
526 526 def cat(self, match, prefix, **opts):
527 527 return 1
528 528
529 529 def status(self, rev2, **opts):
530 530 return scmutil.status([], [], [], [], [], [], [])
531 531
532 532 def diff(self, ui, diffopts, node2, match, prefix, **opts):
533 533 pass
534 534
535 535 def outgoing(self, ui, dest, opts):
536 536 return 1
537 537
538 538 def incoming(self, ui, source, opts):
539 539 return 1
540 540
541 541 def files(self):
542 542 """return filename iterator"""
543 543 raise NotImplementedError
544 544
545 545 def filedata(self, name):
546 546 """return file data"""
547 547 raise NotImplementedError
548 548
549 549 def fileflags(self, name):
550 550 """return file flags"""
551 551 return ''
552 552
553 553 def getfileset(self, expr):
554 554 """Resolve the fileset expression for this repo"""
555 555 return set()
556 556
557 557 def printfiles(self, ui, m, fm, fmt, subrepos):
558 558 """handle the files command for this subrepo"""
559 559 return 1
560 560
561 561 def archive(self, archiver, prefix, match=None):
562 562 if match is not None:
563 563 files = [f for f in self.files() if match(f)]
564 564 else:
565 565 files = self.files()
566 566 total = len(files)
567 567 relpath = subrelpath(self)
568 568 self.ui.progress(_('archiving (%s)') % relpath, 0,
569 569 unit=_('files'), total=total)
570 570 for i, name in enumerate(files):
571 571 flags = self.fileflags(name)
572 572 mode = 'x' in flags and 0o755 or 0o644
573 573 symlink = 'l' in flags
574 574 archiver.addfile(prefix + self._path + '/' + name,
575 575 mode, symlink, self.filedata(name))
576 576 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
577 577 unit=_('files'), total=total)
578 578 self.ui.progress(_('archiving (%s)') % relpath, None)
579 579 return total
580 580
581 581 def walk(self, match):
582 582 '''
583 583 walk recursively through the directory tree, finding all files
584 584 matched by the match function
585 585 '''
586 586 pass
587 587
588 588 def forget(self, match, prefix):
589 589 return ([], [])
590 590
591 591 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
592 592 """remove the matched files from the subrepository and the filesystem,
593 593 possibly by force and/or after the file has been removed from the
594 594 filesystem. Return 0 on success, 1 on any warning.
595 595 """
596 596 warnings.append(_("warning: removefiles not implemented (%s)")
597 597 % self._path)
598 598 return 1
599 599
600 600 def revert(self, substate, *pats, **opts):
601 601 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
602 602 % (substate[0], substate[2]))
603 603 return []
604 604
605 605 def shortid(self, revid):
606 606 return revid
607 607
608 608 def verify(self):
609 609 '''verify the integrity of the repository. Return 0 on success or
610 610 warning, 1 on any error.
611 611 '''
612 612 return 0
613 613
614 614 @propertycache
615 615 def wvfs(self):
616 616 """return vfs to access the working directory of this subrepository
617 617 """
618 618 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
619 619
620 620 @propertycache
621 621 def _relpath(self):
622 622 """return path to this subrepository as seen from outermost repository
623 623 """
624 624 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
625 625
626 626 class hgsubrepo(abstractsubrepo):
627 627 def __init__(self, ctx, path, state, allowcreate):
628 628 super(hgsubrepo, self).__init__(ctx, path)
629 629 self._state = state
630 630 r = ctx.repo()
631 631 root = r.wjoin(path)
632 632 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
633 633 self._repo = hg.repository(r.baseui, root, create=create)
634 634
635 635 # Propagate the parent's --hidden option
636 636 if r is r.unfiltered():
637 637 self._repo = self._repo.unfiltered()
638 638
639 639 self.ui = self._repo.ui
640 640 for s, k in [('ui', 'commitsubrepos')]:
641 641 v = r.ui.config(s, k)
642 642 if v:
643 643 self.ui.setconfig(s, k, v, 'subrepo')
644 644 # internal config: ui._usedassubrepo
645 645 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
646 646 self._initrepo(r, state[0], create)
647 647
648 648 def storeclean(self, path):
649 649 with self._repo.lock():
650 650 return self._storeclean(path)
651 651
652 652 def _storeclean(self, path):
653 653 clean = True
654 654 itercache = self._calcstorehash(path)
655 655 for filehash in self._readstorehashcache(path):
656 656 if filehash != next(itercache, None):
657 657 clean = False
658 658 break
659 659 if clean:
660 660 # if itercache is not yet exhausted here, the cached and
661 661 # current store hashes have a different number of entries
662 662 clean = next(itercache, None) is None
663 663 return clean
664 664
665 665 def _calcstorehash(self, remotepath):
666 666 '''calculate a unique "store hash"
667 667
668 668 This method is used to detect when there are changes that may
669 669 require a push to a given remote path.'''
670 670 # sort the files that will be hashed in increasing (likely) file size
671 671 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
672 672 yield '# %s\n' % _expandedabspath(remotepath)
673 673 vfs = self._repo.vfs
674 674 for relname in filelist:
675 675 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
676 676 yield '%s = %s\n' % (relname, filehash)
677 677
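The "store hash" described above is just a digest of a few store files, compared against a cached copy to decide whether a push could be needed. A minimal standalone sketch of that idea, with an illustrative file list and reader callback:

import hashlib

# Minimal sketch of the "store hash" idea above.
def calcstorehash(readfile, remotepath,
                  filelist=('bookmarks', 'store/00changelog.i')):
    yield '# %s\n' % remotepath
    for relname in filelist:
        yield '%s = %s\n' % (relname,
                             hashlib.sha1(readfile(relname)).hexdigest())

reader = lambda name: b''   # pretend every file is empty
cached = list(calcstorehash(reader, 'https://example.com/repo'))
current = list(calcstorehash(reader, 'https://example.com/repo'))
assert cached == current    # identical hashes: store is "clean" vs. that path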
678 678 @propertycache
679 679 def _cachestorehashvfs(self):
680 680 return scmutil.vfs(self._repo.join('cache/storehash'))
681 681
682 682 def _readstorehashcache(self, remotepath):
683 683 '''read the store hash cache for a given remote repository'''
684 684 cachefile = _getstorehashcachename(remotepath)
685 685 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
686 686
687 687 def _cachestorehash(self, remotepath):
688 688 '''cache the current store hash
689 689
690 690 Each remote repo requires its own store hash cache, because a subrepo
691 691 store may be "clean" versus a given remote repo, but not versus another
692 692 '''
693 693 cachefile = _getstorehashcachename(remotepath)
694 694 with self._repo.lock():
695 695 storehash = list(self._calcstorehash(remotepath))
696 696 vfs = self._cachestorehashvfs
697 697 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
698 698
699 699 def _getctx(self):
700 700 '''fetch the context for this subrepo revision, possibly a workingctx
701 701 '''
702 702 if self._ctx.rev() is None:
703 703 return self._repo[None] # workingctx if parent is workingctx
704 704 else:
705 705 rev = self._state[1]
706 706 return self._repo[rev]
707 707
708 708 @annotatesubrepoerror
709 709 def _initrepo(self, parentrepo, source, create):
710 710 self._repo._subparent = parentrepo
711 711 self._repo._subsource = source
712 712
713 713 if create:
714 714 lines = ['[paths]\n']
715 715
716 716 def addpathconfig(key, value):
717 717 if value:
718 718 lines.append('%s = %s\n' % (key, value))
719 719 self.ui.setconfig('paths', key, value, 'subrepo')
720 720
721 721 defpath = _abssource(self._repo, abort=False)
722 722 defpushpath = _abssource(self._repo, True, abort=False)
723 723 addpathconfig('default', defpath)
724 724 if defpath != defpushpath:
725 725 addpathconfig('default-push', defpushpath)
726 726
727 727 fp = self._repo.vfs("hgrc", "w", text=True)
728 728 try:
729 729 fp.write(''.join(lines))
730 730 finally:
731 731 fp.close()
732 732
733 733 @annotatesubrepoerror
734 734 def add(self, ui, match, prefix, explicitonly, **opts):
735 735 return cmdutil.add(ui, self._repo, match,
736 736 self.wvfs.reljoin(prefix, self._path),
737 737 explicitonly, **opts)
738 738
739 739 @annotatesubrepoerror
740 740 def addremove(self, m, prefix, opts, dry_run, similarity):
741 741 # In the same way as sub directories are processed, once in a subrepo,
742 742 # always enter any of its subrepos. Don't corrupt the options that will
743 743 # be used to process sibling subrepos however.
744 744 opts = copy.copy(opts)
745 745 opts['subrepos'] = True
746 746 return scmutil.addremove(self._repo, m,
747 747 self.wvfs.reljoin(prefix, self._path), opts,
748 748 dry_run, similarity)
749 749
750 750 @annotatesubrepoerror
751 751 def cat(self, match, prefix, **opts):
752 752 rev = self._state[1]
753 753 ctx = self._repo[rev]
754 754 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
755 755
756 756 @annotatesubrepoerror
757 757 def status(self, rev2, **opts):
758 758 try:
759 759 rev1 = self._state[1]
760 760 ctx1 = self._repo[rev1]
761 761 ctx2 = self._repo[rev2]
762 762 return self._repo.status(ctx1, ctx2, **opts)
763 763 except error.RepoLookupError as inst:
764 764 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
765 765 % (inst, subrelpath(self)))
766 766 return scmutil.status([], [], [], [], [], [], [])
767 767
768 768 @annotatesubrepoerror
769 769 def diff(self, ui, diffopts, node2, match, prefix, **opts):
770 770 try:
771 771 node1 = node.bin(self._state[1])
772 772 # We currently expect node2 to come from substate and be
773 773 # in hex format
774 774 if node2 is not None:
775 775 node2 = node.bin(node2)
776 776 cmdutil.diffordiffstat(ui, self._repo, diffopts,
777 777 node1, node2, match,
778 778 prefix=posixpath.join(prefix, self._path),
779 779 listsubrepos=True, **opts)
780 780 except error.RepoLookupError as inst:
781 781 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
782 782 % (inst, subrelpath(self)))
783 783
784 784 @annotatesubrepoerror
785 785 def archive(self, archiver, prefix, match=None):
786 786 self._get(self._state + ('hg',))
787 787 total = abstractsubrepo.archive(self, archiver, prefix, match)
788 788 rev = self._state[1]
789 789 ctx = self._repo[rev]
790 790 for subpath in ctx.substate:
791 791 s = subrepo(ctx, subpath, True)
792 792 submatch = matchmod.subdirmatcher(subpath, match)
793 793 total += s.archive(archiver, prefix + self._path + '/', submatch)
794 794 return total
795 795
796 796 @annotatesubrepoerror
797 797 def dirty(self, ignoreupdate=False):
798 798 r = self._state[1]
799 799 if r == '' and not ignoreupdate: # no state recorded
800 800 return True
801 801 w = self._repo[None]
802 802 if r != w.p1().hex() and not ignoreupdate:
803 803 # different version checked out
804 804 return True
805 805 return w.dirty() # working directory changed
806 806
807 807 def basestate(self):
808 808 return self._repo['.'].hex()
809 809
810 810 def checknested(self, path):
811 811 return self._repo._checknested(self._repo.wjoin(path))
812 812
813 813 @annotatesubrepoerror
814 814 def commit(self, text, user, date):
815 815 # don't bother committing in the subrepo if it's only been
816 816 # updated
817 817 if not self.dirty(True):
818 818 return self._repo['.'].hex()
819 819 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
820 820 n = self._repo.commit(text, user, date)
821 821 if not n:
822 822 return self._repo['.'].hex() # different version checked out
823 823 return node.hex(n)
824 824
825 825 @annotatesubrepoerror
826 826 def phase(self, state):
827 827 return self._repo[state].phase()
828 828
829 829 @annotatesubrepoerror
830 830 def remove(self):
831 831 # we can't fully delete the repository as it may contain
832 832 # local-only history
833 833 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
834 834 hg.clean(self._repo, node.nullid, False)
835 835
836 836 def _get(self, state):
837 837 source, revision, kind = state
838 838 if revision in self._repo.unfiltered():
839 839 return True
840 840 self._repo._subsource = source
841 841 srcurl = _abssource(self._repo)
842 842 other = hg.peer(self._repo, {}, srcurl)
843 843 if len(self._repo) == 0:
844 844 self.ui.status(_('cloning subrepo %s from %s\n')
845 845 % (subrelpath(self), srcurl))
846 846 parentrepo = self._repo._subparent
847 847 # use self._repo.vfs instead of self.wvfs to remove .hg only
848 848 self._repo.vfs.rmtree()
849 849 other, cloned = hg.clone(self._repo._subparent.baseui, {},
850 850 other, self._repo.root,
851 851 update=False)
852 852 self._repo = cloned.local()
853 853 self._initrepo(parentrepo, source, create=True)
854 854 self._cachestorehash(srcurl)
855 855 else:
856 856 self.ui.status(_('pulling subrepo %s from %s\n')
857 857 % (subrelpath(self), srcurl))
858 858 cleansub = self.storeclean(srcurl)
859 859 exchange.pull(self._repo, other)
860 860 if cleansub:
861 861 # keep the repo clean after pull
862 862 self._cachestorehash(srcurl)
863 863 return False
864 864
865 865 @annotatesubrepoerror
866 866 def get(self, state, overwrite=False):
867 867 inrepo = self._get(state)
868 868 source, revision, kind = state
869 869 repo = self._repo
870 870 repo.ui.debug("getting subrepo %s\n" % self._path)
871 871 if inrepo:
872 872 urepo = repo.unfiltered()
873 873 ctx = urepo[revision]
874 874 if ctx.hidden():
875 875 urepo.ui.warn(
876 876 _('revision %s in subrepo %s is hidden\n') \
877 877 % (revision[0:12], self._path))
878 878 repo = urepo
879 879 hg.updaterepo(repo, revision, overwrite)
880 880
881 881 @annotatesubrepoerror
882 882 def merge(self, state):
883 883 self._get(state)
884 884 cur = self._repo['.']
885 885 dst = self._repo[state[1]]
886 886 anc = dst.ancestor(cur)
887 887
888 888 def mergefunc():
889 889 if anc == cur and dst.branch() == cur.branch():
890 890 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
891 891 hg.update(self._repo, state[1])
892 892 elif anc == dst:
893 893 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
894 894 else:
895 895 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
896 896 hg.merge(self._repo, state[1], remind=False)
897 897
898 898 wctx = self._repo[None]
899 899 if self.dirty():
900 900 if anc != dst:
901 901 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
902 902 mergefunc()
903 903 else:
904 904 mergefunc()
905 905 else:
906 906 mergefunc()
907 907
908 908 @annotatesubrepoerror
909 909 def push(self, opts):
910 910 force = opts.get('force')
911 911 newbranch = opts.get('new_branch')
912 912 ssh = opts.get('ssh')
913 913
914 914 # push subrepos depth-first for coherent ordering
915 915 c = self._repo['']
916 916 subs = c.substate # only repos that are committed
917 917 for s in sorted(subs):
918 918 if c.sub(s).push(opts) == 0:
919 919 return False
920 920
921 921 dsturl = _abssource(self._repo, True)
922 922 if not force:
923 923 if self.storeclean(dsturl):
924 924 self.ui.status(
925 925 _('no changes made to subrepo %s since last push to %s\n')
926 926 % (subrelpath(self), dsturl))
927 927 return None
928 928 self.ui.status(_('pushing subrepo %s to %s\n') %
929 929 (subrelpath(self), dsturl))
930 930 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
931 931 res = exchange.push(self._repo, other, force, newbranch=newbranch)
932 932
933 933 # the repo is now clean
934 934 self._cachestorehash(dsturl)
935 935 return res.cgresult
936 936
937 937 @annotatesubrepoerror
938 938 def outgoing(self, ui, dest, opts):
939 939 if 'rev' in opts or 'branch' in opts:
940 940 opts = copy.copy(opts)
941 941 opts.pop('rev', None)
942 942 opts.pop('branch', None)
943 943 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
944 944
945 945 @annotatesubrepoerror
946 946 def incoming(self, ui, source, opts):
947 947 if 'rev' in opts or 'branch' in opts:
948 948 opts = copy.copy(opts)
949 949 opts.pop('rev', None)
950 950 opts.pop('branch', None)
951 951 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
952 952
953 953 @annotatesubrepoerror
954 954 def files(self):
955 955 rev = self._state[1]
956 956 ctx = self._repo[rev]
957 957 return ctx.manifest().keys()
958 958
959 959 def filedata(self, name):
960 960 rev = self._state[1]
961 961 return self._repo[rev][name].data()
962 962
963 963 def fileflags(self, name):
964 964 rev = self._state[1]
965 965 ctx = self._repo[rev]
966 966 return ctx.flags(name)
967 967
968 968 @annotatesubrepoerror
969 969 def printfiles(self, ui, m, fm, fmt, subrepos):
970 970 # If the parent context is a workingctx, use the workingctx here for
971 971 # consistency.
972 972 if self._ctx.rev() is None:
973 973 ctx = self._repo[None]
974 974 else:
975 975 rev = self._state[1]
976 976 ctx = self._repo[rev]
977 977 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
978 978
979 979 @annotatesubrepoerror
980 980 def getfileset(self, expr):
981 981 if self._ctx.rev() is None:
982 982 ctx = self._repo[None]
983 983 else:
984 984 rev = self._state[1]
985 985 ctx = self._repo[rev]
986 986
987 987 files = ctx.getfileset(expr)
988 988
989 989 for subpath in ctx.substate:
990 990 sub = ctx.sub(subpath)
991 991
992 992 try:
993 993 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
994 994 except error.LookupError:
995 995 self.ui.status(_("skipping missing subrepository: %s\n")
996 996 % self.wvfs.reljoin(reporelpath(self), subpath))
997 997 return files
998 998
999 999 def walk(self, match):
1000 1000 ctx = self._repo[None]
1001 1001 return ctx.walk(match)
1002 1002
1003 1003 @annotatesubrepoerror
1004 1004 def forget(self, match, prefix):
1005 1005 return cmdutil.forget(self.ui, self._repo, match,
1006 1006 self.wvfs.reljoin(prefix, self._path), True)
1007 1007
1008 1008 @annotatesubrepoerror
1009 1009 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1010 1010 return cmdutil.remove(self.ui, self._repo, matcher,
1011 1011 self.wvfs.reljoin(prefix, self._path),
1012 1012 after, force, subrepos)
1013 1013
1014 1014 @annotatesubrepoerror
1015 1015 def revert(self, substate, *pats, **opts):
1016 1016 # reverting a subrepo is a 2 step process:
1017 1017 # 1. if no_backup is not set, revert all modified
1018 1018 # files inside the subrepo
1019 1019 # 2. update the subrepo to the revision specified in
1020 1020 # the corresponding substate dictionary
1021 1021 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1022 1022 if not opts.get('no_backup'):
1023 1023 # Revert all files on the subrepo, creating backups
1024 1024 # Note that this will not recursively revert subrepos
1025 1025 # We could do it if there was a set:subrepos() predicate
1026 1026 opts = opts.copy()
1027 1027 opts['date'] = None
1028 1028 opts['rev'] = substate[1]
1029 1029
1030 1030 self.filerevert(*pats, **opts)
1031 1031
1032 1032 # Update the repo to the revision specified in the given substate
1033 1033 if not opts.get('dry_run'):
1034 1034 self.get(substate, overwrite=True)
1035 1035
1036 1036 def filerevert(self, *pats, **opts):
1037 1037 ctx = self._repo[opts['rev']]
1038 1038 parents = self._repo.dirstate.parents()
1039 1039 if opts.get('all'):
1040 1040 pats = ['set:modified()']
1041 1041 else:
1042 1042 pats = []
1043 1043 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1044 1044
1045 1045 def shortid(self, revid):
1046 1046 return revid[:12]
1047 1047
1048 1048 def verify(self):
1049 1049 try:
1050 1050 rev = self._state[1]
1051 1051 ctx = self._repo.unfiltered()[rev]
1052 1052 if ctx.hidden():
1053 1053 # Since hidden revisions aren't pushed/pulled, it seems worth an
1054 1054 # explicit warning.
1055 1055 ui = self._repo.ui
1056 1056 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1057 1057 (self._relpath, node.short(self._ctx.node())))
1058 1058 return 0
1059 1059 except error.RepoLookupError:
1060 1060 # A missing subrepo revision may be a case of needing to pull it, so
1061 1061 # don't treat this as an error.
1062 1062 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1063 1063 (self._relpath, node.short(self._ctx.node())))
1064 1064 return 0
1065 1065
1066 1066 @propertycache
1067 1067 def wvfs(self):
1068 1068 """return own wvfs for efficiency and consistency
1069 1069 """
1070 1070 return self._repo.wvfs
1071 1071
1072 1072 @propertycache
1073 1073 def _relpath(self):
1074 1074 """return path to this subrepository as seen from outermost repository
1075 1075 """
1076 1076 # Keep consistent dir separators by avoiding vfs.join(self._path)
1077 1077 return reporelpath(self._repo)
1078 1078
1079 1079 class svnsubrepo(abstractsubrepo):
1080 1080 def __init__(self, ctx, path, state, allowcreate):
1081 1081 super(svnsubrepo, self).__init__(ctx, path)
1082 1082 self._state = state
1083 1083 self._exe = util.findexe('svn')
1084 1084 if not self._exe:
1085 1085 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1086 1086 % self._path)
1087 1087
1088 1088 def _svncommand(self, commands, filename='', failok=False):
1089 1089 cmd = [self._exe]
1090 1090 extrakw = {}
1091 1091 if not self.ui.interactive():
1092 1092 # Making stdin be a pipe should prevent svn from behaving
1093 1093 # interactively even if we can't pass --non-interactive.
1094 1094 extrakw['stdin'] = subprocess.PIPE
1095 1095 # Starting in svn 1.5 --non-interactive is a global flag
1096 1096 # instead of being per-command, but we need to support 1.4 so
1097 1097 # we have to be intelligent about what commands take
1098 1098 # --non-interactive.
1099 1099 if commands[0] in ('update', 'checkout', 'commit'):
1100 1100 cmd.append('--non-interactive')
1101 1101 cmd.extend(commands)
1102 1102 if filename is not None:
1103 1103 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1104 1104 self._path, filename)
1105 1105 cmd.append(path)
1106 1106 env = dict(encoding.environ)
1107 1107 # Avoid localized output, preserve current locale for everything else.
1108 1108 lc_all = env.get('LC_ALL')
1109 1109 if lc_all:
1110 1110 env['LANG'] = lc_all
1111 1111 del env['LC_ALL']
1112 1112 env['LC_MESSAGES'] = 'C'
1113 1113 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1114 1114 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1115 1115 universal_newlines=True, env=env, **extrakw)
1116 1116 stdout, stderr = p.communicate()
1117 1117 stderr = stderr.strip()
1118 1118 if not failok:
1119 1119 if p.returncode:
1120 1120 raise error.Abort(stderr or 'exited with code %d'
1121 1121 % p.returncode)
1122 1122 if stderr:
1123 1123 self.ui.warn(stderr + '\n')
1124 1124 return stdout, stderr
1125 1125
1126 1126 @propertycache
1127 1127 def _svnversion(self):
1128 1128 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1129 1129 m = re.search(r'^(\d+)\.(\d+)', output)
1130 1130 if not m:
1131 1131 raise error.Abort(_('cannot retrieve svn tool version'))
1132 1132 return (int(m.group(1)), int(m.group(2)))
1133 1133
1134 1134 def _wcrevs(self):
1135 1135 # Get the working directory revision as well as the last
1136 1136 # commit revision so we can compare the subrepo state with
1137 1137 # both. We used to store the working directory one.
1138 1138 output, err = self._svncommand(['info', '--xml'])
1139 1139 doc = xml.dom.minidom.parseString(output)
1140 1140 entries = doc.getElementsByTagName('entry')
1141 1141 lastrev, rev = '0', '0'
1142 1142 if entries:
1143 1143 rev = str(entries[0].getAttribute('revision')) or '0'
1144 1144 commits = entries[0].getElementsByTagName('commit')
1145 1145 if commits:
1146 1146 lastrev = str(commits[0].getAttribute('revision')) or '0'
1147 1147 return (lastrev, rev)
1148 1148
1149 1149 def _wcrev(self):
1150 1150 return self._wcrevs()[0]
1151 1151
1152 1152 def _wcchanged(self):
1153 1153 """Return (changes, extchanges, missing) where changes is True
1154 1154 if the working directory was changed, extchanges is
1155 1155 True if any of these changes concern an external entry and missing
1156 1156 is True if any change is a missing entry.
1157 1157 """
1158 1158 output, err = self._svncommand(['status', '--xml'])
1159 1159 externals, changes, missing = [], [], []
1160 1160 doc = xml.dom.minidom.parseString(output)
1161 1161 for e in doc.getElementsByTagName('entry'):
1162 1162 s = e.getElementsByTagName('wc-status')
1163 1163 if not s:
1164 1164 continue
1165 1165 item = s[0].getAttribute('item')
1166 1166 props = s[0].getAttribute('props')
1167 1167 path = e.getAttribute('path')
1168 1168 if item == 'external':
1169 1169 externals.append(path)
1170 1170 elif item == 'missing':
1171 1171 missing.append(path)
1172 1172 if (item not in ('', 'normal', 'unversioned', 'external')
1173 1173 or props not in ('', 'none', 'normal')):
1174 1174 changes.append(path)
1175 1175 for path in changes:
1176 1176 for ext in externals:
1177 1177 if path == ext or path.startswith(ext + pycompat.ossep):
1178 1178 return True, True, bool(missing)
1179 1179 return bool(changes), False, bool(missing)
1180 1180
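As a side note, the item/props classification done by _wcchanged() above can be exercised without an svn checkout. The following standalone sketch applies the same rules to a hard-coded sample that merely resembles `svn status --xml` output (illustrative only, not captured from a real run):

    import xml.dom.minidom

    # Hard-coded sample resembling `svn status --xml` output (illustrative only).
    sample = '''<?xml version="1.0"?>
    <status><target path=".">
      <entry path="a.txt"><wc-status item="modified" props="none"/></entry>
      <entry path="ext"><wc-status item="external" props="none"/></entry>
      <entry path="b.txt"><wc-status item="missing" props="none"/></entry>
    </target></status>'''

    doc = xml.dom.minidom.parseString(sample)
    externals, changes, missing = [], [], []
    for e in doc.getElementsByTagName('entry'):
        s = e.getElementsByTagName('wc-status')
        if not s:
            continue
        item = s[0].getAttribute('item')
        props = s[0].getAttribute('props')
        path = e.getAttribute('path')
        if item == 'external':
            externals.append(path)      # external entries are tracked separately
        elif item == 'missing':
            missing.append(path)        # missing entries also count as changes
        if (item not in ('', 'normal', 'unversioned', 'external')
            or props not in ('', 'none', 'normal')):
            changes.append(path)

    print(changes, externals, missing)  # ['a.txt', 'b.txt'] ['ext'] ['b.txt']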
1181 1181 def dirty(self, ignoreupdate=False):
1182 1182 if not self._wcchanged()[0]:
1183 1183 if self._state[1] in self._wcrevs() or ignoreupdate:
1184 1184 return False
1185 1185 return True
1186 1186
1187 1187 def basestate(self):
1188 1188 lastrev, rev = self._wcrevs()
1189 1189 if lastrev != rev:
1190 1190 # Last committed rev is not the same as rev. We would
1191 1191 # like to take lastrev but we do not know if the subrepo
1192 1192 # URL exists at lastrev. Test it and fall back to rev if it
1193 1193 # is not there.
1194 1194 try:
1195 1195 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1196 1196 return lastrev
1197 1197 except error.Abort:
1198 1198 pass
1199 1199 return rev
1200 1200
1201 1201 @annotatesubrepoerror
1202 1202 def commit(self, text, user, date):
1203 1203 # user and date are out of our hands since svn is centralized
1204 1204 changed, extchanged, missing = self._wcchanged()
1205 1205 if not changed:
1206 1206 return self.basestate()
1207 1207 if extchanged:
1208 1208 # Do not try to commit externals
1209 1209 raise error.Abort(_('cannot commit svn externals'))
1210 1210 if missing:
1211 1211 # svn can commit with missing entries but aborting like hg
1212 1212 # seems a better approach.
1213 1213 raise error.Abort(_('cannot commit missing svn entries'))
1214 1214 commitinfo, err = self._svncommand(['commit', '-m', text])
1215 1215 self.ui.status(commitinfo)
1216 1216 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1217 1217 if not newrev:
1218 1218 if not commitinfo.strip():
1219 1219 # Sometimes, our definition of "changed" differs from
1220 1220 # svn's. For instance, svn ignores missing files
1221 1221 # when committing. If there are only missing files, no
1222 1222 # commit is made, no output and no error code.
1223 1223 raise error.Abort(_('failed to commit svn changes'))
1224 1224 raise error.Abort(commitinfo.splitlines()[-1])
1225 1225 newrev = newrev.groups()[0]
1226 1226 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1227 1227 return newrev
1228 1228
1229 1229 @annotatesubrepoerror
1230 1230 def remove(self):
1231 1231 if self.dirty():
1232 1232 self.ui.warn(_('not removing repo %s because '
1233 1233 'it has changes.\n') % self._path)
1234 1234 return
1235 1235 self.ui.note(_('removing subrepo %s\n') % self._path)
1236 1236
1237 1237 self.wvfs.rmtree(forcibly=True)
1238 1238 try:
1239 1239 pwvfs = self._ctx.repo().wvfs
1240 1240 pwvfs.removedirs(pwvfs.dirname(self._path))
1241 1241 except OSError:
1242 1242 pass
1243 1243
1244 1244 @annotatesubrepoerror
1245 1245 def get(self, state, overwrite=False):
1246 1246 if overwrite:
1247 1247 self._svncommand(['revert', '--recursive'])
1248 1248 args = ['checkout']
1249 1249 if self._svnversion >= (1, 5):
1250 1250 args.append('--force')
1251 1251 # The revision must be specified at the end of the URL to properly
1252 1252 # update to a directory which has since been deleted and recreated.
1253 1253 args.append('%s@%s' % (state[0], state[1]))
1254 1254 status, err = self._svncommand(args, failok=True)
1255 1255 _sanitize(self.ui, self.wvfs, '.svn')
1256 1256 if not re.search('Checked out revision [0-9]+.', status):
1257 1257 if ('is already a working copy for a different URL' in err
1258 1258 and (self._wcchanged()[:2] == (False, False))):
1259 1259 # obstructed but clean working copy, so just blow it away.
1260 1260 self.remove()
1261 1261 self.get(state, overwrite=False)
1262 1262 return
1263 1263 raise error.Abort((status or err).splitlines()[-1])
1264 1264 self.ui.status(status)
1265 1265
1266 1266 @annotatesubrepoerror
1267 1267 def merge(self, state):
1268 1268 old = self._state[1]
1269 1269 new = state[1]
1270 1270 wcrev = self._wcrev()
1271 1271 if new != wcrev:
1272 1272 dirty = old == wcrev or self._wcchanged()[0]
1273 1273 if _updateprompt(self.ui, self, dirty, wcrev, new):
1274 1274 self.get(state, False)
1275 1275
1276 1276 def push(self, opts):
1277 1277 # push is a no-op for SVN
1278 1278 return True
1279 1279
1280 1280 @annotatesubrepoerror
1281 1281 def files(self):
1282 1282 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1283 1283 doc = xml.dom.minidom.parseString(output)
1284 1284 paths = []
1285 1285 for e in doc.getElementsByTagName('entry'):
1286 1286 kind = str(e.getAttribute('kind'))
1287 1287 if kind != 'file':
1288 1288 continue
1289 1289 name = ''.join(c.data for c
1290 1290 in e.getElementsByTagName('name')[0].childNodes
1291 1291 if c.nodeType == c.TEXT_NODE)
1292 1292 paths.append(name.encode('utf-8'))
1293 1293 return paths
1294 1294
1295 1295 def filedata(self, name):
1296 1296 return self._svncommand(['cat'], name)[0]
1297 1297
1298 1298
1299 1299 class gitsubrepo(abstractsubrepo):
1300 1300 def __init__(self, ctx, path, state, allowcreate):
1301 1301 super(gitsubrepo, self).__init__(ctx, path)
1302 1302 self._state = state
1303 1303 self._abspath = ctx.repo().wjoin(path)
1304 1304 self._subparent = ctx.repo()
1305 1305 self._ensuregit()
1306 1306
1307 1307 def _ensuregit(self):
1308 1308 try:
1309 1309 self._gitexecutable = 'git'
1310 1310 out, err = self._gitnodir(['--version'])
1311 1311 except OSError as e:
1312 1312 genericerror = _("error executing git for subrepo '%s': %s")
1313 1313 notfoundhint = _("check git is installed and in your PATH")
1314 1314 if e.errno != errno.ENOENT:
1315 1315 raise error.Abort(genericerror % (self._path, e.strerror))
1316 1316 elif pycompat.osname == 'nt':
1317 1317 try:
1318 1318 self._gitexecutable = 'git.cmd'
1319 1319 out, err = self._gitnodir(['--version'])
1320 1320 except OSError as e2:
1321 1321 if e2.errno == errno.ENOENT:
1322 1322 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1323 1323 " for subrepo '%s'") % self._path,
1324 1324 hint=notfoundhint)
1325 1325 else:
1326 1326 raise error.Abort(genericerror % (self._path,
1327 1327 e2.strerror))
1328 1328 else:
1329 1329 raise error.Abort(_("couldn't find git for subrepo '%s'")
1330 1330 % self._path, hint=notfoundhint)
1331 1331 versionstatus = self._checkversion(out)
1332 1332 if versionstatus == 'unknown':
1333 1333 self.ui.warn(_('cannot retrieve git version\n'))
1334 1334 elif versionstatus == 'abort':
1335 1335 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1336 1336 elif versionstatus == 'warning':
1337 1337 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1338 1338
1339 1339 @staticmethod
1340 1340 def _gitversion(out):
1341 1341 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1342 1342 if m:
1343 1343 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1344 1344
1345 1345 m = re.search(r'^git version (\d+)\.(\d+)', out)
1346 1346 if m:
1347 1347 return (int(m.group(1)), int(m.group(2)), 0)
1348 1348
1349 1349 return -1
1350 1350
1351 1351 @staticmethod
1352 1352 def _checkversion(out):
1353 1353 '''ensure git version is new enough
1354 1354
1355 1355 >>> _checkversion = gitsubrepo._checkversion
1356 1356 >>> _checkversion('git version 1.6.0')
1357 1357 'ok'
1358 1358 >>> _checkversion('git version 1.8.5')
1359 1359 'ok'
1360 1360 >>> _checkversion('git version 1.4.0')
1361 1361 'abort'
1362 1362 >>> _checkversion('git version 1.5.0')
1363 1363 'warning'
1364 1364 >>> _checkversion('git version 1.9-rc0')
1365 1365 'ok'
1366 1366 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1367 1367 'ok'
1368 1368 >>> _checkversion('git version 1.9.0.GIT')
1369 1369 'ok'
1370 1370 >>> _checkversion('git version 12345')
1371 1371 'unknown'
1372 1372 >>> _checkversion('no')
1373 1373 'unknown'
1374 1374 '''
1375 1375 version = gitsubrepo._gitversion(out)
1376 1376 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1377 1377 # despite the docstring comment. For now, error on 1.4.0, warn on
1378 1378 # 1.5.0 but attempt to continue.
1379 1379 if version == -1:
1380 1380 return 'unknown'
1381 1381 if version < (1, 5, 0):
1382 1382 return 'abort'
1383 1383 elif version < (1, 6, 0):
1384 1384 return 'warning'
1385 1385 return 'ok'
1386 1386
1387 1387 def _gitcommand(self, commands, env=None, stream=False):
1388 1388 return self._gitdir(commands, env=env, stream=stream)[0]
1389 1389
1390 1390 def _gitdir(self, commands, env=None, stream=False):
1391 1391 return self._gitnodir(commands, env=env, stream=stream,
1392 1392 cwd=self._abspath)
1393 1393
1394 1394 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1395 1395 """Calls the git command
1396 1396
1397 1397 The method tries to call the git command. Versions prior to 1.6.0
1398 1398 are not supported and will very probably fail.
1399 1399 """
1400 1400 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1401 1401 if env is None:
1402 1402 env = encoding.environ.copy()
1403 1403 # disable localization for Git output (issue5176)
1404 1404 env['LC_ALL'] = 'C'
1405 1405 # fix for Git CVE-2015-7545
1406 1406 if 'GIT_ALLOW_PROTOCOL' not in env:
1407 1407 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1408 1408 # unless ui.quiet is set, print git's stderr,
1409 1409 # which is mostly progress and useful info
1410 1410 errpipe = None
1411 1411 if self.ui.quiet:
1412 1412 errpipe = open(os.devnull, 'w')
1413 1413 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1414 1414 cwd=cwd, env=env, close_fds=util.closefds,
1415 1415 stdout=subprocess.PIPE, stderr=errpipe)
1416 1416 if stream:
1417 1417 return p.stdout, None
1418 1418
1419 1419 retdata = p.stdout.read().strip()
1420 1420 # wait for the child to exit to avoid race condition.
1421 1421 p.wait()
1422 1422
1423 1423 if p.returncode != 0 and p.returncode != 1:
1424 1424 # there are certain error codes that are ok
1425 1425 command = commands[0]
1426 1426 if command in ('cat-file', 'symbolic-ref'):
1427 1427 return retdata, p.returncode
1428 1428 # for all others, abort
1429 1429 raise error.Abort(_('git %s error %d in %s') %
1430 1430 (command, p.returncode, self._relpath))
1431 1431
1432 1432 return retdata, p.returncode
1433 1433
1434 1434 def _gitmissing(self):
1435 1435 return not self.wvfs.exists('.git')
1436 1436
1437 1437 def _gitstate(self):
1438 1438 return self._gitcommand(['rev-parse', 'HEAD'])
1439 1439
1440 1440 def _gitcurrentbranch(self):
1441 1441 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1442 1442 if err:
1443 1443 current = None
1444 1444 return current
1445 1445
1446 1446 def _gitremote(self, remote):
1447 1447 out = self._gitcommand(['remote', 'show', '-n', remote])
1448 1448 line = out.split('\n')[1]
1449 1449 i = line.index('URL: ') + len('URL: ')
1450 1450 return line[i:]
1451 1451
1452 1452 def _githavelocally(self, revision):
1453 1453 out, code = self._gitdir(['cat-file', '-e', revision])
1454 1454 return code == 0
1455 1455
1456 1456 def _gitisancestor(self, r1, r2):
1457 1457 base = self._gitcommand(['merge-base', r1, r2])
1458 1458 return base == r1
1459 1459
1460 1460 def _gitisbare(self):
1461 1461 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1462 1462
1463 1463 def _gitupdatestat(self):
1464 1464 """This must be run before git diff-index.
1465 1465 diff-index only looks at changes to file stat;
1466 1466 this command looks at file contents and updates the stat."""
1467 1467 self._gitcommand(['update-index', '-q', '--refresh'])
1468 1468
1469 1469 def _gitbranchmap(self):
1470 1470 '''returns 2 things:
1471 1471 a map from git branch to revision
1472 1472 a map from revision to branches'''
1473 1473 branch2rev = {}
1474 1474 rev2branch = {}
1475 1475
1476 1476 out = self._gitcommand(['for-each-ref', '--format',
1477 1477 '%(objectname) %(refname)'])
1478 1478 for line in out.split('\n'):
1479 1479 revision, ref = line.split(' ')
1480 1480 if (not ref.startswith('refs/heads/') and
1481 1481 not ref.startswith('refs/remotes/')):
1482 1482 continue
1483 1483 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1484 1484 continue # ignore remote/HEAD redirects
1485 1485 branch2rev[ref] = revision
1486 1486 rev2branch.setdefault(revision, []).append(ref)
1487 1487 return branch2rev, rev2branch
1488 1488
1489 1489 def _gittracking(self, branches):
1490 1490 'return map of remote branch to local tracking branch'
1491 1491 # assumes no more than one local tracking branch for each remote
1492 1492 tracking = {}
1493 1493 for b in branches:
1494 1494 if b.startswith('refs/remotes/'):
1495 1495 continue
1496 1496 bname = b.split('/', 2)[2]
1497 1497 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1498 1498 if remote:
1499 1499 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1500 1500 tracking['refs/remotes/%s/%s' %
1501 1501 (remote, ref.split('/', 2)[2])] = b
1502 1502 return tracking
1503 1503
1504 1504 def _abssource(self, source):
1505 1505 if '://' not in source:
1506 1506 # recognize the scp syntax as an absolute source
1507 1507 colon = source.find(':')
1508 1508 if colon != -1 and '/' not in source[:colon]:
1509 1509 return source
1510 1510 self._subsource = source
1511 1511 return _abssource(self)
1512 1512
1513 1513 def _fetch(self, source, revision):
1514 1514 if self._gitmissing():
1515 1515 source = self._abssource(source)
1516 1516 self.ui.status(_('cloning subrepo %s from %s\n') %
1517 1517 (self._relpath, source))
1518 1518 self._gitnodir(['clone', source, self._abspath])
1519 1519 if self._githavelocally(revision):
1520 1520 return
1521 1521 self.ui.status(_('pulling subrepo %s from %s\n') %
1522 1522 (self._relpath, self._gitremote('origin')))
1523 1523 # try only origin: the originally cloned repo
1524 1524 self._gitcommand(['fetch'])
1525 1525 if not self._githavelocally(revision):
1526 1526 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1527 1527 (revision, self._relpath))
1528 1528
1529 1529 @annotatesubrepoerror
1530 1530 def dirty(self, ignoreupdate=False):
1531 1531 if self._gitmissing():
1532 1532 return self._state[1] != ''
1533 1533 if self._gitisbare():
1534 1534 return True
1535 1535 if not ignoreupdate and self._state[1] != self._gitstate():
1536 1536 # different version checked out
1537 1537 return True
1538 1538 # check for staged changes or modified files; ignore untracked files
1539 1539 self._gitupdatestat()
1540 1540 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1541 1541 return code == 1
1542 1542
1543 1543 def basestate(self):
1544 1544 return self._gitstate()
1545 1545
1546 1546 @annotatesubrepoerror
1547 1547 def get(self, state, overwrite=False):
1548 1548 source, revision, kind = state
1549 1549 if not revision:
1550 1550 self.remove()
1551 1551 return
1552 1552 self._fetch(source, revision)
1553 1553 # if the repo was set to be bare, unbare it
1554 1554 if self._gitisbare():
1555 1555 self._gitcommand(['config', 'core.bare', 'false'])
1556 1556 if self._gitstate() == revision:
1557 1557 self._gitcommand(['reset', '--hard', 'HEAD'])
1558 1558 return
1559 1559 elif self._gitstate() == revision:
1560 1560 if overwrite:
1561 1561 # first reset the index to unmark new files for commit, because
1562 1562 # reset --hard will otherwise throw away files added for commit,
1563 1563 # not just unmark them.
1564 1564 self._gitcommand(['reset', 'HEAD'])
1565 1565 self._gitcommand(['reset', '--hard', 'HEAD'])
1566 1566 return
1567 1567 branch2rev, rev2branch = self._gitbranchmap()
1568 1568
1569 1569 def checkout(args):
1570 1570 cmd = ['checkout']
1571 1571 if overwrite:
1572 1572 # first reset the index to unmark new files for commit, because
1573 1573 # the -f option will otherwise throw away files added for
1574 1574 # commit, not just unmark them.
1575 1575 self._gitcommand(['reset', 'HEAD'])
1576 1576 cmd.append('-f')
1577 1577 self._gitcommand(cmd + args)
1578 1578 _sanitize(self.ui, self.wvfs, '.git')
1579 1579
1580 1580 def rawcheckout():
1581 1581 # no branch to checkout, check it out with no branch
1582 1582 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1583 1583 self._relpath)
1584 1584 self.ui.warn(_('check out a git branch if you intend '
1585 1585 'to make changes\n'))
1586 1586 checkout(['-q', revision])
1587 1587
1588 1588 if revision not in rev2branch:
1589 1589 rawcheckout()
1590 1590 return
1591 1591 branches = rev2branch[revision]
1592 1592 firstlocalbranch = None
1593 1593 for b in branches:
1594 1594 if b == 'refs/heads/master':
1595 1595 # master trumps all other branches
1596 1596 checkout(['refs/heads/master'])
1597 1597 return
1598 1598 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1599 1599 firstlocalbranch = b
1600 1600 if firstlocalbranch:
1601 1601 checkout([firstlocalbranch])
1602 1602 return
1603 1603
1604 1604 tracking = self._gittracking(branch2rev.keys())
1605 1605 # choose a remote branch already tracked if possible
1606 1606 remote = branches[0]
1607 1607 if remote not in tracking:
1608 1608 for b in branches:
1609 1609 if b in tracking:
1610 1610 remote = b
1611 1611 break
1612 1612
1613 1613 if remote not in tracking:
1614 1614 # create a new local tracking branch
1615 1615 local = remote.split('/', 3)[3]
1616 1616 checkout(['-b', local, remote])
1617 1617 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1618 1618 # When updating to a tracked remote branch,
1619 1619 # if the local tracking branch is downstream of it,
1620 1620 # a normal `git pull` would have performed a "fast-forward merge"
1621 1621 # which is equivalent to updating the local branch to the remote.
1622 1622 # Since we are only looking at branching at update, we need to
1623 1623 # detect this situation and perform this action lazily.
1624 1624 if tracking[remote] != self._gitcurrentbranch():
1625 1625 checkout([tracking[remote]])
1626 1626 self._gitcommand(['merge', '--ff', remote])
1627 1627 _sanitize(self.ui, self.wvfs, '.git')
1628 1628 else:
1629 1629 # a real merge would be required, just checkout the revision
1630 1630 rawcheckout()
1631 1631
1632 1632 @annotatesubrepoerror
1633 1633 def commit(self, text, user, date):
1634 1634 if self._gitmissing():
1635 1635 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1636 1636 cmd = ['commit', '-a', '-m', text]
1637 1637 env = encoding.environ.copy()
1638 1638 if user:
1639 1639 cmd += ['--author', user]
1640 1640 if date:
1641 1641 # git's date parser silently ignores dates when seconds < 1e9,
1642 1642 # so convert to ISO8601
1643 1643 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1644 1644 '%Y-%m-%dT%H:%M:%S %1%2')
1645 1645 self._gitcommand(cmd, env=env)
1646 1646 # make sure the commit worked, otherwise HEAD might not exist under
1647 1647 # certain circumstances
1648 1648 return self._gitstate()
1649 1649
1650 1650 @annotatesubrepoerror
1651 1651 def merge(self, state):
1652 1652 source, revision, kind = state
1653 1653 self._fetch(source, revision)
1654 1654 base = self._gitcommand(['merge-base', revision, self._state[1]])
1655 1655 self._gitupdatestat()
1656 1656 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1657 1657
1658 1658 def mergefunc():
1659 1659 if base == revision:
1660 1660 self.get(state) # fast forward merge
1661 1661 elif base != self._state[1]:
1662 1662 self._gitcommand(['merge', '--no-commit', revision])
1663 1663 _sanitize(self.ui, self.wvfs, '.git')
1664 1664
1665 1665 if self.dirty():
1666 1666 if self._gitstate() != revision:
1667 1667 dirty = self._gitstate() == self._state[1] or code != 0
1668 1668 if _updateprompt(self.ui, self, dirty,
1669 1669 self._state[1][:7], revision[:7]):
1670 1670 mergefunc()
1671 1671 else:
1672 1672 mergefunc()
1673 1673
1674 1674 @annotatesubrepoerror
1675 1675 def push(self, opts):
1676 1676 force = opts.get('force')
1677 1677
1678 1678 if not self._state[1]:
1679 1679 return True
1680 1680 if self._gitmissing():
1681 1681 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1682 1682 # if a branch in origin contains the revision, nothing to do
1683 1683 branch2rev, rev2branch = self._gitbranchmap()
1684 1684 if self._state[1] in rev2branch:
1685 1685 for b in rev2branch[self._state[1]]:
1686 1686 if b.startswith('refs/remotes/origin/'):
1687 1687 return True
1688 1688 for b, revision in branch2rev.iteritems():
1689 1689 if b.startswith('refs/remotes/origin/'):
1690 1690 if self._gitisancestor(self._state[1], revision):
1691 1691 return True
1692 1692 # otherwise, try to push the currently checked out branch
1693 1693 cmd = ['push']
1694 1694 if force:
1695 1695 cmd.append('--force')
1696 1696
1697 1697 current = self._gitcurrentbranch()
1698 1698 if current:
1699 1699 # determine if the current branch is even useful
1700 1700 if not self._gitisancestor(self._state[1], current):
1701 1701 self.ui.warn(_('unrelated git branch checked out '
1702 1702 'in subrepo %s\n') % self._relpath)
1703 1703 return False
1704 1704 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1705 1705 (current.split('/', 2)[2], self._relpath))
1706 1706 ret = self._gitdir(cmd + ['origin', current])
1707 1707 return ret[1] == 0
1708 1708 else:
1709 1709 self.ui.warn(_('no branch checked out in subrepo %s\n'
1710 1710 'cannot push revision %s\n') %
1711 1711 (self._relpath, self._state[1]))
1712 1712 return False
1713 1713
1714 1714 @annotatesubrepoerror
1715 1715 def add(self, ui, match, prefix, explicitonly, **opts):
1716 1716 if self._gitmissing():
1717 1717 return []
1718 1718
1719 1719 (modified, added, removed,
1720 1720 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1721 1721 clean=True)
1722 1722
1723 1723 tracked = set()
1724 1724 # dirstates 'amn' warn, 'r' is added again
1725 1725 for l in (modified, added, deleted, clean):
1726 1726 tracked.update(l)
1727 1727
1728 1728 # Unknown files not of interest will be rejected by the matcher
1729 1729 files = unknown
1730 1730 files.extend(match.files())
1731 1731
1732 1732 rejected = []
1733 1733
1734 1734 files = [f for f in sorted(set(files)) if match(f)]
1735 1735 for f in files:
1736 1736 exact = match.exact(f)
1737 1737 command = ["add"]
1738 1738 if exact:
1739 1739 command.append("-f") #should be added, even if ignored
1740 1740 if ui.verbose or not exact:
1741 1741 ui.status(_('adding %s\n') % match.rel(f))
1742 1742
1743 1743 if f in tracked: # hg prints 'adding' even if already tracked
1744 1744 if exact:
1745 1745 rejected.append(f)
1746 1746 continue
1747 1747 if not opts.get('dry_run'):
1748 1748 self._gitcommand(command + [f])
1749 1749
1750 1750 for f in rejected:
1751 1751 ui.warn(_("%s already tracked!\n") % match.abs(f))
1752 1752
1753 1753 return rejected
1754 1754
1755 1755 @annotatesubrepoerror
1756 1756 def remove(self):
1757 1757 if self._gitmissing():
1758 1758 return
1759 1759 if self.dirty():
1760 1760 self.ui.warn(_('not removing repo %s because '
1761 1761 'it has changes.\n') % self._relpath)
1762 1762 return
1763 1763 # we can't fully delete the repository as it may contain
1764 1764 # local-only history
1765 1765 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1766 1766 self._gitcommand(['config', 'core.bare', 'true'])
1767 1767 for f, kind in self.wvfs.readdir():
1768 1768 if f == '.git':
1769 1769 continue
1770 1770 if kind == stat.S_IFDIR:
1771 1771 self.wvfs.rmtree(f)
1772 1772 else:
1773 1773 self.wvfs.unlink(f)
1774 1774
1775 1775 def archive(self, archiver, prefix, match=None):
1776 1776 total = 0
1777 1777 source, revision = self._state
1778 1778 if not revision:
1779 1779 return total
1780 1780 self._fetch(source, revision)
1781 1781
1782 1782 # Parse git's native archive command.
1783 1783 # This should be much faster than manually traversing the trees
1784 1784 # and objects with many subprocess calls.
1785 1785 tarstream = self._gitcommand(['archive', revision], stream=True)
1786 1786 tar = tarfile.open(fileobj=tarstream, mode='r|')
1787 1787 relpath = subrelpath(self)
1788 1788 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1789 1789 for i, info in enumerate(tar):
1790 1790 if info.isdir():
1791 1791 continue
1792 1792 if match and not match(info.name):
1793 1793 continue
1794 1794 if info.issym():
1795 1795 data = info.linkname
1796 1796 else:
1797 1797 data = tar.extractfile(info).read()
1798 1798 archiver.addfile(prefix + self._path + '/' + info.name,
1799 1799 info.mode, info.issym(), data)
1800 1800 total += 1
1801 1801 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1802 1802 unit=_('files'))
1803 1803 self.ui.progress(_('archiving (%s)') % relpath, None)
1804 1804 return total
1805 1805
1806 1806
1807 1807 @annotatesubrepoerror
1808 1808 def cat(self, match, prefix, **opts):
1809 1809 rev = self._state[1]
1810 1810 if match.anypats():
1811 1811 return 1 #No support for include/exclude yet
1812 1812
1813 1813 if not match.files():
1814 1814 return 1
1815 1815
1816 1816 for f in match.files():
1817 1817 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1818 1818 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1819 1819 self._ctx.node(),
1820 1820 pathname=self.wvfs.reljoin(prefix, f))
1821 1821 fp.write(output)
1822 1822 fp.close()
1823 1823 return 0
1824 1824
1825 1825
1826 1826 @annotatesubrepoerror
1827 1827 def status(self, rev2, **opts):
1828 1828 rev1 = self._state[1]
1829 1829 if self._gitmissing() or not rev1:
1830 1830 # if the repo is missing, return no results
1831 1831 return scmutil.status([], [], [], [], [], [], [])
1832 1832 modified, added, removed = [], [], []
1833 1833 self._gitupdatestat()
1834 1834 if rev2:
1835 1835 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1836 1836 else:
1837 1837 command = ['diff-index', '--no-renames', rev1]
1838 1838 out = self._gitcommand(command)
1839 1839 for line in out.split('\n'):
1840 1840 tab = line.find('\t')
1841 1841 if tab == -1:
1842 1842 continue
1843 1843 status, f = line[tab - 1], line[tab + 1:]
1844 1844 if status == 'M':
1845 1845 modified.append(f)
1846 1846 elif status == 'A':
1847 1847 added.append(f)
1848 1848 elif status == 'D':
1849 1849 removed.append(f)
1850 1850
1851 1851 deleted, unknown, ignored, clean = [], [], [], []
1852 1852
1853 1853 command = ['status', '--porcelain', '-z']
1854 1854 if opts.get('unknown'):
1855 1855 command += ['--untracked-files=all']
1856 1856 if opts.get('ignored'):
1857 1857 command += ['--ignored']
1858 1858 out = self._gitcommand(command)
1859 1859
1860 1860 changedfiles = set()
1861 1861 changedfiles.update(modified)
1862 1862 changedfiles.update(added)
1863 1863 changedfiles.update(removed)
1864 1864 for line in out.split('\0'):
1865 1865 if not line:
1866 1866 continue
1867 1867 st = line[0:2]
1868 1868 #moves and copies show 2 files on one line
1869 1869 if line.find('\0') >= 0:
1870 1870 filename1, filename2 = line[3:].split('\0')
1871 1871 else:
1872 1872 filename1 = line[3:]
1873 1873 filename2 = None
1874 1874
1875 1875 changedfiles.add(filename1)
1876 1876 if filename2:
1877 1877 changedfiles.add(filename2)
1878 1878
1879 1879 if st == '??':
1880 1880 unknown.append(filename1)
1881 1881 elif st == '!!':
1882 1882 ignored.append(filename1)
1883 1883
1884 1884 if opts.get('clean'):
1885 1885 out = self._gitcommand(['ls-files'])
1886 1886 for f in out.split('\n'):
1887 1887 if not f in changedfiles:
1888 1888 clean.append(f)
1889 1889
1890 1890 return scmutil.status(modified, added, removed, deleted,
1891 1891 unknown, ignored, clean)
1892 1892
1893 1893 @annotatesubrepoerror
1894 1894 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1895 1895 node1 = self._state[1]
1896 1896 cmd = ['diff', '--no-renames']
1897 1897 if opts['stat']:
1898 1898 cmd.append('--stat')
1899 1899 else:
1900 1900 # for Git, this also implies '-p'
1901 1901 cmd.append('-U%d' % diffopts.context)
1902 1902
1903 1903 gitprefix = self.wvfs.reljoin(prefix, self._path)
1904 1904
1905 1905 if diffopts.noprefix:
1906 1906 cmd.extend(['--src-prefix=%s/' % gitprefix,
1907 1907 '--dst-prefix=%s/' % gitprefix])
1908 1908 else:
1909 1909 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1910 1910 '--dst-prefix=b/%s/' % gitprefix])
1911 1911
1912 1912 if diffopts.ignorews:
1913 1913 cmd.append('--ignore-all-space')
1914 1914 if diffopts.ignorewsamount:
1915 1915 cmd.append('--ignore-space-change')
1916 1916 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1917 1917 and diffopts.ignoreblanklines:
1918 1918 cmd.append('--ignore-blank-lines')
1919 1919
1920 1920 cmd.append(node1)
1921 1921 if node2:
1922 1922 cmd.append(node2)
1923 1923
1924 1924 output = ""
1925 1925 if match.always():
1926 1926 output += self._gitcommand(cmd) + '\n'
1927 1927 else:
1928 1928 st = self.status(node2)[:3]
1929 1929 files = [f for sublist in st for f in sublist]
1930 1930 for f in files:
1931 1931 if match(f):
1932 1932 output += self._gitcommand(cmd + ['--', f]) + '\n'
1933 1933
1934 1934 if output.strip():
1935 1935 ui.write(output)
1936 1936
1937 1937 @annotatesubrepoerror
1938 1938 def revert(self, substate, *pats, **opts):
1939 1939 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1940 1940 if not opts.get('no_backup'):
1941 1941 status = self.status(None)
1942 1942 names = status.modified
1943 1943 for name in names:
1944 1944 bakname = scmutil.origpath(self.ui, self._subparent, name)
1945 1945 self.ui.note(_('saving current version of %s as %s\n') %
1946 1946 (name, bakname))
1947 1947 self.wvfs.rename(name, bakname)
1948 1948
1949 1949 if not opts.get('dry_run'):
1950 1950 self.get(substate, overwrite=True)
1951 1951 return []
1952 1952
1953 1953 def shortid(self, revid):
1954 1954 return revid[:7]
1955 1955
1956 1956 types = {
1957 1957 'hg': hgsubrepo,
1958 1958 'svn': svnsubrepo,
1959 1959 'git': gitsubrepo,
1960 1960 }
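For orientation, the mapping above is what lets callers pick a subrepo implementation from the kind recorded in the subrepo state. A minimal, self-contained sketch of that dispatch pattern follows; the stand-in classes and the makesubrepo helper are hypothetical, not Mercurial's actual subrepo() API:

    # Hypothetical stand-ins for the real subrepo classes.
    class fakehgsubrepo(object):
        kind = 'hg'

    class fakesvnsubrepo(object):
        kind = 'svn'

    class fakegitsubrepo(object):
        kind = 'git'

    faketypes = {
        'hg': fakehgsubrepo,
        'svn': fakesvnsubrepo,
        'git': fakegitsubrepo,
    }

    def makesubrepo(state):
        # state mirrors the (source, revision, kind) tuples used by the real code
        source, revision, kind = state
        try:
            cls = faketypes[kind]
        except KeyError:
            raise ValueError('unknown subrepo type %s' % kind)
        return cls()

    print(makesubrepo(('../lib', 'deadbeef', 'git')).kind)  # -> git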
@@ -1,415 +1,451 b''
1 1 $ cat >> $HGRCPATH <<EOF
2 2 > [extensions]
3 3 > rebase=
4 4 > histedit=
5 5 >
6 6 > [alias]
7 7 > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
8 8 > EOF
9 9
10 10
11 11 $ hg init a
12 12 $ cd a
13 13
14 14 $ echo C1 > C1
15 15 $ hg ci -Am C1
16 16 adding C1
17 17
18 18 $ echo C2 > C2
19 19 $ hg ci -Am C2
20 20 adding C2
21 21
22 22 $ cd ..
23 23
24 24 $ hg clone a b
25 25 updating to branch default
26 26 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 27
28 28 $ hg clone a c
29 29 updating to branch default
30 30 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 31
32 32 $ cd b
33 33
34 34 $ echo L1 > L1
35 35 $ hg ci -Am L1
36 36 adding L1
37 37
38 38
39 39 $ cd ../a
40 40
41 41 $ echo R1 > R1
42 42 $ hg ci -Am R1
43 43 adding R1
44 44
45 45
46 46 $ cd ../b
47 47
48 48 Now b has one revision to be pulled from a:
49 49
50 50 $ hg pull --rebase
51 51 pulling from $TESTTMP/a (glob)
52 52 searching for changes
53 53 adding changesets
54 54 adding manifests
55 55 adding file changes
56 56 added 1 changesets with 1 changes to 1 files (+1 heads)
57 57 rebasing 2:ff8d69a621f9 "L1"
58 58 saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
59 59
60 60 $ hg tglog
61 61 @ 3: 'L1'
62 62 |
63 63 o 2: 'R1'
64 64 |
65 65 o 1: 'C2'
66 66 |
67 67 o 0: 'C1'
68 68
69 69 Re-run:
70 70
71 71 $ hg pull --rebase
72 72 pulling from $TESTTMP/a (glob)
73 73 searching for changes
74 74 no changes found
75 75
76 76 Abort pull early if working dir is not clean:
77 77
78 78 $ echo L1-mod > L1
79 79 $ hg pull --rebase
80 80 abort: uncommitted changes
81 (cannot pull with rebase: please commit or shelve your changes first)
81 82 [255]
82 83 $ hg update --clean --quiet
83 84
84 85 Abort pull early if another operation (histedit) is in progress:
85 86
86 87 $ hg histedit . -q --commands - << EOF
87 88 > edit d80cc2da061e histedit: generate unfinished state
88 89 > EOF
89 90 Editing (d80cc2da061e), you may commit or record as needed now.
90 91 (hg histedit --continue to resume)
91 92 [1]
92 93 $ hg pull --rebase
93 94 abort: histedit in progress
94 95 (use 'hg histedit --continue' or 'hg histedit --abort')
95 96 [255]
96 97 $ hg histedit --abort --quiet
97 98
99 Abort pull early with pending uncommitted merge:
100
101 $ cd ..
102 $ hg clone --noupdate c d
103 $ cd d
104 $ hg tglog
105 o 1: 'C2'
106 |
107 o 0: 'C1'
108
109 $ hg update --quiet 0
110 $ echo M1 > M1
111 $ hg commit --quiet -Am M1
112 $ hg update --quiet 1
113 $ hg merge 2
114 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
115 (branch merge, don't forget to commit)
116 $ hg pull --rebase
117 abort: outstanding uncommitted merge
118 (cannot pull with rebase: please commit or shelve your changes first)
119 [255]
120 $ hg update --clean --quiet
121
122 Abort pull early with unclean subrepo:
123 $ echo s = s > .hgsub
124 $ hg add .hgsub
125 $ hg init s
126 $ hg commit -m "generated a subrepo"
127 $ echo a > s/a
128 $ hg -R s add s/a
129 $ hg pull --rebase
130 abort: uncommitted changes in subrepository 's'
131 (cannot pull with rebase: please commit or shelve your changes first)
132 [255]
133
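The three aborts exercised above all stem from the same precondition: the working directory must be clean before pull --rebase proceeds. As a rough, illustrative sketch of that pattern (hypothetical helper and message wiring, not the rebase extension's actual code):

    # Hypothetical sketch of the "clean working directory" precondition
    # pattern; not the rebase extension's actual implementation.
    def checkcleanwdir(hasmerge, dirtyfiles, dirtysubrepos):
        hint = ('cannot pull with rebase: '
                'please commit or shelve your changes first')
        if hasmerge:
            raise RuntimeError('outstanding uncommitted merge (%s)' % hint)
        if dirtysubrepos:
            raise RuntimeError("uncommitted changes in subrepository '%s' (%s)"
                               % (sorted(dirtysubrepos)[0], hint))
        if dirtyfiles:
            raise RuntimeError('uncommitted changes (%s)' % hint)

    try:
        checkcleanwdir(hasmerge=False, dirtyfiles=['L1'], dirtysubrepos=[])
    except RuntimeError as e:
        print('abort: %s' % e)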
98 134 Invoke pull --rebase with nothing to rebase:
99 135
100 136 $ cd ../c
101 137
102 138 $ hg book norebase
103 139 $ hg pull --rebase
104 140 pulling from $TESTTMP/a (glob)
105 141 searching for changes
106 142 adding changesets
107 143 adding manifests
108 144 adding file changes
109 145 added 1 changesets with 1 changes to 1 files
110 146 nothing to rebase - updating instead
111 147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
112 148 updating bookmark norebase
113 149
114 150 $ hg tglog -l 1
115 151 @ 2: 'R1'
116 152 |
117 153 ~
118 154
119 155 pull --rebase --update should ignore --update:
120 156
121 157 $ hg pull --rebase --update
122 158 pulling from $TESTTMP/a (glob)
123 159 searching for changes
124 160 no changes found
125 161
126 162 pull --rebase doesn't update if nothing has been pulled:
127 163
128 164 $ hg up -q 1
129 165
130 166 $ hg pull --rebase
131 167 pulling from $TESTTMP/a (glob)
132 168 searching for changes
133 169 no changes found
134 170
135 171 $ hg tglog -l 1
136 172 o 2: 'R1'
137 173 |
138 174 ~
139 175
140 176 $ cd ..
141 177
142 178 pull --rebase works when a specific revision is pulled (issue3619)
143 179
144 180 $ cd a
145 181 $ hg tglog
146 182 @ 2: 'R1'
147 183 |
148 184 o 1: 'C2'
149 185 |
150 186 o 0: 'C1'
151 187
152 188 $ echo R2 > R2
153 189 $ hg ci -Am R2
154 190 adding R2
155 191 $ echo R3 > R3
156 192 $ hg ci -Am R3
157 193 adding R3
158 194 $ cd ../c
159 195 $ hg tglog
160 196 o 2: 'R1'
161 197 |
162 198 @ 1: 'C2'
163 199 |
164 200 o 0: 'C1'
165 201
166 202 $ echo L1 > L1
167 203 $ hg ci -Am L1
168 204 adding L1
169 205 created new head
170 206 $ hg pull --rev tip --rebase
171 207 pulling from $TESTTMP/a (glob)
172 208 searching for changes
173 209 adding changesets
174 210 adding manifests
175 211 adding file changes
176 212 added 2 changesets with 2 changes to 2 files
177 213 rebasing 3:ff8d69a621f9 "L1"
178 214 saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
179 215 $ hg tglog
180 216 @ 5: 'L1'
181 217 |
182 218 o 4: 'R3'
183 219 |
184 220 o 3: 'R2'
185 221 |
186 222 o 2: 'R1'
187 223 |
188 224 o 1: 'C2'
189 225 |
190 226 o 0: 'C1'
191 227
192 228 pull --rebase works with bundle2 turned on
193 229
194 230 $ cd ../a
195 231 $ echo R4 > R4
196 232 $ hg ci -Am R4
197 233 adding R4
198 234 $ hg tglog
199 235 @ 5: 'R4'
200 236 |
201 237 o 4: 'R3'
202 238 |
203 239 o 3: 'R2'
204 240 |
205 241 o 2: 'R1'
206 242 |
207 243 o 1: 'C2'
208 244 |
209 245 o 0: 'C1'
210 246
211 247 $ cd ../c
212 248 $ hg pull --rebase
213 249 pulling from $TESTTMP/a (glob)
214 250 searching for changes
215 251 adding changesets
216 252 adding manifests
217 253 adding file changes
218 254 added 1 changesets with 1 changes to 1 files (+1 heads)
219 255 rebasing 5:518d153c0ba3 "L1"
220 256 saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-backup.hg (glob)
221 257 $ hg tglog
222 258 @ 6: 'L1'
223 259 |
224 260 o 5: 'R4'
225 261 |
226 262 o 4: 'R3'
227 263 |
228 264 o 3: 'R2'
229 265 |
230 266 o 2: 'R1'
231 267 |
232 268 o 1: 'C2'
233 269 |
234 270 o 0: 'C1'
235 271
236 272
237 273 pull --rebase only updates if there is nothing to rebase
238 274
239 275 $ cd ../a
240 276 $ echo R5 > R5
241 277 $ hg ci -Am R5
242 278 adding R5
243 279 $ hg tglog
244 280 @ 6: 'R5'
245 281 |
246 282 o 5: 'R4'
247 283 |
248 284 o 4: 'R3'
249 285 |
250 286 o 3: 'R2'
251 287 |
252 288 o 2: 'R1'
253 289 |
254 290 o 1: 'C2'
255 291 |
256 292 o 0: 'C1'
257 293
258 294 $ cd ../c
259 295 $ echo L2 > L2
260 296 $ hg ci -Am L2
261 297 adding L2
262 298 $ hg up 'desc(L1)'
263 299 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
264 300 $ hg pull --rebase
265 301 pulling from $TESTTMP/a (glob)
266 302 searching for changes
267 303 adding changesets
268 304 adding manifests
269 305 adding file changes
270 306 added 1 changesets with 1 changes to 1 files (+1 heads)
271 307 rebasing 6:0d0727eb7ce0 "L1"
272 308 rebasing 7:c1f58876e3bf "L2"
273 309 saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-backup.hg (glob)
274 310 $ hg tglog
275 311 o 8: 'L2'
276 312 |
277 313 @ 7: 'L1'
278 314 |
279 315 o 6: 'R5'
280 316 |
281 317 o 5: 'R4'
282 318 |
283 319 o 4: 'R3'
284 320 |
285 321 o 3: 'R2'
286 322 |
287 323 o 2: 'R1'
288 324 |
289 325 o 1: 'C2'
290 326 |
291 327 o 0: 'C1'
292 328
293 329
294 330 pull --rebase update (no rebase) uses a proper update:
295 331
296 332 - warn about other head.
297 333
298 334 $ cd ../a
299 335 $ echo R6 > R6
300 336 $ hg ci -Am R6
301 337 adding R6
302 338 $ cd ../c
303 339 $ hg up 'desc(R5)'
304 340 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
305 341 $ hg pull --rebase
306 342 pulling from $TESTTMP/a (glob)
307 343 searching for changes
308 344 adding changesets
309 345 adding manifests
310 346 adding file changes
311 347 added 1 changesets with 1 changes to 1 files (+1 heads)
312 348 nothing to rebase - updating instead
313 349 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
314 350 1 other heads for branch "default"
315 351 $ hg tglog
316 352 @ 9: 'R6'
317 353 |
318 354 | o 8: 'L2'
319 355 | |
320 356 | o 7: 'L1'
321 357 |/
322 358 o 6: 'R5'
323 359 |
324 360 o 5: 'R4'
325 361 |
326 362 o 4: 'R3'
327 363 |
328 364 o 3: 'R2'
329 365 |
330 366 o 2: 'R1'
331 367 |
332 368 o 1: 'C2'
333 369 |
334 370 o 0: 'C1'
335 371
336 372
337 373 Multiple pre-existing heads on the branch
338 374 -----------------------------------------
339 375
340 376 Pull brings content, but nothing on the current branch; we should not consider
341 377 pre-existing heads.
342 378
343 379 $ cd ../a
344 380 $ hg branch unrelatedbranch
345 381 marked working directory as branch unrelatedbranch
346 382 (branches are permanent and global, did you want a bookmark?)
347 383 $ echo B1 > B1
348 384 $ hg commit -Am B1
349 385 adding B1
350 386 $ cd ../c
351 387 $ hg up 'desc(L2)'
352 388 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
353 389 $ hg pull --rebase
354 390 pulling from $TESTTMP/a (glob)
355 391 searching for changes
356 392 adding changesets
357 393 adding manifests
358 394 adding file changes
359 395 added 1 changesets with 1 changes to 1 files
360 396 nothing to rebase
361 397
362 398 There are two local heads and we pull a third one.
363 399 The second local head should not confuse `hg pull --rebase`.
364 400
365 401 $ hg up 'desc(R6)'
366 402 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
367 403 $ echo M1 > M1
368 404 $ hg commit -Am M1
369 405 adding M1
370 406 $ cd ../a
371 407 $ hg up 'desc(R6)'
372 408 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
373 409 $ echo R7 > R7
374 410 $ hg commit -Am R7
375 411 adding R7
376 412 $ cd ../c
377 413 $ hg up 'desc(L2)'
378 414 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
379 415 $ hg pull --rebase
380 416 pulling from $TESTTMP/a (glob)
381 417 searching for changes
382 418 adding changesets
383 419 adding manifests
384 420 adding file changes
385 421 added 1 changesets with 1 changes to 1 files (+1 heads)
386 422 rebasing 7:864e0a2d2614 "L1"
387 423 rebasing 8:6dc0ea5dcf55 "L2"
388 424 saved backup bundle to $TESTTMP/c/.hg/strip-backup/864e0a2d2614-2f72c89c-backup.hg (glob)
389 425 $ hg tglog
390 426 @ 12: 'L2'
391 427 |
392 428 o 11: 'L1'
393 429 |
394 430 o 10: 'R7'
395 431 |
396 432 | o 9: 'M1'
397 433 |/
398 434 | o 8: 'B1' unrelatedbranch
399 435 |/
400 436 o 7: 'R6'
401 437 |
402 438 o 6: 'R5'
403 439 |
404 440 o 5: 'R4'
405 441 |
406 442 o 4: 'R3'
407 443 |
408 444 o 3: 'R2'
409 445 |
410 446 o 2: 'R1'
411 447 |
412 448 o 1: 'C2'
413 449 |
414 450 o 0: 'C1'
415 451