checkunresolved: move to new package to help avoid import cycles...
Augie Fackler
r30494:c1149533 default
@@ -34,6 +34,7 @@ from . import (
     graphmod,
     lock as lockmod,
     match as matchmod,
+    mergeutil,
     obsolete,
     patch,
     pathutil,
@@ -3407,13 +3408,7 @@ def command(table):

     return cmd

-def checkunresolved(ms):
-    if list(ms.unresolved()):
-        raise error.Abort(_("unresolved merge conflicts "
-                            "(see 'hg help resolve')"))
-    if ms.mdstate() != 's' or list(ms.driverresolved()):
-        raise error.Abort(_('driver-resolved merge conflicts'),
-                          hint=_('run "hg resolve --all" to resolve'))
+checkunresolved = mergeutil.checkunresolved

 # a list of (ui, repo, otherpeer, opts, missing) functions called by
 # commands.outgoing. "missing" is "missing" of the result of
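
The one-line replacement above keeps cmdutil.checkunresolved as a working name, so existing call sites do not have to change in this commit; only the function's home moves. A minimal sketch of what callers see afterwards (illustrative only; it assumes a Mercurial with this change installed, and `ms` stands for an existing mergestate object):

    from mercurial import cmdutil, mergeutil

    # After this change both names are bound to the same function object,
    # so old call sites keep working while new code can import the small
    # mergeutil module instead of the much heavier cmdutil.
    assert cmdutil.checkunresolved is mergeutil.checkunresolved

    # Typical call site (unchanged): aborts if the merge state still has
    # unresolved or driver-resolved files.
    # mergeutil.checkunresolved(ms)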
This diff has been collapsed as it changes many lines (3510 lines changed).
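
For orientation before the collapsed hunks that follow: the new file keeps only the module header, a much smaller import block, and the checkunresolved function that the hunk above removed from cmdutil.py. Roughly, as reconstructed from this changeset (license comment abbreviated):

    # mergeutil.py - help for merge processing in mercurial
    # (copyright/license header unchanged from cmdutil.py)

    from __future__ import absolute_import

    from .i18n import _

    from . import (
        error,
    )

    def checkunresolved(ms):
        if list(ms.unresolved()):
            raise error.Abort(_("unresolved merge conflicts "
                                "(see 'hg help resolve')"))
        if ms.mdstate() != 's' or list(ms.driverresolved()):
            raise error.Abort(_('driver-resolved merge conflicts'),
                              hint=_('run "hg resolve --all" to resolve'))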
@@ -1,4 +1,4 @@
-# cmdutil.py - help for command processing in mercurial
+# mergeutil.py - help for merge processing in mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
@@ -7,3405 +7,11 @@

 from __future__ import absolute_import

-import errno
-import os
-import re
-import sys
-import tempfile
-
 from .i18n import _
-from .node import (
-    bin,
-    hex,
-    nullid,
-    nullrev,
-    short,
-)
-
+
 from . import (
-    bookmarks,
-    changelog,
-    copies,
-    crecord as crecordmod,
-    dirstateguard as dirstateguardmod,
-    encoding,
     error,
-    formatter,
-    graphmod,
-    lock as lockmod,
-    match as matchmod,
-    obsolete,
-    patch,
-    pathutil,
-    phases,
-    repair,
-    revlog,
-    revset,
-    scmutil,
-    templatekw,
-    templater,
-    util,
 )
49 stringio = util.stringio
50
51 def ishunk(x):
52 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
53 return isinstance(x, hunkclasses)
54
55 def newandmodified(chunks, originalchunks):
56 newlyaddedandmodifiedfiles = set()
57 for chunk in chunks:
58 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
59 originalchunks:
60 newlyaddedandmodifiedfiles.add(chunk.header.filename())
61 return newlyaddedandmodifiedfiles
62
63 def parsealiases(cmd):
64 return cmd.lstrip("^").split("|")
65
66 def setupwrapcolorwrite(ui):
67 # wrap ui.write so diff output can be labeled/colorized
68 def wrapwrite(orig, *args, **kw):
69 label = kw.pop('label', '')
70 for chunk, l in patch.difflabel(lambda: args):
71 orig(chunk, label=label + l)
72
73 oldwrite = ui.write
74 def wrap(*args, **kwargs):
75 return wrapwrite(oldwrite, *args, **kwargs)
76 setattr(ui, 'write', wrap)
77 return oldwrite
78
79 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
80 if usecurses:
81 if testfile:
82 recordfn = crecordmod.testdecorator(testfile,
83 crecordmod.testchunkselector)
84 else:
85 recordfn = crecordmod.chunkselector
86
87 return crecordmod.filterpatch(ui, originalhunks, recordfn)
88
89 else:
90 return patch.filterpatch(ui, originalhunks, operation)
91
92 def recordfilter(ui, originalhunks, operation=None):
93 """ Prompts the user to filter the originalhunks and return a list of
94 selected hunks.
95 *operation* is used for to build ui messages to indicate the user what
96 kind of filtering they are doing: reverting, committing, shelving, etc.
97 (see patch.filterpatch).
98 """
99 usecurses = crecordmod.checkcurses(ui)
100 testfile = ui.config('experimental', 'crecordtest', None)
101 oldwrite = setupwrapcolorwrite(ui)
102 try:
103 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
104 testfile, operation)
105 finally:
106 ui.write = oldwrite
107 return newchunks, newopts
108
109 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
110 filterfn, *pats, **opts):
111 from . import merge as mergemod
112 if not ui.interactive():
113 if cmdsuggest:
114 msg = _('running non-interactively, use %s instead') % cmdsuggest
115 else:
116 msg = _('running non-interactively')
117 raise error.Abort(msg)
118
119 # make sure username is set before going interactive
120 if not opts.get('user'):
121 ui.username() # raise exception, username not provided
122
123 def recordfunc(ui, repo, message, match, opts):
124 """This is generic record driver.
125
126 Its job is to interactively filter local changes, and
127 accordingly prepare working directory into a state in which the
128 job can be delegated to a non-interactive commit command such as
129 'commit' or 'qrefresh'.
130
131 After the actual job is done by non-interactive command, the
132 working directory is restored to its original state.
133
134 In the end we'll record interesting changes, and everything else
135 will be left in place, so the user can continue working.
136 """
137
138 checkunfinished(repo, commit=True)
139 wctx = repo[None]
140 merge = len(wctx.parents()) > 1
141 if merge:
142 raise error.Abort(_('cannot partially commit a merge '
143 '(use "hg commit" instead)'))
144
145 def fail(f, msg):
146 raise error.Abort('%s: %s' % (f, msg))
147
148 force = opts.get('force')
149 if not force:
150 vdirs = []
151 match.explicitdir = vdirs.append
152 match.bad = fail
153
154 status = repo.status(match=match)
155 if not force:
156 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
157 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
158 diffopts.nodates = True
159 diffopts.git = True
160 diffopts.showfunc = True
161 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
162 originalchunks = patch.parsepatch(originaldiff)
163
164 # 1. filter patch, since we are intending to apply subset of it
165 try:
166 chunks, newopts = filterfn(ui, originalchunks)
167 except patch.PatchError as err:
168 raise error.Abort(_('error parsing patch: %s') % err)
169 opts.update(newopts)
170
171 # We need to keep a backup of files that have been newly added and
172 # modified during the recording process because there is a previous
173 # version without the edit in the workdir
174 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
175 contenders = set()
176 for h in chunks:
177 try:
178 contenders.update(set(h.files()))
179 except AttributeError:
180 pass
181
182 changed = status.modified + status.added + status.removed
183 newfiles = [f for f in changed if f in contenders]
184 if not newfiles:
185 ui.status(_('no changes to record\n'))
186 return 0
187
188 modified = set(status.modified)
189
190 # 2. backup changed files, so we can restore them in the end
191
192 if backupall:
193 tobackup = changed
194 else:
195 tobackup = [f for f in newfiles if f in modified or f in \
196 newlyaddedandmodifiedfiles]
197 backups = {}
198 if tobackup:
199 backupdir = repo.join('record-backups')
200 try:
201 os.mkdir(backupdir)
202 except OSError as err:
203 if err.errno != errno.EEXIST:
204 raise
205 try:
206 # backup continues
207 for f in tobackup:
208 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
209 dir=backupdir)
210 os.close(fd)
211 ui.debug('backup %r as %r\n' % (f, tmpname))
212 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
213 backups[f] = tmpname
214
215 fp = stringio()
216 for c in chunks:
217 fname = c.filename()
218 if fname in backups:
219 c.write(fp)
220 dopatch = fp.tell()
221 fp.seek(0)
222
223 # 2.5 optionally review / modify patch in text editor
224 if opts.get('review', False):
225 patchtext = (crecordmod.diffhelptext
226 + crecordmod.patchhelptext
227 + fp.read())
228 reviewedpatch = ui.edit(patchtext, "",
229 extra={"suffix": ".diff"})
230 fp.truncate(0)
231 fp.write(reviewedpatch)
232 fp.seek(0)
233
234 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
235 # 3a. apply filtered patch to clean repo (clean)
236 if backups:
237 # Equivalent to hg.revert
238 m = scmutil.matchfiles(repo, backups.keys())
239 mergemod.update(repo, repo.dirstate.p1(),
240 False, True, matcher=m)
241
242 # 3b. (apply)
243 if dopatch:
244 try:
245 ui.debug('applying patch\n')
246 ui.debug(fp.getvalue())
247 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
248 except patch.PatchError as err:
249 raise error.Abort(str(err))
250 del fp
251
252 # 4. We prepared working directory according to filtered
253 # patch. Now is the time to delegate the job to
254 # commit/qrefresh or the like!
255
256 # Make all of the pathnames absolute.
257 newfiles = [repo.wjoin(nf) for nf in newfiles]
258 return commitfunc(ui, repo, *newfiles, **opts)
259 finally:
260 # 5. finally restore backed-up files
261 try:
262 dirstate = repo.dirstate
263 for realname, tmpname in backups.iteritems():
264 ui.debug('restoring %r to %r\n' % (tmpname, realname))
265
266 if dirstate[realname] == 'n':
267 # without normallookup, restoring timestamp
268 # may cause partially committed files
269 # to be treated as unmodified
270 dirstate.normallookup(realname)
271
272 # copystat=True here and above are a hack to trick any
273 # editors that have f open that we haven't modified them.
274 #
275 # Also note that this racy as an editor could notice the
276 # file's mtime before we've finished writing it.
277 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
278 os.unlink(tmpname)
279 if tobackup:
280 os.rmdir(backupdir)
281 except OSError:
282 pass
283
284 def recordinwlock(ui, repo, message, match, opts):
285 with repo.wlock():
286 return recordfunc(ui, repo, message, match, opts)
287
288 return commit(ui, repo, recordinwlock, pats, opts)
289
290 def findpossible(cmd, table, strict=False):
291 """
292 Return cmd -> (aliases, command table entry)
293 for each matching command.
294 Return debug commands (or their aliases) only if no normal command matches.
295 """
296 choice = {}
297 debugchoice = {}
298
299 if cmd in table:
300 # short-circuit exact matches, "log" alias beats "^log|history"
301 keys = [cmd]
302 else:
303 keys = table.keys()
304
305 allcmds = []
306 for e in keys:
307 aliases = parsealiases(e)
308 allcmds.extend(aliases)
309 found = None
310 if cmd in aliases:
311 found = cmd
312 elif not strict:
313 for a in aliases:
314 if a.startswith(cmd):
315 found = a
316 break
317 if found is not None:
318 if aliases[0].startswith("debug") or found.startswith("debug"):
319 debugchoice[found] = (aliases, table[e])
320 else:
321 choice[found] = (aliases, table[e])
322
323 if not choice and debugchoice:
324 choice = debugchoice
325
326 return choice, allcmds
327
328 def findcmd(cmd, table, strict=True):
329 """Return (aliases, command table entry) for command string."""
330 choice, allcmds = findpossible(cmd, table, strict)
331
332 if cmd in choice:
333 return choice[cmd]
334
335 if len(choice) > 1:
336 clist = choice.keys()
337 clist.sort()
338 raise error.AmbiguousCommand(cmd, clist)
339
340 if choice:
341 return choice.values()[0]
342
343 raise error.UnknownCommand(cmd, allcmds)
344
345 def findrepo(p):
346 while not os.path.isdir(os.path.join(p, ".hg")):
347 oldp, p = p, os.path.dirname(p)
348 if p == oldp:
349 return None
350
351 return p
352
353 def bailifchanged(repo, merge=True):
354 if merge and repo.dirstate.p2() != nullid:
355 raise error.Abort(_('outstanding uncommitted merge'))
356 modified, added, removed, deleted = repo.status()[:4]
357 if modified or added or removed or deleted:
358 raise error.Abort(_('uncommitted changes'))
359 ctx = repo[None]
360 for s in sorted(ctx.substate):
361 ctx.sub(s).bailifchanged()
362
363 def logmessage(ui, opts):
364 """ get the log message according to -m and -l option """
365 message = opts.get('message')
366 logfile = opts.get('logfile')
367
368 if message and logfile:
369 raise error.Abort(_('options --message and --logfile are mutually '
370 'exclusive'))
371 if not message and logfile:
372 try:
373 if logfile == '-':
374 message = ui.fin.read()
375 else:
376 message = '\n'.join(util.readfile(logfile).splitlines())
377 except IOError as inst:
378 raise error.Abort(_("can't read commit message '%s': %s") %
379 (logfile, inst.strerror))
380 return message
381
382 def mergeeditform(ctxorbool, baseformname):
383 """return appropriate editform name (referencing a committemplate)
384
385 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
386 merging is committed.
387
388 This returns baseformname with '.merge' appended if it is a merge,
389 otherwise '.normal' is appended.
390 """
391 if isinstance(ctxorbool, bool):
392 if ctxorbool:
393 return baseformname + ".merge"
394 elif 1 < len(ctxorbool.parents()):
395 return baseformname + ".merge"
396
397 return baseformname + ".normal"
398
399 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
400 editform='', **opts):
401 """get appropriate commit message editor according to '--edit' option
402
403 'finishdesc' is a function to be called with edited commit message
404 (= 'description' of the new changeset) just after editing, but
405 before checking empty-ness. It should return actual text to be
406 stored into history. This allows to change description before
407 storing.
408
409 'extramsg' is a extra message to be shown in the editor instead of
410 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
411 is automatically added.
412
413 'editform' is a dot-separated list of names, to distinguish
414 the purpose of commit text editing.
415
416 'getcommiteditor' returns 'commitforceeditor' regardless of
417 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
418 they are specific for usage in MQ.
419 """
420 if edit or finishdesc or extramsg:
421 return lambda r, c, s: commitforceeditor(r, c, s,
422 finishdesc=finishdesc,
423 extramsg=extramsg,
424 editform=editform)
425 elif editform:
426 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
427 else:
428 return commiteditor
429
430 def loglimit(opts):
431 """get the log limit according to option -l/--limit"""
432 limit = opts.get('limit')
433 if limit:
434 try:
435 limit = int(limit)
436 except ValueError:
437 raise error.Abort(_('limit must be a positive integer'))
438 if limit <= 0:
439 raise error.Abort(_('limit must be positive'))
440 else:
441 limit = None
442 return limit
443
444 def makefilename(repo, pat, node, desc=None,
445 total=None, seqno=None, revwidth=None, pathname=None):
446 node_expander = {
447 'H': lambda: hex(node),
448 'R': lambda: str(repo.changelog.rev(node)),
449 'h': lambda: short(node),
450 'm': lambda: re.sub('[^\w]', '_', str(desc))
451 }
452 expander = {
453 '%': lambda: '%',
454 'b': lambda: os.path.basename(repo.root),
455 }
456
457 try:
458 if node:
459 expander.update(node_expander)
460 if node:
461 expander['r'] = (lambda:
462 str(repo.changelog.rev(node)).zfill(revwidth or 0))
463 if total is not None:
464 expander['N'] = lambda: str(total)
465 if seqno is not None:
466 expander['n'] = lambda: str(seqno)
467 if total is not None and seqno is not None:
468 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
469 if pathname is not None:
470 expander['s'] = lambda: os.path.basename(pathname)
471 expander['d'] = lambda: os.path.dirname(pathname) or '.'
472 expander['p'] = lambda: pathname
473
474 newname = []
475 patlen = len(pat)
476 i = 0
477 while i < patlen:
478 c = pat[i]
479 if c == '%':
480 i += 1
481 c = pat[i]
482 c = expander[c]()
483 newname.append(c)
484 i += 1
485 return ''.join(newname)
486 except KeyError as inst:
487 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
488 inst.args[0])
489
490 class _unclosablefile(object):
491 def __init__(self, fp):
492 self._fp = fp
493
494 def close(self):
495 pass
496
497 def __iter__(self):
498 return iter(self._fp)
499
500 def __getattr__(self, attr):
501 return getattr(self._fp, attr)
502
503 def __enter__(self):
504 return self
505
506 def __exit__(self, exc_type, exc_value, exc_tb):
507 pass
508
509 def makefileobj(repo, pat, node=None, desc=None, total=None,
510 seqno=None, revwidth=None, mode='wb', modemap=None,
511 pathname=None):
512
513 writable = mode not in ('r', 'rb')
514
515 if not pat or pat == '-':
516 if writable:
517 fp = repo.ui.fout
518 else:
519 fp = repo.ui.fin
520 return _unclosablefile(fp)
521 if util.safehasattr(pat, 'write') and writable:
522 return pat
523 if util.safehasattr(pat, 'read') and 'r' in mode:
524 return pat
525 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
526 if modemap is not None:
527 mode = modemap.get(fn, mode)
528 if mode == 'wb':
529 modemap[fn] = 'ab'
530 return open(fn, mode)
531
532 def openrevlog(repo, cmd, file_, opts):
533 """opens the changelog, manifest, a filelog or a given revlog"""
534 cl = opts['changelog']
535 mf = opts['manifest']
536 dir = opts['dir']
537 msg = None
538 if cl and mf:
539 msg = _('cannot specify --changelog and --manifest at the same time')
540 elif cl and dir:
541 msg = _('cannot specify --changelog and --dir at the same time')
542 elif cl or mf or dir:
543 if file_:
544 msg = _('cannot specify filename with --changelog or --manifest')
545 elif not repo:
546 msg = _('cannot specify --changelog or --manifest or --dir '
547 'without a repository')
548 if msg:
549 raise error.Abort(msg)
550
551 r = None
552 if repo:
553 if cl:
554 r = repo.unfiltered().changelog
555 elif dir:
556 if 'treemanifest' not in repo.requirements:
557 raise error.Abort(_("--dir can only be used on repos with "
558 "treemanifest enabled"))
559 dirlog = repo.manifestlog._revlog.dirlog(dir)
560 if len(dirlog):
561 r = dirlog
562 elif mf:
563 r = repo.manifestlog._revlog
564 elif file_:
565 filelog = repo.file(file_)
566 if len(filelog):
567 r = filelog
568 if not r:
569 if not file_:
570 raise error.CommandError(cmd, _('invalid arguments'))
571 if not os.path.isfile(file_):
572 raise error.Abort(_("revlog '%s' not found") % file_)
573 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
574 file_[:-2] + ".i")
575 return r
576
577 def copy(ui, repo, pats, opts, rename=False):
578 # called with the repo lock held
579 #
580 # hgsep => pathname that uses "/" to separate directories
581 # ossep => pathname that uses os.sep to separate directories
582 cwd = repo.getcwd()
583 targets = {}
584 after = opts.get("after")
585 dryrun = opts.get("dry_run")
586 wctx = repo[None]
587
588 def walkpat(pat):
589 srcs = []
590 if after:
591 badstates = '?'
592 else:
593 badstates = '?r'
594 m = scmutil.match(repo[None], [pat], opts, globbed=True)
595 for abs in repo.walk(m):
596 state = repo.dirstate[abs]
597 rel = m.rel(abs)
598 exact = m.exact(abs)
599 if state in badstates:
600 if exact and state == '?':
601 ui.warn(_('%s: not copying - file is not managed\n') % rel)
602 if exact and state == 'r':
603 ui.warn(_('%s: not copying - file has been marked for'
604 ' remove\n') % rel)
605 continue
606 # abs: hgsep
607 # rel: ossep
608 srcs.append((abs, rel, exact))
609 return srcs
610
611 # abssrc: hgsep
612 # relsrc: ossep
613 # otarget: ossep
614 def copyfile(abssrc, relsrc, otarget, exact):
615 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
616 if '/' in abstarget:
617 # We cannot normalize abstarget itself, this would prevent
618 # case only renames, like a => A.
619 abspath, absname = abstarget.rsplit('/', 1)
620 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
621 reltarget = repo.pathto(abstarget, cwd)
622 target = repo.wjoin(abstarget)
623 src = repo.wjoin(abssrc)
624 state = repo.dirstate[abstarget]
625
626 scmutil.checkportable(ui, abstarget)
627
628 # check for collisions
629 prevsrc = targets.get(abstarget)
630 if prevsrc is not None:
631 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
632 (reltarget, repo.pathto(abssrc, cwd),
633 repo.pathto(prevsrc, cwd)))
634 return
635
636 # check for overwrites
637 exists = os.path.lexists(target)
638 samefile = False
639 if exists and abssrc != abstarget:
640 if (repo.dirstate.normalize(abssrc) ==
641 repo.dirstate.normalize(abstarget)):
642 if not rename:
643 ui.warn(_("%s: can't copy - same file\n") % reltarget)
644 return
645 exists = False
646 samefile = True
647
648 if not after and exists or after and state in 'mn':
649 if not opts['force']:
650 if state in 'mn':
651 msg = _('%s: not overwriting - file already committed\n')
652 if after:
653 flags = '--after --force'
654 else:
655 flags = '--force'
656 if rename:
657 hint = _('(hg rename %s to replace the file by '
658 'recording a rename)\n') % flags
659 else:
660 hint = _('(hg copy %s to replace the file by '
661 'recording a copy)\n') % flags
662 else:
663 msg = _('%s: not overwriting - file exists\n')
664 if rename:
665 hint = _('(hg rename --after to record the rename)\n')
666 else:
667 hint = _('(hg copy --after to record the copy)\n')
668 ui.warn(msg % reltarget)
669 ui.warn(hint)
670 return
671
672 if after:
673 if not exists:
674 if rename:
675 ui.warn(_('%s: not recording move - %s does not exist\n') %
676 (relsrc, reltarget))
677 else:
678 ui.warn(_('%s: not recording copy - %s does not exist\n') %
679 (relsrc, reltarget))
680 return
681 elif not dryrun:
682 try:
683 if exists:
684 os.unlink(target)
685 targetdir = os.path.dirname(target) or '.'
686 if not os.path.isdir(targetdir):
687 os.makedirs(targetdir)
688 if samefile:
689 tmp = target + "~hgrename"
690 os.rename(src, tmp)
691 os.rename(tmp, target)
692 else:
693 util.copyfile(src, target)
694 srcexists = True
695 except IOError as inst:
696 if inst.errno == errno.ENOENT:
697 ui.warn(_('%s: deleted in working directory\n') % relsrc)
698 srcexists = False
699 else:
700 ui.warn(_('%s: cannot copy - %s\n') %
701 (relsrc, inst.strerror))
702 return True # report a failure
703
704 if ui.verbose or not exact:
705 if rename:
706 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
707 else:
708 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
709
710 targets[abstarget] = abssrc
711
712 # fix up dirstate
713 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
714 dryrun=dryrun, cwd=cwd)
715 if rename and not dryrun:
716 if not after and srcexists and not samefile:
717 util.unlinkpath(repo.wjoin(abssrc))
718 wctx.forget([abssrc])
719
720 # pat: ossep
721 # dest ossep
722 # srcs: list of (hgsep, hgsep, ossep, bool)
723 # return: function that takes hgsep and returns ossep
724 def targetpathfn(pat, dest, srcs):
725 if os.path.isdir(pat):
726 abspfx = pathutil.canonpath(repo.root, cwd, pat)
727 abspfx = util.localpath(abspfx)
728 if destdirexists:
729 striplen = len(os.path.split(abspfx)[0])
730 else:
731 striplen = len(abspfx)
732 if striplen:
733 striplen += len(os.sep)
734 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
735 elif destdirexists:
736 res = lambda p: os.path.join(dest,
737 os.path.basename(util.localpath(p)))
738 else:
739 res = lambda p: dest
740 return res
741
742 # pat: ossep
743 # dest ossep
744 # srcs: list of (hgsep, hgsep, ossep, bool)
745 # return: function that takes hgsep and returns ossep
746 def targetpathafterfn(pat, dest, srcs):
747 if matchmod.patkind(pat):
748 # a mercurial pattern
749 res = lambda p: os.path.join(dest,
750 os.path.basename(util.localpath(p)))
751 else:
752 abspfx = pathutil.canonpath(repo.root, cwd, pat)
753 if len(abspfx) < len(srcs[0][0]):
754 # A directory. Either the target path contains the last
755 # component of the source path or it does not.
756 def evalpath(striplen):
757 score = 0
758 for s in srcs:
759 t = os.path.join(dest, util.localpath(s[0])[striplen:])
760 if os.path.lexists(t):
761 score += 1
762 return score
763
764 abspfx = util.localpath(abspfx)
765 striplen = len(abspfx)
766 if striplen:
767 striplen += len(os.sep)
768 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
769 score = evalpath(striplen)
770 striplen1 = len(os.path.split(abspfx)[0])
771 if striplen1:
772 striplen1 += len(os.sep)
773 if evalpath(striplen1) > score:
774 striplen = striplen1
775 res = lambda p: os.path.join(dest,
776 util.localpath(p)[striplen:])
777 else:
778 # a file
779 if destdirexists:
780 res = lambda p: os.path.join(dest,
781 os.path.basename(util.localpath(p)))
782 else:
783 res = lambda p: dest
784 return res
785
786 pats = scmutil.expandpats(pats)
787 if not pats:
788 raise error.Abort(_('no source or destination specified'))
789 if len(pats) == 1:
790 raise error.Abort(_('no destination specified'))
791 dest = pats.pop()
792 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
793 if not destdirexists:
794 if len(pats) > 1 or matchmod.patkind(pats[0]):
795 raise error.Abort(_('with multiple sources, destination must be an '
796 'existing directory'))
797 if util.endswithsep(dest):
798 raise error.Abort(_('destination %s is not a directory') % dest)
799
800 tfn = targetpathfn
801 if after:
802 tfn = targetpathafterfn
803 copylist = []
804 for pat in pats:
805 srcs = walkpat(pat)
806 if not srcs:
807 continue
808 copylist.append((tfn(pat, dest, srcs), srcs))
809 if not copylist:
810 raise error.Abort(_('no files to copy'))
811
812 errors = 0
813 for targetpath, srcs in copylist:
814 for abssrc, relsrc, exact in srcs:
815 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
816 errors += 1
817
818 if errors:
819 ui.warn(_('(consider using --after)\n'))
820
821 return errors != 0
822
823 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
824 runargs=None, appendpid=False):
825 '''Run a command as a service.'''
826
827 def writepid(pid):
828 if opts['pid_file']:
829 if appendpid:
830 mode = 'a'
831 else:
832 mode = 'w'
833 fp = open(opts['pid_file'], mode)
834 fp.write(str(pid) + '\n')
835 fp.close()
836
837 if opts['daemon'] and not opts['daemon_postexec']:
838 # Signal child process startup with file removal
839 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
840 os.close(lockfd)
841 try:
842 if not runargs:
843 runargs = util.hgcmd() + sys.argv[1:]
844 runargs.append('--daemon-postexec=unlink:%s' % lockpath)
845 # Don't pass --cwd to the child process, because we've already
846 # changed directory.
847 for i in xrange(1, len(runargs)):
848 if runargs[i].startswith('--cwd='):
849 del runargs[i]
850 break
851 elif runargs[i].startswith('--cwd'):
852 del runargs[i:i + 2]
853 break
854 def condfn():
855 return not os.path.exists(lockpath)
856 pid = util.rundetached(runargs, condfn)
857 if pid < 0:
858 raise error.Abort(_('child process failed to start'))
859 writepid(pid)
860 finally:
861 try:
862 os.unlink(lockpath)
863 except OSError as e:
864 if e.errno != errno.ENOENT:
865 raise
866 if parentfn:
867 return parentfn(pid)
868 else:
869 return
870
871 if initfn:
872 initfn()
873
874 if not opts['daemon']:
875 writepid(util.getpid())
876
877 if opts['daemon_postexec']:
878 try:
879 os.setsid()
880 except AttributeError:
881 pass
882 for inst in opts['daemon_postexec']:
883 if inst.startswith('unlink:'):
884 lockpath = inst[7:]
885 os.unlink(lockpath)
886 elif inst.startswith('chdir:'):
887 os.chdir(inst[6:])
888 elif inst != 'none':
889 raise error.Abort(_('invalid value for --daemon-postexec: %s')
890 % inst)
891 util.hidewindow()
892 util.stdout.flush()
893 util.stderr.flush()
894
895 nullfd = os.open(os.devnull, os.O_RDWR)
896 logfilefd = nullfd
897 if logfile:
898 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
899 os.dup2(nullfd, 0)
900 os.dup2(logfilefd, 1)
901 os.dup2(logfilefd, 2)
902 if nullfd not in (0, 1, 2):
903 os.close(nullfd)
904 if logfile and logfilefd not in (0, 1, 2):
905 os.close(logfilefd)
906
907 if runfn:
908 return runfn()
909
910 ## facility to let extension process additional data into an import patch
911 # list of identifier to be executed in order
912 extrapreimport = [] # run before commit
913 extrapostimport = [] # run after commit
914 # mapping from identifier to actual import function
915 #
916 # 'preimport' are run before the commit is made and are provided the following
917 # arguments:
918 # - repo: the localrepository instance,
919 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
920 # - extra: the future extra dictionary of the changeset, please mutate it,
921 # - opts: the import options.
922 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
923 # mutation of in memory commit and more. Feel free to rework the code to get
924 # there.
925 extrapreimportmap = {}
926 # 'postimport' are run after the commit is made and are provided the following
927 # argument:
928 # - ctx: the changectx created by import.
929 extrapostimportmap = {}
930
931 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
932 """Utility function used by commands.import to import a single patch
933
934 This function is explicitly defined here to help the evolve extension to
935 wrap this part of the import logic.
936
937 The API is currently a bit ugly because it a simple code translation from
938 the import command. Feel free to make it better.
939
940 :hunk: a patch (as a binary string)
941 :parents: nodes that will be parent of the created commit
942 :opts: the full dict of option passed to the import command
943 :msgs: list to save commit message to.
944 (used in case we need to save it when failing)
945 :updatefunc: a function that update a repo to a given node
946 updatefunc(<repo>, <node>)
947 """
948 # avoid cycle context -> subrepo -> cmdutil
949 from . import context
950 extractdata = patch.extract(ui, hunk)
951 tmpname = extractdata.get('filename')
952 message = extractdata.get('message')
953 user = opts.get('user') or extractdata.get('user')
954 date = opts.get('date') or extractdata.get('date')
955 branch = extractdata.get('branch')
956 nodeid = extractdata.get('nodeid')
957 p1 = extractdata.get('p1')
958 p2 = extractdata.get('p2')
959
960 nocommit = opts.get('no_commit')
961 importbranch = opts.get('import_branch')
962 update = not opts.get('bypass')
963 strip = opts["strip"]
964 prefix = opts["prefix"]
965 sim = float(opts.get('similarity') or 0)
966 if not tmpname:
967 return (None, None, False)
968
969 rejects = False
970
971 try:
972 cmdline_message = logmessage(ui, opts)
973 if cmdline_message:
974 # pickup the cmdline msg
975 message = cmdline_message
976 elif message:
977 # pickup the patch msg
978 message = message.strip()
979 else:
980 # launch the editor
981 message = None
982 ui.debug('message:\n%s\n' % message)
983
984 if len(parents) == 1:
985 parents.append(repo[nullid])
986 if opts.get('exact'):
987 if not nodeid or not p1:
988 raise error.Abort(_('not a Mercurial patch'))
989 p1 = repo[p1]
990 p2 = repo[p2 or nullid]
991 elif p2:
992 try:
993 p1 = repo[p1]
994 p2 = repo[p2]
995 # Without any options, consider p2 only if the
996 # patch is being applied on top of the recorded
997 # first parent.
998 if p1 != parents[0]:
999 p1 = parents[0]
1000 p2 = repo[nullid]
1001 except error.RepoError:
1002 p1, p2 = parents
1003 if p2.node() == nullid:
1004 ui.warn(_("warning: import the patch as a normal revision\n"
1005 "(use --exact to import the patch as a merge)\n"))
1006 else:
1007 p1, p2 = parents
1008
1009 n = None
1010 if update:
1011 if p1 != parents[0]:
1012 updatefunc(repo, p1.node())
1013 if p2 != parents[1]:
1014 repo.setparents(p1.node(), p2.node())
1015
1016 if opts.get('exact') or importbranch:
1017 repo.dirstate.setbranch(branch or 'default')
1018
1019 partial = opts.get('partial', False)
1020 files = set()
1021 try:
1022 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1023 files=files, eolmode=None, similarity=sim / 100.0)
1024 except patch.PatchError as e:
1025 if not partial:
1026 raise error.Abort(str(e))
1027 if partial:
1028 rejects = True
1029
1030 files = list(files)
1031 if nocommit:
1032 if message:
1033 msgs.append(message)
1034 else:
1035 if opts.get('exact') or p2:
1036 # If you got here, you either use --force and know what
1037 # you are doing or used --exact or a merge patch while
1038 # being updated to its first parent.
1039 m = None
1040 else:
1041 m = scmutil.matchfiles(repo, files or [])
1042 editform = mergeeditform(repo[None], 'import.normal')
1043 if opts.get('exact'):
1044 editor = None
1045 else:
1046 editor = getcommiteditor(editform=editform, **opts)
1047 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
1048 extra = {}
1049 for idfunc in extrapreimport:
1050 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1051 try:
1052 if partial:
1053 repo.ui.setconfig('ui', 'allowemptycommit', True)
1054 n = repo.commit(message, user,
1055 date, match=m,
1056 editor=editor, extra=extra)
1057 for idfunc in extrapostimport:
1058 extrapostimportmap[idfunc](repo[n])
1059 finally:
1060 repo.ui.restoreconfig(allowemptyback)
1061 else:
1062 if opts.get('exact') or importbranch:
1063 branch = branch or 'default'
1064 else:
1065 branch = p1.branch()
1066 store = patch.filestore()
1067 try:
1068 files = set()
1069 try:
1070 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1071 files, eolmode=None)
1072 except patch.PatchError as e:
1073 raise error.Abort(str(e))
1074 if opts.get('exact'):
1075 editor = None
1076 else:
1077 editor = getcommiteditor(editform='import.bypass')
1078 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1079 message,
1080 user,
1081 date,
1082 branch, files, store,
1083 editor=editor)
1084 n = memctx.commit()
1085 finally:
1086 store.close()
1087 if opts.get('exact') and nocommit:
1088 # --exact with --no-commit is still useful in that it does merge
1089 # and branch bits
1090 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1091 elif opts.get('exact') and hex(n) != nodeid:
1092 raise error.Abort(_('patch is damaged or loses information'))
1093 msg = _('applied to working directory')
1094 if n:
1095 # i18n: refers to a short changeset id
1096 msg = _('created %s') % short(n)
1097 return (msg, n, rejects)
1098 finally:
1099 os.unlink(tmpname)
1100
1101 # facility to let extensions include additional data in an exported patch
1102 # list of identifiers to be executed in order
1103 extraexport = []
1104 # mapping from identifier to actual export function
1105 # function as to return a string to be added to the header or None
1106 # it is given two arguments (sequencenumber, changectx)
1107 extraexportmap = {}
1108
1109 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1110 opts=None, match=None):
1111 '''export changesets as hg patches.'''
1112
1113 total = len(revs)
1114 revwidth = max([len(str(rev)) for rev in revs])
1115 filemode = {}
1116
1117 def single(rev, seqno, fp):
1118 ctx = repo[rev]
1119 node = ctx.node()
1120 parents = [p.node() for p in ctx.parents() if p]
1121 branch = ctx.branch()
1122 if switch_parent:
1123 parents.reverse()
1124
1125 if parents:
1126 prev = parents[0]
1127 else:
1128 prev = nullid
1129
1130 shouldclose = False
1131 if not fp and len(template) > 0:
1132 desc_lines = ctx.description().rstrip().split('\n')
1133 desc = desc_lines[0] #Commit always has a first line.
1134 fp = makefileobj(repo, template, node, desc=desc, total=total,
1135 seqno=seqno, revwidth=revwidth, mode='wb',
1136 modemap=filemode)
1137 shouldclose = True
1138 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1139 repo.ui.note("%s\n" % fp.name)
1140
1141 if not fp:
1142 write = repo.ui.write
1143 else:
1144 def write(s, **kw):
1145 fp.write(s)
1146
1147 write("# HG changeset patch\n")
1148 write("# User %s\n" % ctx.user())
1149 write("# Date %d %d\n" % ctx.date())
1150 write("# %s\n" % util.datestr(ctx.date()))
1151 if branch and branch != 'default':
1152 write("# Branch %s\n" % branch)
1153 write("# Node ID %s\n" % hex(node))
1154 write("# Parent %s\n" % hex(prev))
1155 if len(parents) > 1:
1156 write("# Parent %s\n" % hex(parents[1]))
1157
1158 for headerid in extraexport:
1159 header = extraexportmap[headerid](seqno, ctx)
1160 if header is not None:
1161 write('# %s\n' % header)
1162 write(ctx.description().rstrip())
1163 write("\n\n")
1164
1165 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1166 write(chunk, label=label)
1167
1168 if shouldclose:
1169 fp.close()
1170
1171 for seqno, rev in enumerate(revs):
1172 single(rev, seqno + 1, fp)
1173
1174 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1175 changes=None, stat=False, fp=None, prefix='',
1176 root='', listsubrepos=False):
1177 '''show diff or diffstat.'''
1178 if fp is None:
1179 write = ui.write
1180 else:
1181 def write(s, **kw):
1182 fp.write(s)
1183
1184 if root:
1185 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1186 else:
1187 relroot = ''
1188 if relroot != '':
1189 # XXX relative roots currently don't work if the root is within a
1190 # subrepo
1191 uirelroot = match.uipath(relroot)
1192 relroot += '/'
1193 for matchroot in match.files():
1194 if not matchroot.startswith(relroot):
1195 ui.warn(_('warning: %s not inside relative root %s\n') % (
1196 match.uipath(matchroot), uirelroot))
1197
1198 if stat:
1199 diffopts = diffopts.copy(context=0)
1200 width = 80
1201 if not ui.plain():
1202 width = ui.termwidth()
1203 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1204 prefix=prefix, relroot=relroot)
1205 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1206 width=width):
1207 write(chunk, label=label)
1208 else:
1209 for chunk, label in patch.diffui(repo, node1, node2, match,
1210 changes, diffopts, prefix=prefix,
1211 relroot=relroot):
1212 write(chunk, label=label)
1213
1214 if listsubrepos:
1215 ctx1 = repo[node1]
1216 ctx2 = repo[node2]
1217 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1218 tempnode2 = node2
1219 try:
1220 if node2 is not None:
1221 tempnode2 = ctx2.substate[subpath][1]
1222 except KeyError:
1223 # A subrepo that existed in node1 was deleted between node1 and
1224 # node2 (inclusive). Thus, ctx2's substate won't contain that
1225 # subpath. The best we can do is to ignore it.
1226 tempnode2 = None
1227 submatch = matchmod.subdirmatcher(subpath, match)
1228 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1229 stat=stat, fp=fp, prefix=prefix)
1230
1231 class changeset_printer(object):
1232 '''show changeset information when templating not requested.'''
1233
1234 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1235 self.ui = ui
1236 self.repo = repo
1237 self.buffered = buffered
1238 self.matchfn = matchfn
1239 self.diffopts = diffopts
1240 self.header = {}
1241 self.hunk = {}
1242 self.lastheader = None
1243 self.footer = None
1244
1245 def flush(self, ctx):
1246 rev = ctx.rev()
1247 if rev in self.header:
1248 h = self.header[rev]
1249 if h != self.lastheader:
1250 self.lastheader = h
1251 self.ui.write(h)
1252 del self.header[rev]
1253 if rev in self.hunk:
1254 self.ui.write(self.hunk[rev])
1255 del self.hunk[rev]
1256 return 1
1257 return 0
1258
1259 def close(self):
1260 if self.footer:
1261 self.ui.write(self.footer)
1262
1263 def show(self, ctx, copies=None, matchfn=None, **props):
1264 if self.buffered:
1265 self.ui.pushbuffer(labeled=True)
1266 self._show(ctx, copies, matchfn, props)
1267 self.hunk[ctx.rev()] = self.ui.popbuffer()
1268 else:
1269 self._show(ctx, copies, matchfn, props)
1270
1271 def _show(self, ctx, copies, matchfn, props):
1272 '''show a single changeset or file revision'''
1273 changenode = ctx.node()
1274 rev = ctx.rev()
1275 if self.ui.debugflag:
1276 hexfunc = hex
1277 else:
1278 hexfunc = short
1279 # as of now, wctx.node() and wctx.rev() return None, but we want to
1280 # show the same values as {node} and {rev} templatekw
1281 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1282
1283 if self.ui.quiet:
1284 self.ui.write("%d:%s\n" % revnode, label='log.node')
1285 return
1286
1287 date = util.datestr(ctx.date())
1288
1289 # i18n: column positioning for "hg log"
1290 self.ui.write(_("changeset: %d:%s\n") % revnode,
1291 label='log.changeset changeset.%s' % ctx.phasestr())
1292
1293 # branches are shown first before any other names due to backwards
1294 # compatibility
1295 branch = ctx.branch()
1296 # don't show the default branch name
1297 if branch != 'default':
1298 # i18n: column positioning for "hg log"
1299 self.ui.write(_("branch: %s\n") % branch,
1300 label='log.branch')
1301
1302 for nsname, ns in self.repo.names.iteritems():
1303 # branches has special logic already handled above, so here we just
1304 # skip it
1305 if nsname == 'branches':
1306 continue
1307 # we will use the templatename as the color name since those two
1308 # should be the same
1309 for name in ns.names(self.repo, changenode):
1310 self.ui.write(ns.logfmt % name,
1311 label='log.%s' % ns.colorname)
1312 if self.ui.debugflag:
1313 # i18n: column positioning for "hg log"
1314 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1315 label='log.phase')
1316 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1317 label = 'log.parent changeset.%s' % pctx.phasestr()
1318 # i18n: column positioning for "hg log"
1319 self.ui.write(_("parent: %d:%s\n")
1320 % (pctx.rev(), hexfunc(pctx.node())),
1321 label=label)
1322
1323 if self.ui.debugflag and rev is not None:
1324 mnode = ctx.manifestnode()
1325 # i18n: column positioning for "hg log"
1326 self.ui.write(_("manifest: %d:%s\n") %
1327 (self.repo.manifestlog._revlog.rev(mnode),
1328 hex(mnode)),
1329 label='ui.debug log.manifest')
1330 # i18n: column positioning for "hg log"
1331 self.ui.write(_("user: %s\n") % ctx.user(),
1332 label='log.user')
1333 # i18n: column positioning for "hg log"
1334 self.ui.write(_("date: %s\n") % date,
1335 label='log.date')
1336
1337 if self.ui.debugflag:
1338 files = ctx.p1().status(ctx)[:3]
1339 for key, value in zip([# i18n: column positioning for "hg log"
1340 _("files:"),
1341 # i18n: column positioning for "hg log"
1342 _("files+:"),
1343 # i18n: column positioning for "hg log"
1344 _("files-:")], files):
1345 if value:
1346 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1347 label='ui.debug log.files')
1348 elif ctx.files() and self.ui.verbose:
1349 # i18n: column positioning for "hg log"
1350 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1351 label='ui.note log.files')
1352 if copies and self.ui.verbose:
1353 copies = ['%s (%s)' % c for c in copies]
1354 # i18n: column positioning for "hg log"
1355 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1356 label='ui.note log.copies')
1357
1358 extra = ctx.extra()
1359 if extra and self.ui.debugflag:
1360 for key, value in sorted(extra.items()):
1361 # i18n: column positioning for "hg log"
1362 self.ui.write(_("extra: %s=%s\n")
1363 % (key, value.encode('string_escape')),
1364 label='ui.debug log.extra')
1365
1366 description = ctx.description().strip()
1367 if description:
1368 if self.ui.verbose:
1369 self.ui.write(_("description:\n"),
1370 label='ui.note log.description')
1371 self.ui.write(description,
1372 label='ui.note log.description')
1373 self.ui.write("\n\n")
1374 else:
1375 # i18n: column positioning for "hg log"
1376 self.ui.write(_("summary: %s\n") %
1377 description.splitlines()[0],
1378 label='log.summary')
1379 self.ui.write("\n")
1380
1381 self.showpatch(ctx, matchfn)
1382
1383 def showpatch(self, ctx, matchfn):
1384 if not matchfn:
1385 matchfn = self.matchfn
1386 if matchfn:
1387 stat = self.diffopts.get('stat')
1388 diff = self.diffopts.get('patch')
1389 diffopts = patch.diffallopts(self.ui, self.diffopts)
1390 node = ctx.node()
1391 prev = ctx.p1().node()
1392 if stat:
1393 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1394 match=matchfn, stat=True)
1395 if diff:
1396 if stat:
1397 self.ui.write("\n")
1398 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1399 match=matchfn, stat=False)
1400 self.ui.write("\n")
1401
1402 class jsonchangeset(changeset_printer):
1403 '''format changeset information.'''
1404
1405 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1406 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1407 self.cache = {}
1408 self._first = True
1409
1410 def close(self):
1411 if not self._first:
1412 self.ui.write("\n]\n")
1413 else:
1414 self.ui.write("[]\n")
1415
1416 def _show(self, ctx, copies, matchfn, props):
1417 '''show a single changeset or file revision'''
1418 rev = ctx.rev()
1419 if rev is None:
1420 jrev = jnode = 'null'
1421 else:
1422 jrev = str(rev)
1423 jnode = '"%s"' % hex(ctx.node())
1424 j = encoding.jsonescape
1425
1426 if self._first:
1427 self.ui.write("[\n {")
1428 self._first = False
1429 else:
1430 self.ui.write(",\n {")
1431
1432 if self.ui.quiet:
1433 self.ui.write(('\n "rev": %s') % jrev)
1434 self.ui.write((',\n "node": %s') % jnode)
1435 self.ui.write('\n }')
1436 return
1437
1438 self.ui.write(('\n "rev": %s') % jrev)
1439 self.ui.write((',\n "node": %s') % jnode)
1440 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1441 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1442 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1443 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1444 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1445
1446 self.ui.write((',\n "bookmarks": [%s]') %
1447 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1448 self.ui.write((',\n "tags": [%s]') %
1449 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1450 self.ui.write((',\n "parents": [%s]') %
1451 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1452
1453 if self.ui.debugflag:
1454 if rev is None:
1455 jmanifestnode = 'null'
1456 else:
1457 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1458 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1459
1460 self.ui.write((',\n "extra": {%s}') %
1461 ", ".join('"%s": "%s"' % (j(k), j(v))
1462 for k, v in ctx.extra().items()))
1463
1464 files = ctx.p1().status(ctx)
1465 self.ui.write((',\n "modified": [%s]') %
1466 ", ".join('"%s"' % j(f) for f in files[0]))
1467 self.ui.write((',\n "added": [%s]') %
1468 ", ".join('"%s"' % j(f) for f in files[1]))
1469 self.ui.write((',\n "removed": [%s]') %
1470 ", ".join('"%s"' % j(f) for f in files[2]))
1471
1472 elif self.ui.verbose:
1473 self.ui.write((',\n "files": [%s]') %
1474 ", ".join('"%s"' % j(f) for f in ctx.files()))
1475
1476 if copies:
1477 self.ui.write((',\n "copies": {%s}') %
1478 ", ".join('"%s": "%s"' % (j(k), j(v))
1479 for k, v in copies))
1480
1481 matchfn = self.matchfn
1482 if matchfn:
1483 stat = self.diffopts.get('stat')
1484 diff = self.diffopts.get('patch')
1485 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1486 node, prev = ctx.node(), ctx.p1().node()
1487 if stat:
1488 self.ui.pushbuffer()
1489 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1490 match=matchfn, stat=True)
1491 self.ui.write((',\n "diffstat": "%s"')
1492 % j(self.ui.popbuffer()))
1493 if diff:
1494 self.ui.pushbuffer()
1495 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1496 match=matchfn, stat=False)
1497 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1498
1499 self.ui.write("\n }")
1500
1501 class changeset_templater(changeset_printer):
1502 '''format changeset information.'''
1503
1504 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1505 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1506 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1507 filters = {'formatnode': formatnode}
1508 defaulttempl = {
1509 'parent': '{rev}:{node|formatnode} ',
1510 'manifest': '{rev}:{node|formatnode}',
1511 'file_copy': '{name} ({source})',
1512 'extra': '{key}={value|stringescape}'
1513 }
1514 # filecopy is preserved for compatibility reasons
1515 defaulttempl['filecopy'] = defaulttempl['file_copy']
1516 assert not (tmpl and mapfile)
1517 if mapfile:
1518 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1519 cache=defaulttempl)
1520 else:
1521 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1522 filters=filters,
1523 cache=defaulttempl)
1524
1525 self.cache = {}
1526
1527 # find correct templates for current mode
1528 tmplmodes = [
1529 (True, None),
1530 (self.ui.verbose, 'verbose'),
1531 (self.ui.quiet, 'quiet'),
1532 (self.ui.debugflag, 'debug'),
1533 ]
1534
1535 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1536 'docheader': '', 'docfooter': ''}
1537 for mode, postfix in tmplmodes:
1538 for t in self._parts:
1539 cur = t
1540 if postfix:
1541 cur += "_" + postfix
1542 if mode and cur in self.t:
1543 self._parts[t] = cur
1544
1545 if self._parts['docheader']:
1546 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1547
1548 def close(self):
1549 if self._parts['docfooter']:
1550 if not self.footer:
1551 self.footer = ""
1552 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1553 return super(changeset_templater, self).close()
1554
1555 def _show(self, ctx, copies, matchfn, props):
1556 '''show a single changeset or file revision'''
1557 props = props.copy()
1558 props.update(templatekw.keywords)
1559 props['templ'] = self.t
1560 props['ctx'] = ctx
1561 props['repo'] = self.repo
1562 props['ui'] = self.repo.ui
1563 props['revcache'] = {'copies': copies}
1564 props['cache'] = self.cache
1565
1566 # write header
1567 if self._parts['header']:
1568 h = templater.stringify(self.t(self._parts['header'], **props))
1569 if self.buffered:
1570 self.header[ctx.rev()] = h
1571 else:
1572 if self.lastheader != h:
1573 self.lastheader = h
1574 self.ui.write(h)
1575
1576 # write changeset metadata, then patch if requested
1577 key = self._parts['changeset']
1578 self.ui.write(templater.stringify(self.t(key, **props)))
1579 self.showpatch(ctx, matchfn)
1580
1581 if self._parts['footer']:
1582 if not self.footer:
1583 self.footer = templater.stringify(
1584 self.t(self._parts['footer'], **props))
1585
1586 def gettemplate(ui, tmpl, style):
1587 """
1588 Find the template matching the given template spec or style.
1589 """
1590
1591 # ui settings
1592 if not tmpl and not style: # template are stronger than style
1593 tmpl = ui.config('ui', 'logtemplate')
1594 if tmpl:
1595 return templater.unquotestring(tmpl), None
1596 else:
1597 style = util.expandpath(ui.config('ui', 'style', ''))
1598
1599 if not tmpl and style:
1600 mapfile = style
1601 if not os.path.split(mapfile)[0]:
1602 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1603 or templater.templatepath(mapfile))
1604 if mapname:
1605 mapfile = mapname
1606 return None, mapfile
1607
1608 if not tmpl:
1609 return None, None
1610
1611 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1612
1613 def show_changeset(ui, repo, opts, buffered=False):
1614 """show one changeset using template or regular display.
1615
1616 Display format will be the first non-empty hit of:
1617 1. option 'template'
1618 2. option 'style'
1619 3. [ui] setting 'logtemplate'
1620 4. [ui] setting 'style'
1621 If all of these values are either the unset or the empty string,
1622 regular display via changeset_printer() is done.
1623 """
1624 # options
1625 matchfn = None
1626 if opts.get('patch') or opts.get('stat'):
1627 matchfn = scmutil.matchall(repo)
1628
1629 if opts.get('template') == 'json':
1630 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1631
1632 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1633
1634 if not tmpl and not mapfile:
1635 return changeset_printer(ui, repo, matchfn, opts, buffered)
1636
1637 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1638
1639 def showmarker(fm, marker, index=None):
1640 """utility function to display obsolescence marker in a readable way
1641
1642 To be used by debug function."""
1643 if index is not None:
1644 fm.write('index', '%i ', index)
1645 fm.write('precnode', '%s ', hex(marker.precnode()))
1646 succs = marker.succnodes()
1647 fm.condwrite(succs, 'succnodes', '%s ',
1648 fm.formatlist(map(hex, succs), name='node'))
1649 fm.write('flag', '%X ', marker.flags())
1650 parents = marker.parentnodes()
1651 if parents is not None:
1652 fm.write('parentnodes', '{%s} ',
1653 fm.formatlist(map(hex, parents), name='node', sep=', '))
1654 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1655 meta = marker.metadata().copy()
1656 meta.pop('date', None)
1657 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1658 fm.plain('\n')
1659
1660 def finddate(ui, repo, date):
1661 """Find the tipmost changeset that matches the given date spec"""
1662
1663 df = util.matchdate(date)
1664 m = scmutil.matchall(repo)
1665 results = {}
1666
1667 def prep(ctx, fns):
1668 d = ctx.date()
1669 if df(d[0]):
1670 results[ctx.rev()] = d
1671
1672 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1673 rev = ctx.rev()
1674 if rev in results:
1675 ui.status(_("found revision %s from %s\n") %
1676 (rev, util.datestr(results[rev])))
1677 return str(rev)
1678
1679 raise error.Abort(_("revision matching date not found"))
1680
1681 def increasingwindows(windowsize=8, sizelimit=512):
1682 while True:
1683 yield windowsize
1684 if windowsize < sizelimit:
1685 windowsize *= 2
1686
1687 class FileWalkError(Exception):
1688 pass
1689
1690 def walkfilerevs(repo, match, follow, revs, fncache):
1691 '''Walks the file history for the matched files.
1692
1693 Returns the changeset revs that are involved in the file history.
1694
1695 Throws FileWalkError if the file history can't be walked using
1696 filelogs alone.
1697 '''
1698 wanted = set()
1699 copies = []
1700 minrev, maxrev = min(revs), max(revs)
1701 def filerevgen(filelog, last):
1702 """
1703 Only files, no patterns. Check the history of each file.
1704
1705 Examines filelog entries within minrev, maxrev linkrev range
1706 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1707 tuples in backwards order
1708 """
1709 cl_count = len(repo)
1710 revs = []
1711 for j in xrange(0, last + 1):
1712 linkrev = filelog.linkrev(j)
1713 if linkrev < minrev:
1714 continue
1715 # only yield rev for which we have the changelog, it can
1716 # happen while doing "hg log" during a pull or commit
1717 if linkrev >= cl_count:
1718 break
1719
1720 parentlinkrevs = []
1721 for p in filelog.parentrevs(j):
1722 if p != nullrev:
1723 parentlinkrevs.append(filelog.linkrev(p))
1724 n = filelog.node(j)
1725 revs.append((linkrev, parentlinkrevs,
1726 follow and filelog.renamed(n)))
1727
1728 return reversed(revs)
1729 def iterfiles():
1730 pctx = repo['.']
1731 for filename in match.files():
1732 if follow:
1733 if filename not in pctx:
1734 raise error.Abort(_('cannot follow file not in parent '
1735 'revision: "%s"') % filename)
1736 yield filename, pctx[filename].filenode()
1737 else:
1738 yield filename, None
1739 for filename_node in copies:
1740 yield filename_node
1741
1742 for file_, node in iterfiles():
1743 filelog = repo.file(file_)
1744 if not len(filelog):
1745 if node is None:
1746 # A zero count may be a directory or deleted file, so
1747 # try to find matching entries on the slow path.
1748 if follow:
1749 raise error.Abort(
1750 _('cannot follow nonexistent file: "%s"') % file_)
1751 raise FileWalkError("Cannot walk via filelog")
1752 else:
1753 continue
1754
1755 if node is None:
1756 last = len(filelog) - 1
1757 else:
1758 last = filelog.rev(node)
1759
1760 # keep track of all ancestors of the file
1761 ancestors = set([filelog.linkrev(last)])
1762
1763 # iterate from latest to oldest revision
1764 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1765 if not follow:
1766 if rev > maxrev:
1767 continue
1768 else:
1769 # Note that last might not be the first interesting
1770 # rev to us:
1771 # if the file has been changed after maxrev, we'll
1772 # have linkrev(last) > maxrev, and we still need
1773 # to explore the file graph
1774 if rev not in ancestors:
1775 continue
1776 # XXX insert 1327 fix here
1777 if flparentlinkrevs:
1778 ancestors.update(flparentlinkrevs)
1779
1780 fncache.setdefault(rev, []).append(file_)
1781 wanted.add(rev)
1782 if copied:
1783 copies.append(copied)
1784
1785 return wanted
1786
1787 class _followfilter(object):
1788 def __init__(self, repo, onlyfirst=False):
1789 self.repo = repo
1790 self.startrev = nullrev
1791 self.roots = set()
1792 self.onlyfirst = onlyfirst
1793
1794 def match(self, rev):
1795 def realparents(rev):
1796 if self.onlyfirst:
1797 return self.repo.changelog.parentrevs(rev)[0:1]
1798 else:
1799 return filter(lambda x: x != nullrev,
1800 self.repo.changelog.parentrevs(rev))
1801
1802 if self.startrev == nullrev:
1803 self.startrev = rev
1804 return True
1805
1806 if rev > self.startrev:
1807 # forward: all descendants
1808 if not self.roots:
1809 self.roots.add(self.startrev)
1810 for parent in realparents(rev):
1811 if parent in self.roots:
1812 self.roots.add(rev)
1813 return True
1814 else:
1815 # backwards: all parents
1816 if not self.roots:
1817 self.roots.update(realparents(self.startrev))
1818 if rev in self.roots:
1819 self.roots.remove(rev)
1820 self.roots.update(realparents(rev))
1821 return True
1822
1823 return False
1824
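A short illustrative sketch, assuming a repo object and a starting revision number startrev: _followfilter keys off the first revision it is fed, so feeding it revisions in a single direction collects the related set, which is how the prune handling in walkchangerevs() uses it:

    ff = _followfilter(repo, onlyfirst=True)
    # walking upwards from startrev keeps first-parent descendants only
    related = [r for r in xrange(startrev, len(repo)) if ff.match(r)]
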
1825 def walkchangerevs(repo, match, opts, prepare):
1826 '''Iterate over files and the revs in which they changed.
1827
1828 Callers most commonly need to iterate backwards over the history
1829 in which they are interested. Doing so has awful (quadratic-looking)
1830 performance, so we use iterators in a "windowed" way.
1831
1832 We walk a window of revisions in the desired order. Within the
1833 window, we first walk forwards to gather data, then in the desired
1834 order (usually backwards) to display it.
1835
1836 This function returns an iterator yielding contexts. Before
1837 yielding each context, the iterator will first call the prepare
1838 function on each context in the window in forward order.'''
1839
1840 follow = opts.get('follow') or opts.get('follow_first')
1841 revs = _logrevs(repo, opts)
1842 if not revs:
1843 return []
1844 wanted = set()
1845 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1846 opts.get('removed'))
1847 fncache = {}
1848 change = repo.changectx
1849
1850 # First step is to fill wanted, the set of revisions that we want to yield.
1851 # When it does not induce extra cost, we also fill fncache for revisions in
1852 # wanted: a cache of filenames that were changed (ctx.files()) and that
1853 # match the file filtering conditions.
1854
1855 if match.always():
1856 # No files, no patterns. Display all revs.
1857 wanted = revs
1858 elif not slowpath:
1859 # We only have to read through the filelog to find wanted revisions
1860
1861 try:
1862 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1863 except FileWalkError:
1864 slowpath = True
1865
1866 # We decided to fall back to the slowpath because at least one
1867 # of the paths was not a file. Check to see if at least one of them
1868 # existed in history, otherwise simply return
1869 for path in match.files():
1870 if path == '.' or path in repo.store:
1871 break
1872 else:
1873 return []
1874
1875 if slowpath:
1876 # We have to read the changelog to match filenames against
1877 # changed files
1878
1879 if follow:
1880 raise error.Abort(_('can only follow copies/renames for explicit '
1881 'filenames'))
1882
1883 # The slow path checks files modified in every changeset.
1884 # This is really slow on large repos, so compute the set lazily.
1885 class lazywantedset(object):
1886 def __init__(self):
1887 self.set = set()
1888 self.revs = set(revs)
1889
1890 # No need to worry about locality here because it will be accessed
1891 # in the same order as the increasing window below.
1892 def __contains__(self, value):
1893 if value in self.set:
1894 return True
1895 elif not value in self.revs:
1896 return False
1897 else:
1898 self.revs.discard(value)
1899 ctx = change(value)
1900 matches = filter(match, ctx.files())
1901 if matches:
1902 fncache[value] = matches
1903 self.set.add(value)
1904 return True
1905 return False
1906
1907 def discard(self, value):
1908 self.revs.discard(value)
1909 self.set.discard(value)
1910
1911 wanted = lazywantedset()
1912
1913 # it might be worthwhile to do this in the iterator if the rev range
1914 # is descending and the prune args are all within that range
1915 for rev in opts.get('prune', ()):
1916 rev = repo[rev].rev()
1917 ff = _followfilter(repo)
1918 stop = min(revs[0], revs[-1])
1919 for x in xrange(rev, stop - 1, -1):
1920 if ff.match(x):
1921 wanted = wanted - [x]
1922
1923 # Now that wanted is correctly initialized, we can iterate over the
1924 # revision range, yielding only revisions in wanted.
1925 def iterate():
1926 if follow and match.always():
1927 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1928 def want(rev):
1929 return ff.match(rev) and rev in wanted
1930 else:
1931 def want(rev):
1932 return rev in wanted
1933
1934 it = iter(revs)
1935 stopiteration = False
1936 for windowsize in increasingwindows():
1937 nrevs = []
1938 for i in xrange(windowsize):
1939 rev = next(it, None)
1940 if rev is None:
1941 stopiteration = True
1942 break
1943 elif want(rev):
1944 nrevs.append(rev)
1945 for rev in sorted(nrevs):
1946 fns = fncache.get(rev)
1947 ctx = change(rev)
1948 if not fns:
1949 def fns_generator():
1950 for f in ctx.files():
1951 if match(f):
1952 yield f
1953 fns = fns_generator()
1954 prepare(ctx, fns)
1955 for rev in nrevs:
1956 yield change(rev)
1957
1958 if stopiteration:
1959 break
1960
1961 return iterate()
1962
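A minimal sketch of a log-like caller, assuming pats and opts were parsed from the command line: prepare() sees every context (plus the matching filenames) during the forward pass over a window, and the same contexts are then yielded again for display:

    matched = {}

    def prepare(ctx, fns):
        # fns is an iterable of matching filenames; consume it now,
        # while the window is being walked forwards
        matched[ctx.rev()] = sorted(fns)

    m = scmutil.match(repo[None], pats, opts)
    for ctx in walkchangerevs(repo, m, opts, prepare):
        ui.write("%d: %s\n" % (ctx.rev(), " ".join(matched[ctx.rev()])))
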
1963 def _makefollowlogfilematcher(repo, files, followfirst):
1964 # When displaying a revision with --patch --follow FILE, we have
1965 # to know which file of the revision must be diffed. With
1966 # --follow, we want the names of the ancestors of FILE in the
1967 # revision, stored in "fcache". "fcache" is populated by
1968 # reproducing the graph traversal already done by --follow revset
1969 # and relating revs to file names (which is not "correct" but
1970 # good enough).
1971 fcache = {}
1972 fcacheready = [False]
1973 pctx = repo['.']
1974
1975 def populate():
1976 for fn in files:
1977 fctx = pctx[fn]
1978 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1979 for c in fctx.ancestors(followfirst=followfirst):
1980 fcache.setdefault(c.rev(), set()).add(c.path())
1981
1982 def filematcher(rev):
1983 if not fcacheready[0]:
1984 # Lazy initialization
1985 fcacheready[0] = True
1986 populate()
1987 return scmutil.matchfiles(repo, fcache.get(rev, []))
1988
1989 return filematcher
1990
1991 def _makenofollowlogfilematcher(repo, pats, opts):
1992 '''hook for extensions to override the filematcher for non-follow cases'''
1993 return None
1994
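Since this is explicitly an extension hook, a hypothetical override would normally be installed with extensions.wrapfunction(); everything named "my..." below is illustrative, and the wrapped function must return either None or a callable taking a revision number:

    from mercurial import cmdutil, extensions, scmutil

    def mynofollowmatcher(orig, repo, pats, opts):
        if pats and opts.get('patch'):
            # only sensible for plain file names; patterns would need a
            # real matcher instead of matchfiles()
            return lambda rev: scmutil.matchfiles(repo, pats)
        return orig(repo, pats, opts)

    def uisetup(ui):
        extensions.wrapfunction(cmdutil, '_makenofollowlogfilematcher',
                                mynofollowmatcher)
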
1995 def _makelogrevset(repo, pats, opts, revs):
1996 """Return (expr, filematcher) where expr is a revset string built
1997 from log options and file patterns or None. If --stat or --patch
1998 are not passed filematcher is None. Otherwise it is a callable
1999 taking a revision number and returning a match object filtering
2000 the files to be detailed when displaying the revision.
2001 """
2002 opt2revset = {
2003 'no_merges': ('not merge()', None),
2004 'only_merges': ('merge()', None),
2005 '_ancestors': ('ancestors(%(val)s)', None),
2006 '_fancestors': ('_firstancestors(%(val)s)', None),
2007 '_descendants': ('descendants(%(val)s)', None),
2008 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2009 '_matchfiles': ('_matchfiles(%(val)s)', None),
2010 'date': ('date(%(val)r)', None),
2011 'branch': ('branch(%(val)r)', ' or '),
2012 '_patslog': ('filelog(%(val)r)', ' or '),
2013 '_patsfollow': ('follow(%(val)r)', ' or '),
2014 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2015 'keyword': ('keyword(%(val)r)', ' or '),
2016 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2017 'user': ('user(%(val)r)', ' or '),
2018 }
2019
2020 opts = dict(opts)
2021 # follow or not follow?
2022 follow = opts.get('follow') or opts.get('follow_first')
2023 if opts.get('follow_first'):
2024 followfirst = 1
2025 else:
2026 followfirst = 0
2027 # --follow with FILE behavior depends on revs...
2028 it = iter(revs)
2029 startrev = next(it)
2030 followdescendants = startrev < next(it, startrev)
2031
2032 # branch and only_branch are really aliases and must be handled at
2033 # the same time
2034 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2035 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2036 # pats/include/exclude are passed to match.match() directly in
2037 # _matchfiles() revset but walkchangerevs() builds its matcher with
2038 # scmutil.match(). The difference is input pats are globbed on
2039 # platforms without shell expansion (windows).
2040 wctx = repo[None]
2041 match, pats = scmutil.matchandpats(wctx, pats, opts)
2042 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2043 opts.get('removed'))
2044 if not slowpath:
2045 for f in match.files():
2046 if follow and f not in wctx:
2047 # If the file exists, it may be a directory, so let it
2048 # take the slow path.
2049 if os.path.exists(repo.wjoin(f)):
2050 slowpath = True
2051 continue
2052 else:
2053 raise error.Abort(_('cannot follow file not in parent '
2054 'revision: "%s"') % f)
2055 filelog = repo.file(f)
2056 if not filelog:
2057 # A zero count may be a directory or deleted file, so
2058 # try to find matching entries on the slow path.
2059 if follow:
2060 raise error.Abort(
2061 _('cannot follow nonexistent file: "%s"') % f)
2062 slowpath = True
2063
2064 # We decided to fall back to the slowpath because at least one
2065 # of the paths was not a file. Check to see if at least one of them
2066 # existed in history - in that case, we'll continue down the
2067 # slowpath; otherwise, we can turn off the slowpath
2068 if slowpath:
2069 for path in match.files():
2070 if path == '.' or path in repo.store:
2071 break
2072 else:
2073 slowpath = False
2074
2075 fpats = ('_patsfollow', '_patsfollowfirst')
2076 fnopats = (('_ancestors', '_fancestors'),
2077 ('_descendants', '_fdescendants'))
2078 if slowpath:
2079 # See walkchangerevs() slow path.
2080 #
2081 # pats/include/exclude cannot be represented as separate
2082 # revset expressions as their filtering logic applies at file
2083 level. For instance "-I a -X b" matches a revision touching
2084 # "a" and "b" while "file(a) and not file(b)" does
2085 # not. Besides, filesets are evaluated against the working
2086 # directory.
2087 matchargs = ['r:', 'd:relpath']
2088 for p in pats:
2089 matchargs.append('p:' + p)
2090 for p in opts.get('include', []):
2091 matchargs.append('i:' + p)
2092 for p in opts.get('exclude', []):
2093 matchargs.append('x:' + p)
2094 matchargs = ','.join(('%r' % p) for p in matchargs)
2095 opts['_matchfiles'] = matchargs
2096 if follow:
2097 opts[fnopats[0][followfirst]] = '.'
2098 else:
2099 if follow:
2100 if pats:
2101 # follow() revset interprets its file argument as a
2102 # manifest entry, so use match.files(), not pats.
2103 opts[fpats[followfirst]] = list(match.files())
2104 else:
2105 op = fnopats[followdescendants][followfirst]
2106 opts[op] = 'rev(%d)' % startrev
2107 else:
2108 opts['_patslog'] = list(pats)
2109
2110 filematcher = None
2111 if opts.get('patch') or opts.get('stat'):
2112 # When following files, track renames via a special matcher.
2113 # If we're forced to take the slowpath it means we're following
2114 # at least one pattern/directory, so don't bother with rename tracking.
2115 if follow and not match.always() and not slowpath:
2116 # _makefollowlogfilematcher expects its files argument to be
2117 # relative to the repo root, so use match.files(), not pats.
2118 filematcher = _makefollowlogfilematcher(repo, match.files(),
2119 followfirst)
2120 else:
2121 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2122 if filematcher is None:
2123 filematcher = lambda rev: match
2124
2125 expr = []
2126 for op, val in sorted(opts.iteritems()):
2127 if not val:
2128 continue
2129 if op not in opt2revset:
2130 continue
2131 revop, andor = opt2revset[op]
2132 if '%(val)' not in revop:
2133 expr.append(revop)
2134 else:
2135 if not isinstance(val, list):
2136 e = revop % {'val': val}
2137 else:
2138 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2139 expr.append(e)
2140
2141 if expr:
2142 expr = '(' + ' and '.join(expr) + ')'
2143 else:
2144 expr = None
2145 return expr, filematcher
2146
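As a rough illustration of the opt2revset table above (assuming a non-empty repository), options equivalent to "hg log -k bug -u alice" collapse into a single conjunctive revset string, and no filematcher is produced because neither --patch nor --stat is involved:

    opts = {'keyword': ['bug'], 'user': ['alice']}
    revs = _logrevs(repo, opts)
    expr, filematcher = _makelogrevset(repo, [], opts, revs)
    # expr is roughly "((keyword('bug')) and (user('alice')))"
    # filematcher is None since neither --patch nor --stat was given
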
2147 def _logrevs(repo, opts):
2148 # Default --rev value depends on --follow but --follow behavior
2149 # depends on revisions resolved from --rev...
2150 follow = opts.get('follow') or opts.get('follow_first')
2151 if opts.get('rev'):
2152 revs = scmutil.revrange(repo, opts['rev'])
2153 elif follow and repo.dirstate.p1() == nullid:
2154 revs = revset.baseset()
2155 elif follow:
2156 revs = repo.revs('reverse(:.)')
2157 else:
2158 revs = revset.spanset(repo)
2159 revs.reverse()
2160 return revs
2161
2162 def getgraphlogrevs(repo, pats, opts):
2163 """Return (revs, expr, filematcher) where revs is an iterable of
2164 revision numbers, expr is a revset string built from log options
2165 and file patterns or None, and used to filter 'revs'. If --stat or
2166 --patch are not passed filematcher is None. Otherwise it is a
2167 callable taking a revision number and returning a match object
2168 filtering the files to be detailed when displaying the revision.
2169 """
2170 limit = loglimit(opts)
2171 revs = _logrevs(repo, opts)
2172 if not revs:
2173 return revset.baseset(), None, None
2174 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2175 if opts.get('rev'):
2176 # User-specified revs might be unsorted, but don't sort before
2177 # _makelogrevset because it might depend on the order of revs
2178 if not (revs.isdescending() or revs.istopo()):
2179 revs.sort(reverse=True)
2180 if expr:
2181 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2182 revs = matcher(repo, revs)
2183 if limit is not None:
2184 limitedrevs = []
2185 for idx, rev in enumerate(revs):
2186 if idx >= limit:
2187 break
2188 limitedrevs.append(rev)
2189 revs = revset.baseset(limitedrevs)
2190
2191 return revs, expr, filematcher
2192
2193 def getlogrevs(repo, pats, opts):
2194 """Return (revs, expr, filematcher) where revs is an iterable of
2195 revision numbers, expr is a revset string built from log options
2196 and file patterns or None, and used to filter 'revs'. If --stat or
2197 --patch are not passed filematcher is None. Otherwise it is a
2198 callable taking a revision number and returning a match object
2199 filtering the files to be detailed when displaying the revision.
2200 """
2201 limit = loglimit(opts)
2202 revs = _logrevs(repo, opts)
2203 if not revs:
2204 return revset.baseset([]), None, None
2205 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2206 if expr:
2207 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2208 revs = matcher(repo, revs)
2209 if limit is not None:
2210 limitedrevs = []
2211 for idx, r in enumerate(revs):
2212 if limit <= idx:
2213 break
2214 limitedrevs.append(r)
2215 revs = revset.baseset(limitedrevs)
2216
2217 return revs, expr, filematcher
2218
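A compact sketch of how a command consumes the returned triple; show_changeset() is the displayer factory used elsewhere in this module, and pats/opts are assumed to be parsed command-line values:

    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        matchfn = filematcher(rev) if filematcher else None
        displayer.show(repo[rev], matchfn=matchfn)
    displayer.close()
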
2219 def _graphnodeformatter(ui, displayer):
2220 spec = ui.config('ui', 'graphnodetemplate')
2221 if not spec:
2222 return templatekw.showgraphnode # fast path for "{graphnode}"
2223
2224 templ = formatter.gettemplater(ui, 'graphnode', spec)
2225 cache = {}
2226 if isinstance(displayer, changeset_templater):
2227 cache = displayer.cache # reuse cache of slow templates
2228 props = templatekw.keywords.copy()
2229 props['templ'] = templ
2230 props['cache'] = cache
2231 def formatnode(repo, ctx):
2232 props['ctx'] = ctx
2233 props['repo'] = repo
2234 props['ui'] = repo.ui
2235 props['revcache'] = {}
2236 return templater.stringify(templ('graphnode', **props))
2237 return formatnode
2238
2239 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2240 filematcher=None):
2241 formatnode = _graphnodeformatter(ui, displayer)
2242 state = graphmod.asciistate()
2243 styles = state['styles']
2244
2245 # only set graph styling if HGPLAIN is not set.
2246 if ui.plain('graph'):
2247 # set all edge styles to |, the default pre-3.8 behaviour
2248 styles.update(dict.fromkeys(styles, '|'))
2249 else:
2250 edgetypes = {
2251 'parent': graphmod.PARENT,
2252 'grandparent': graphmod.GRANDPARENT,
2253 'missing': graphmod.MISSINGPARENT
2254 }
2255 for name, key in edgetypes.items():
2256 # experimental config: experimental.graphstyle.*
2257 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2258 styles[key])
2259 if not styles[key]:
2260 styles[key] = None
2261
2262 # experimental config: experimental.graphshorten
2263 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2264
2265 for rev, type, ctx, parents in dag:
2266 char = formatnode(repo, ctx)
2267 copies = None
2268 if getrenamed and ctx.rev():
2269 copies = []
2270 for fn in ctx.files():
2271 rename = getrenamed(fn, ctx.rev())
2272 if rename:
2273 copies.append((fn, rename[0]))
2274 revmatchfn = None
2275 if filematcher is not None:
2276 revmatchfn = filematcher(ctx.rev())
2277 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2278 lines = displayer.hunk.pop(rev).split('\n')
2279 if not lines[-1]:
2280 del lines[-1]
2281 displayer.flush(ctx)
2282 edges = edgefn(type, char, lines, state, rev, parents)
2283 for type, char, lines, coldata in edges:
2284 graphmod.ascii(ui, state, type, char, lines, coldata)
2285 displayer.close()
2286
2287 def graphlog(ui, repo, *pats, **opts):
2288 # Parameters are identical to log command ones
2289 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2290 revdag = graphmod.dagwalker(repo, revs)
2291
2292 getrenamed = None
2293 if opts.get('copies'):
2294 endrev = None
2295 if opts.get('rev'):
2296 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2297 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2298 displayer = show_changeset(ui, repo, opts, buffered=True)
2299 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2300 filematcher)
2301
2302 def checkunsupportedgraphflags(pats, opts):
2303 for op in ["newest_first"]:
2304 if op in opts and opts[op]:
2305 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2306 % op.replace("_", "-"))
2307
2308 def graphrevs(repo, nodes, opts):
2309 limit = loglimit(opts)
2310 nodes.reverse()
2311 if limit is not None:
2312 nodes = nodes[:limit]
2313 return graphmod.nodes(repo, nodes)
2314
2315 def add(ui, repo, match, prefix, explicitonly, **opts):
2316 join = lambda f: os.path.join(prefix, f)
2317 bad = []
2318
2319 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2320 names = []
2321 wctx = repo[None]
2322 cca = None
2323 abort, warn = scmutil.checkportabilityalert(ui)
2324 if abort or warn:
2325 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2326
2327 badmatch = matchmod.badmatch(match, badfn)
2328 dirstate = repo.dirstate
2329 # We don't want to just call wctx.walk here, since it would return a lot of
2330 # clean files, which we aren't interested in, and doing so takes time.
2331 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2332 True, False, full=False)):
2333 exact = match.exact(f)
2334 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2335 if cca:
2336 cca(f)
2337 names.append(f)
2338 if ui.verbose or not exact:
2339 ui.status(_('adding %s\n') % match.rel(f))
2340
2341 for subpath in sorted(wctx.substate):
2342 sub = wctx.sub(subpath)
2343 try:
2344 submatch = matchmod.subdirmatcher(subpath, match)
2345 if opts.get('subrepos'):
2346 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2347 else:
2348 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2349 except error.LookupError:
2350 ui.status(_("skipping missing subrepository: %s\n")
2351 % join(subpath))
2352
2353 if not opts.get('dry_run'):
2354 rejected = wctx.add(names, prefix)
2355 bad.extend(f for f in rejected if f in match.files())
2356 return bad
2357
2358 def forget(ui, repo, match, prefix, explicitonly):
2359 join = lambda f: os.path.join(prefix, f)
2360 bad = []
2361 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2362 wctx = repo[None]
2363 forgot = []
2364
2365 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2366 forget = sorted(s[0] + s[1] + s[3] + s[6])
2367 if explicitonly:
2368 forget = [f for f in forget if match.exact(f)]
2369
2370 for subpath in sorted(wctx.substate):
2371 sub = wctx.sub(subpath)
2372 try:
2373 submatch = matchmod.subdirmatcher(subpath, match)
2374 subbad, subforgot = sub.forget(submatch, prefix)
2375 bad.extend([subpath + '/' + f for f in subbad])
2376 forgot.extend([subpath + '/' + f for f in subforgot])
2377 except error.LookupError:
2378 ui.status(_("skipping missing subrepository: %s\n")
2379 % join(subpath))
2380
2381 if not explicitonly:
2382 for f in match.files():
2383 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2384 if f not in forgot:
2385 if repo.wvfs.exists(f):
2386 # Don't complain if the exact case match wasn't given.
2387 # But don't do this until after checking 'forgot', so
2388 # that subrepo files aren't normalized, and this op is
2389 # purely from data cached by the status walk above.
2390 if repo.dirstate.normalize(f) in repo.dirstate:
2391 continue
2392 ui.warn(_('not removing %s: '
2393 'file is already untracked\n')
2394 % match.rel(f))
2395 bad.append(f)
2396
2397 for f in forget:
2398 if ui.verbose or not match.exact(f):
2399 ui.status(_('removing %s\n') % match.rel(f))
2400
2401 rejected = wctx.forget(forget, prefix)
2402 bad.extend(f for f in rejected if f in match.files())
2403 forgot.extend(f for f in forget if f not in rejected)
2404 return bad, forgot
2405
2406 def files(ui, ctx, m, fm, fmt, subrepos):
2407 rev = ctx.rev()
2408 ret = 1
2409 ds = ctx.repo().dirstate
2410
2411 for f in ctx.matches(m):
2412 if rev is None and ds[f] == 'r':
2413 continue
2414 fm.startitem()
2415 if ui.verbose:
2416 fc = ctx[f]
2417 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2418 fm.data(abspath=f)
2419 fm.write('path', fmt, m.rel(f))
2420 ret = 0
2421
2422 for subpath in sorted(ctx.substate):
2423 submatch = matchmod.subdirmatcher(subpath, m)
2424 if (subrepos or m.exact(subpath) or any(submatch.files())):
2425 sub = ctx.sub(subpath)
2426 try:
2427 recurse = m.exact(subpath) or subrepos
2428 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2429 ret = 0
2430 except error.LookupError:
2431 ui.status(_("skipping missing subrepository: %s\n")
2432 % m.abs(subpath))
2433
2434 return ret
2435
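A minimal sketch of a caller, assuming ctx, pats and opts are in scope; the formatter comes from ui.formatter() and the '%s\n' format string is what a plain (non --print0) listing would use:

    m = scmutil.match(ctx, pats, opts)
    fm = ui.formatter('files', opts)
    ret = files(ui, ctx, m, fm, '%s\n', opts.get('subrepos'))
    fm.end()
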
2436 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2437 join = lambda f: os.path.join(prefix, f)
2438 ret = 0
2439 s = repo.status(match=m, clean=True)
2440 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2441
2442 wctx = repo[None]
2443
2444 if warnings is None:
2445 warnings = []
2446 warn = True
2447 else:
2448 warn = False
2449
2450 subs = sorted(wctx.substate)
2451 total = len(subs)
2452 count = 0
2453 for subpath in subs:
2454 count += 1
2455 submatch = matchmod.subdirmatcher(subpath, m)
2456 if subrepos or m.exact(subpath) or any(submatch.files()):
2457 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2458 sub = wctx.sub(subpath)
2459 try:
2460 if sub.removefiles(submatch, prefix, after, force, subrepos,
2461 warnings):
2462 ret = 1
2463 except error.LookupError:
2464 warnings.append(_("skipping missing subrepository: %s\n")
2465 % join(subpath))
2466 ui.progress(_('searching'), None)
2467
2468 # warn about failure to delete explicit files/dirs
2469 deleteddirs = util.dirs(deleted)
2470 files = m.files()
2471 total = len(files)
2472 count = 0
2473 for f in files:
2474 def insubrepo():
2475 for subpath in wctx.substate:
2476 if f.startswith(subpath + '/'):
2477 return True
2478 return False
2479
2480 count += 1
2481 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2482 isdir = f in deleteddirs or wctx.hasdir(f)
2483 if (f in repo.dirstate or isdir or f == '.'
2484 or insubrepo() or f in subs):
2485 continue
2486
2487 if repo.wvfs.exists(f):
2488 if repo.wvfs.isdir(f):
2489 warnings.append(_('not removing %s: no tracked files\n')
2490 % m.rel(f))
2491 else:
2492 warnings.append(_('not removing %s: file is untracked\n')
2493 % m.rel(f))
2494 # missing files will generate a warning elsewhere
2495 ret = 1
2496 ui.progress(_('deleting'), None)
2497
2498 if force:
2499 list = modified + deleted + clean + added
2500 elif after:
2501 list = deleted
2502 remaining = modified + added + clean
2503 total = len(remaining)
2504 count = 0
2505 for f in remaining:
2506 count += 1
2507 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2508 warnings.append(_('not removing %s: file still exists\n')
2509 % m.rel(f))
2510 ret = 1
2511 ui.progress(_('skipping'), None)
2512 else:
2513 list = deleted + clean
2514 total = len(modified) + len(added)
2515 count = 0
2516 for f in modified:
2517 count += 1
2518 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2519 warnings.append(_('not removing %s: file is modified (use -f'
2520 ' to force removal)\n') % m.rel(f))
2521 ret = 1
2522 for f in added:
2523 count += 1
2524 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2525 warnings.append(_("not removing %s: file has been marked for add"
2526 " (use 'hg forget' to undo add)\n") % m.rel(f))
2527 ret = 1
2528 ui.progress(_('skipping'), None)
2529
2530 list = sorted(list)
2531 total = len(list)
2532 count = 0
2533 for f in list:
2534 count += 1
2535 if ui.verbose or not m.exact(f):
2536 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2537 ui.status(_('removing %s\n') % m.rel(f))
2538 ui.progress(_('deleting'), None)
2539
2540 with repo.wlock():
2541 if not after:
2542 for f in list:
2543 if f in added:
2544 continue # we never unlink added files on remove
2545 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2546 repo[None].forget(list)
2547
2548 if warn:
2549 for warning in warnings:
2550 ui.warn(warning)
2551
2552 return ret
2553
2554 def cat(ui, repo, ctx, matcher, prefix, **opts):
2555 err = 1
2556
2557 def write(path):
2558 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2559 pathname=os.path.join(prefix, path))
2560 data = ctx[path].data()
2561 if opts.get('decode'):
2562 data = repo.wwritedata(path, data)
2563 fp.write(data)
2564 fp.close()
2565
2566 # Automation often uses hg cat on single files, so special case it
2567 # for performance to avoid the cost of parsing the manifest.
2568 if len(matcher.files()) == 1 and not matcher.anypats():
2569 file = matcher.files()[0]
2570 mfl = repo.manifestlog
2571 mfnode = ctx.manifestnode()
2572 try:
2573 if mfnode and mfl[mfnode].find(file)[0]:
2574 write(file)
2575 return 0
2576 except KeyError:
2577 pass
2578
2579 for abs in ctx.walk(matcher):
2580 write(abs)
2581 err = 0
2582
2583 for subpath in sorted(ctx.substate):
2584 sub = ctx.sub(subpath)
2585 try:
2586 submatch = matchmod.subdirmatcher(subpath, matcher)
2587
2588 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2589 **opts):
2590 err = 0
2591 except error.RepoLookupError:
2592 ui.status(_("skipping missing subrepository: %s\n")
2593 % os.path.join(prefix, subpath))
2594
2595 return err
2596
2597 def commit(ui, repo, commitfunc, pats, opts):
2598 '''commit the specified files or all outstanding changes'''
2599 date = opts.get('date')
2600 if date:
2601 opts['date'] = util.parsedate(date)
2602 message = logmessage(ui, opts)
2603 matcher = scmutil.match(repo[None], pats, opts)
2604
2605 # extract addremove carefully -- this function can be called from a command
2606 # that doesn't support addremove
2607 if opts.get('addremove'):
2608 if scmutil.addremove(repo, matcher, "", opts) != 0:
2609 raise error.Abort(
2610 _("failed to mark all new/missing files as added/removed"))
2611
2612 return commitfunc(ui, repo, message, matcher, opts)
2613
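A sketch of the commitfunc contract, modelled loosely on how a commit-style command drives this helper; the function name, editform and option handling here are illustrative only:

    def commitfunc(ui, repo, message, match, opts):
        editor = getcommiteditor(editform='commit.normal', **opts)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=editor)

    node = commit(ui, repo, commitfunc, pats, opts)
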
2614 def samefile(f, ctx1, ctx2):
2615 if f in ctx1.manifest():
2616 a = ctx1.filectx(f)
2617 if f in ctx2.manifest():
2618 b = ctx2.filectx(f)
2619 return (not a.cmp(b)
2620 and a.flags() == b.flags())
2621 else:
2622 return False
2623 else:
2624 return f not in ctx2.manifest()
2625
2626 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2627 # avoid cycle context -> subrepo -> cmdutil
2628 from . import context
2629
2630 # amend will reuse the existing user if not specified, but the obsolete
2631 # marker creation requires that the current user's name is specified.
2632 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2633 ui.username() # raise exception if username not set
2634
2635 ui.note(_('amending changeset %s\n') % old)
2636 base = old.p1()
2637 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2638
2639 wlock = lock = newid = None
2640 try:
2641 wlock = repo.wlock()
2642 lock = repo.lock()
2643 with repo.transaction('amend') as tr:
2644 # See if we got a message from -m or -l, if not, open the editor
2645 # with the message of the changeset to amend
2646 message = logmessage(ui, opts)
2647 # ensure logfile does not conflict with later enforcement of the
2648 # message. potential logfile content has been processed by
2649 # `logmessage` anyway.
2650 opts.pop('logfile')
2651 # First, do a regular commit to record all changes in the working
2652 # directory (if there are any)
2653 ui.callhooks = False
2654 activebookmark = repo._bookmarks.active
2655 try:
2656 repo._bookmarks.active = None
2657 opts['message'] = 'temporary amend commit for %s' % old
2658 node = commit(ui, repo, commitfunc, pats, opts)
2659 finally:
2660 repo._bookmarks.active = activebookmark
2661 repo._bookmarks.recordchange(tr)
2662 ui.callhooks = True
2663 ctx = repo[node]
2664
2665 # Participating changesets:
2666 #
2667 # node/ctx o - new (intermediate) commit that contains changes
2668 # | from working dir to go into amending commit
2669 # | (or a workingctx if there were no changes)
2670 # |
2671 # old o - changeset to amend
2672 # |
2673 # base o - parent of amending changeset
2674
2675 # Update extra dict from amended commit (e.g. to preserve graft
2676 # source)
2677 extra.update(old.extra())
2678
2679 # Also update it from the intermediate commit or from the wctx
2680 extra.update(ctx.extra())
2681
2682 if len(old.parents()) > 1:
2683 # ctx.files() isn't reliable for merges, so fall back to the
2684 # slower repo.status() method
2685 files = set([fn for st in repo.status(base, old)[:3]
2686 for fn in st])
2687 else:
2688 files = set(old.files())
2689
2690 # Second, we use either the commit we just did, or if there were no
2691 # changes the parent of the working directory as the version of the
2692 # files in the final amend commit
2693 if node:
2694 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2695
2696 user = ctx.user()
2697 date = ctx.date()
2698 # Recompute copies (avoid recording a -> b -> a)
2699 copied = copies.pathcopies(base, ctx)
2700 if old.p2:
2701 copied.update(copies.pathcopies(old.p2(), ctx))
2702
2703 # Prune files which were reverted by the updates: if old
2704 # introduced file X and our intermediate commit, node,
2705 # renamed that file, then those two files are the same and
2706 # we can discard X from our list of files. Likewise if X
2707 # was deleted, it's no longer relevant
2708 files.update(ctx.files())
2709 files = [f for f in files if not samefile(f, ctx, base)]
2710
2711 def filectxfn(repo, ctx_, path):
2712 try:
2713 fctx = ctx[path]
2714 flags = fctx.flags()
2715 mctx = context.memfilectx(repo,
2716 fctx.path(), fctx.data(),
2717 islink='l' in flags,
2718 isexec='x' in flags,
2719 copied=copied.get(path))
2720 return mctx
2721 except KeyError:
2722 return None
2723 else:
2724 ui.note(_('copying changeset %s to %s\n') % (old, base))
2725
2726 # Use version of files as in the old cset
2727 def filectxfn(repo, ctx_, path):
2728 try:
2729 return old.filectx(path)
2730 except KeyError:
2731 return None
2732
2733 user = opts.get('user') or old.user()
2734 date = opts.get('date') or old.date()
2735 editform = mergeeditform(old, 'commit.amend')
2736 editor = getcommiteditor(editform=editform, **opts)
2737 if not message:
2738 editor = getcommiteditor(edit=True, editform=editform)
2739 message = old.description()
2740
2741 pureextra = extra.copy()
2742 extra['amend_source'] = old.hex()
2743
2744 new = context.memctx(repo,
2745 parents=[base.node(), old.p2().node()],
2746 text=message,
2747 files=files,
2748 filectxfn=filectxfn,
2749 user=user,
2750 date=date,
2751 extra=extra,
2752 editor=editor)
2753
2754 newdesc = changelog.stripdesc(new.description())
2755 if ((not node)
2756 and newdesc == old.description()
2757 and user == old.user()
2758 and date == old.date()
2759 and pureextra == old.extra()):
2760 # nothing changed. continuing here would create a new node
2761 # anyway because of the amend_source noise.
2762 #
2763 # This is not what we expect from amend.
2764 return old.node()
2765
2766 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2767 try:
2768 if opts.get('secret'):
2769 commitphase = 'secret'
2770 else:
2771 commitphase = old.phase()
2772 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2773 newid = repo.commitctx(new)
2774 finally:
2775 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2776 if newid != old.node():
2777 # Reroute the working copy parent to the new changeset
2778 repo.setparents(newid, nullid)
2779
2780 # Move bookmarks from old parent to amend commit
2781 bms = repo.nodebookmarks(old.node())
2782 if bms:
2783 marks = repo._bookmarks
2784 for bm in bms:
2785 ui.debug('moving bookmarks %r from %s to %s\n' %
2786 (marks, old.hex(), hex(newid)))
2787 marks[bm] = newid
2788 marks.recordchange(tr)
2789 # commit the whole amend process
2790 if createmarkers:
2791 # mark the new changeset as successor of the rewritten one
2792 new = repo[newid]
2793 obs = [(old, (new,))]
2794 if node:
2795 obs.append((ctx, ()))
2796
2797 obsolete.createmarkers(repo, obs)
2798 if not createmarkers and newid != old.node():
2799 # Strip the intermediate commit (if there was one) and the amended
2800 # commit
2801 if node:
2802 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2803 ui.note(_('stripping amended changeset %s\n') % old)
2804 repair.strip(ui, repo, old.node(), topic='amend-backup')
2805 finally:
2806 lockmod.release(lock, wlock)
2807 return newid
2808
2809 def commiteditor(repo, ctx, subs, editform=''):
2810 if ctx.description():
2811 return ctx.description()
2812 return commitforceeditor(repo, ctx, subs, editform=editform,
2813 unchangedmessagedetection=True)
2814
2815 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2816 editform='', unchangedmessagedetection=False):
2817 if not extramsg:
2818 extramsg = _("Leave message empty to abort commit.")
2819
2820 forms = [e for e in editform.split('.') if e]
2821 forms.insert(0, 'changeset')
2822 templatetext = None
2823 while forms:
2824 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2825 if tmpl:
2826 templatetext = committext = buildcommittemplate(
2827 repo, ctx, subs, extramsg, tmpl)
2828 break
2829 forms.pop()
2830 else:
2831 committext = buildcommittext(repo, ctx, subs, extramsg)
2832
2833 # run editor in the repository root
2834 olddir = os.getcwd()
2835 os.chdir(repo.root)
2836
2837 # make in-memory changes visible to external process
2838 tr = repo.currenttransaction()
2839 repo.dirstate.write(tr)
2840 pending = tr and tr.writepending() and repo.root
2841
2842 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2843 editform=editform, pending=pending)
2844 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2845 os.chdir(olddir)
2846
2847 if finishdesc:
2848 text = finishdesc(text)
2849 if not text.strip():
2850 raise error.Abort(_("empty commit message"))
2851 if unchangedmessagedetection and editortext == templatetext:
2852 raise error.Abort(_("commit message unchanged"))
2853
2854 return text
2855
2856 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2857 ui = repo.ui
2858 tmpl, mapfile = gettemplate(ui, tmpl, None)
2859
2860 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2861
2862 for k, v in repo.ui.configitems('committemplate'):
2863 if k != 'changeset':
2864 t.t.cache[k] = v
2865
2866 if not extramsg:
2867 extramsg = '' # ensure that extramsg is string
2868
2869 ui.pushbuffer()
2870 t.show(ctx, extramsg=extramsg)
2871 return ui.popbuffer()
2872
2873 def hgprefix(msg):
2874 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2875
2876 def buildcommittext(repo, ctx, subs, extramsg):
2877 edittext = []
2878 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2879 if ctx.description():
2880 edittext.append(ctx.description())
2881 edittext.append("")
2882 edittext.append("") # Empty line between message and comments.
2883 edittext.append(hgprefix(_("Enter commit message."
2884 " Lines beginning with 'HG:' are removed.")))
2885 edittext.append(hgprefix(extramsg))
2886 edittext.append("HG: --")
2887 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2888 if ctx.p2():
2889 edittext.append(hgprefix(_("branch merge")))
2890 if ctx.branch():
2891 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2892 if bookmarks.isactivewdirparent(repo):
2893 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2894 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2895 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2896 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2897 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2898 if not added and not modified and not removed:
2899 edittext.append(hgprefix(_("no files changed")))
2900 edittext.append("")
2901
2902 return "\n".join(edittext)
2903
2904 def commitstatus(repo, node, branch, bheads=None, opts=None):
2905 if opts is None:
2906 opts = {}
2907 ctx = repo[node]
2908 parents = ctx.parents()
2909
2910 if (not opts.get('amend') and bheads and node not in bheads and not
2911 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2912 repo.ui.status(_('created new head\n'))
2913 # The message is not printed for initial roots. For the other
2914 # changesets, it is printed in the following situations:
2915 #
2916 # Par column: for the 2 parents with ...
2917 # N: null or no parent
2918 # B: parent is on another named branch
2919 # C: parent is a regular non head changeset
2920 # H: parent was a branch head of the current branch
2921 # Msg column: whether we print "created new head" message
2922 # In the following, it is assumed that there already exists some
2923 # initial branch heads of the current branch, otherwise nothing is
2924 # printed anyway.
2925 #
2926 # Par Msg Comment
2927 # N N y additional topo root
2928 #
2929 # B N y additional branch root
2930 # C N y additional topo head
2931 # H N n usual case
2932 #
2933 # B B y weird additional branch root
2934 # C B y branch merge
2935 # H B n merge with named branch
2936 #
2937 # C C y additional head from merge
2938 # C H n merge with a head
2939 #
2940 # H H n head merge: head count decreases
2941
2942 if not opts.get('close_branch'):
2943 for r in parents:
2944 if r.closesbranch() and r.branch() == branch:
2945 repo.ui.status(_('reopening closed branch head %d\n') % r)
2946
2947 if repo.ui.debugflag:
2948 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2949 elif repo.ui.verbose:
2950 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2951
2952 def postcommitstatus(repo, pats, opts):
2953 return repo.status(match=scmutil.match(repo[None], pats, opts))
2954
2955 def revert(ui, repo, ctx, parents, *pats, **opts):
2956 parent, p2 = parents
2957 node = ctx.node()
2958
2959 mf = ctx.manifest()
2960 if node == p2:
2961 parent = p2
2962
2963 # need all matching names in dirstate and manifest of target rev,
2964 # so have to walk both. do not print errors if files exist in one
2965 # but not other. in both cases, filesets should be evaluated against
2966 # workingctx to get consistent result (issue4497). this means 'set:**'
2967 # cannot be used to select missing files from target rev.
2968
2969 # `names` is a mapping for all elements in working copy and target revision
2970 # The mapping is in the form:
2971 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2972 names = {}
2973
2974 with repo.wlock():
2975 ## filling of the `names` mapping
2976 # walk dirstate to fill `names`
2977
2978 interactive = opts.get('interactive', False)
2979 wctx = repo[None]
2980 m = scmutil.match(wctx, pats, opts)
2981
2982 # we'll need this later
2983 targetsubs = sorted(s for s in wctx.substate if m(s))
2984
2985 if not m.always():
2986 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2987 names[abs] = m.rel(abs), m.exact(abs)
2988
2989 # walk target manifest to fill `names`
2990
2991 def badfn(path, msg):
2992 if path in names:
2993 return
2994 if path in ctx.substate:
2995 return
2996 path_ = path + '/'
2997 for f in names:
2998 if f.startswith(path_):
2999 return
3000 ui.warn("%s: %s\n" % (m.rel(path), msg))
3001
3002 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3003 if abs not in names:
3004 names[abs] = m.rel(abs), m.exact(abs)
3005
3006 # Find status of all files in `names`.
3007 m = scmutil.matchfiles(repo, names)
3008
3009 changes = repo.status(node1=node, match=m,
3010 unknown=True, ignored=True, clean=True)
3011 else:
3012 changes = repo.status(node1=node, match=m)
3013 for kind in changes:
3014 for abs in kind:
3015 names[abs] = m.rel(abs), m.exact(abs)
3016
3017 m = scmutil.matchfiles(repo, names)
3018
3019 modified = set(changes.modified)
3020 added = set(changes.added)
3021 removed = set(changes.removed)
3022 _deleted = set(changes.deleted)
3023 unknown = set(changes.unknown)
3024 unknown.update(changes.ignored)
3025 clean = set(changes.clean)
3026 modadded = set()
3027
3028 # split between files known in target manifest and the others
3029 smf = set(mf)
3030
3031 # determine the exact nature of the deleted changesets
3032 deladded = _deleted - smf
3033 deleted = _deleted - deladded
3034
3035 # We need to account for the state of the file in the dirstate,
3036 # even when we revert against something else than parent. This will
3037 # slightly alter the behavior of revert (doing back up or not, delete
3038 # or just forget etc).
3039 if parent == node:
3040 dsmodified = modified
3041 dsadded = added
3042 dsremoved = removed
3043 # store all local modifications, useful later for rename detection
3044 localchanges = dsmodified | dsadded
3045 modified, added, removed = set(), set(), set()
3046 else:
3047 changes = repo.status(node1=parent, match=m)
3048 dsmodified = set(changes.modified)
3049 dsadded = set(changes.added)
3050 dsremoved = set(changes.removed)
3051 # store all local modifications, useful later for rename detection
3052 localchanges = dsmodified | dsadded
3053
3054 # only take removes between wc and target into account
3055 clean |= dsremoved - removed
3056 dsremoved &= removed
3057 # distinguish between dirstate removes and the others
3058 removed -= dsremoved
3059
3060 modadded = added & dsmodified
3061 added -= modadded
3062
3063 # tell newly modified apart.
3064 dsmodified &= modified
3065 dsmodified |= modified & dsadded # dirstate added may need backup
3066 modified -= dsmodified
3067
3068 # We need to wait for some post-processing to update this set
3069 # before making the distinction. The dirstate will be used for
3070 # that purpose.
3071 dsadded = added
3072
3073 # in case of merge, files that are actually added can be reported as
3074 # modified, we need to post process the result
3075 if p2 != nullid:
3076 mergeadd = dsmodified - smf
3077 dsadded |= mergeadd
3078 dsmodified -= mergeadd
3079
3080 # if f is a rename, update `names` to also revert the source
3081 cwd = repo.getcwd()
3082 for f in localchanges:
3083 src = repo.dirstate.copied(f)
3084 # XXX should we check for rename down to target node?
3085 if src and src not in names and repo.dirstate[src] == 'r':
3086 dsremoved.add(src)
3087 names[src] = (repo.pathto(src, cwd), True)
3088
3089 # distinguish between files to forget and the others
3090 added = set()
3091 for abs in dsadded:
3092 if repo.dirstate[abs] != 'a':
3093 added.add(abs)
3094 dsadded -= added
3095
3096 for abs in deladded:
3097 if repo.dirstate[abs] == 'a':
3098 dsadded.add(abs)
3099 deladded -= dsadded
3100
3101 # For files marked as removed, we check if an unknown file is present at
3102 # the same path. If such a file exists it may need to be backed up.
3103 # Making the distinction at this stage helps have simpler backup
3104 # logic.
3105 removunk = set()
3106 for abs in removed:
3107 target = repo.wjoin(abs)
3108 if os.path.lexists(target):
3109 removunk.add(abs)
3110 removed -= removunk
3111
3112 dsremovunk = set()
3113 for abs in dsremoved:
3114 target = repo.wjoin(abs)
3115 if os.path.lexists(target):
3116 dsremovunk.add(abs)
3117 dsremoved -= dsremovunk
3118
3119 # action to be actually performed by revert
3120 # (<list of file>, message>) tuple
3121 actions = {'revert': ([], _('reverting %s\n')),
3122 'add': ([], _('adding %s\n')),
3123 'remove': ([], _('removing %s\n')),
3124 'drop': ([], _('removing %s\n')),
3125 'forget': ([], _('forgetting %s\n')),
3126 'undelete': ([], _('undeleting %s\n')),
3127 'noop': (None, _('no changes needed to %s\n')),
3128 'unknown': (None, _('file not managed: %s\n')),
3129 }
3130
3131 # "constants" that convey the backup strategy.
3132 # All set to `discard` if `no-backup` is set, to avoid checking
3133 # no_backup lower in the code.
3134 # These values are ordered for comparison purposes
3135 backupinteractive = 3 # do backup if interactively modified
3136 backup = 2 # unconditionally do backup
3137 check = 1 # check if the existing file differs from target
3138 discard = 0 # never do backup
3139 if opts.get('no_backup'):
3140 backupinteractive = backup = check = discard
3141 if interactive:
3142 dsmodifiedbackup = backupinteractive
3143 else:
3144 dsmodifiedbackup = backup
3145 tobackup = set()
3146
3147 backupanddel = actions['remove']
3148 if not opts.get('no_backup'):
3149 backupanddel = actions['drop']
3150
3151 disptable = (
3152 # dispatch table:
3153 # file state
3154 # action
3155 # make backup
3156
3157 ## Sets that results that will change file on disk
3158 # Modified compared to target, no local change
3159 (modified, actions['revert'], discard),
3160 # Modified compared to target, but local file is deleted
3161 (deleted, actions['revert'], discard),
3162 # Modified compared to target, local change
3163 (dsmodified, actions['revert'], dsmodifiedbackup),
3164 # Added since target
3165 (added, actions['remove'], discard),
3166 # Added in working directory
3167 (dsadded, actions['forget'], discard),
3168 # Added since target, have local modification
3169 (modadded, backupanddel, backup),
3170 # Added since target but file is missing in working directory
3171 (deladded, actions['drop'], discard),
3172 # Removed since target, before working copy parent
3173 (removed, actions['add'], discard),
3174 # Same as `removed` but an unknown file exists at the same path
3175 (removunk, actions['add'], check),
3176 # Removed since target, marked as such in working copy parent
3177 (dsremoved, actions['undelete'], discard),
3178 # Same as `dsremoved` but an unknown file exists at the same path
3179 (dsremovunk, actions['undelete'], check),
3180 ## the following sets do not result in any file changes
3181 # File with no modification
3182 (clean, actions['noop'], discard),
3183 # Existing file, not tracked anywhere
3184 (unknown, actions['unknown'], discard),
3185 )
3186
3187 for abs, (rel, exact) in sorted(names.items()):
3188 # target file to be touched on disk (relative to cwd)
3189 target = repo.wjoin(abs)
3190 # search the entry in the dispatch table.
3191 # if the file is in any of these sets, it was touched in the working
3192 # directory parent and we are sure it needs to be reverted.
3193 for table, (xlist, msg), dobackup in disptable:
3194 if abs not in table:
3195 continue
3196 if xlist is not None:
3197 xlist.append(abs)
3198 if dobackup:
3199 # If in interactive mode, don't automatically create
3200 # .orig files (issue4793)
3201 if dobackup == backupinteractive:
3202 tobackup.add(abs)
3203 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3204 bakname = scmutil.origpath(ui, repo, rel)
3205 ui.note(_('saving current version of %s as %s\n') %
3206 (rel, bakname))
3207 if not opts.get('dry_run'):
3208 if interactive:
3209 util.copyfile(target, bakname)
3210 else:
3211 util.rename(target, bakname)
3212 if ui.verbose or not exact:
3213 if not isinstance(msg, basestring):
3214 msg = msg(abs)
3215 ui.status(msg % rel)
3216 elif exact:
3217 ui.warn(msg % rel)
3218 break
3219
3220 if not opts.get('dry_run'):
3221 needdata = ('revert', 'add', 'undelete')
3222 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3223 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3224
3225 if targetsubs:
3226 # Revert the subrepos on the revert list
3227 for sub in targetsubs:
3228 try:
3229 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3230 except KeyError:
3231 raise error.Abort("subrepository '%s' does not exist in %s!"
3232 % (sub, short(ctx.node())))
3233
3234 def _revertprefetch(repo, ctx, *files):
3235 """Let extension changing the storage layer prefetch content"""
3236 pass
3237
3238 def _performrevert(repo, parents, ctx, actions, interactive=False,
3239 tobackup=None):
3240 """function that actually performs all the actions computed for revert
3241
3242 This is an independent function to let extensions plug in and react to
3243 the imminent revert.
3244
3245 Make sure you have the working directory locked when calling this function.
3246 """
3247 parent, p2 = parents
3248 node = ctx.node()
3249 excluded_files = []
3250 matcher_opts = {"exclude": excluded_files}
3251
3252 def checkout(f):
3253 fc = ctx[f]
3254 repo.wwrite(f, fc.data(), fc.flags())
3255
3256 audit_path = pathutil.pathauditor(repo.root)
3257 for f in actions['forget'][0]:
3258 if interactive:
3259 choice = \
3260 repo.ui.promptchoice(
3261 _("forget added file %s (yn)?$$ &Yes $$ &No")
3262 % f)
3263 if choice == 0:
3264 repo.dirstate.drop(f)
3265 else:
3266 excluded_files.append(repo.wjoin(f))
3267 else:
3268 repo.dirstate.drop(f)
3269 for f in actions['remove'][0]:
3270 audit_path(f)
3271 try:
3272 util.unlinkpath(repo.wjoin(f))
3273 except OSError:
3274 pass
3275 repo.dirstate.remove(f)
3276 for f in actions['drop'][0]:
3277 audit_path(f)
3278 repo.dirstate.remove(f)
3279
3280 normal = None
3281 if node == parent:
3282 # We're reverting to our parent. If possible, we'd like status
3283 # to report the file as clean. We have to use normallookup for
3284 # merges to avoid losing information about merged/dirty files.
3285 if p2 != nullid:
3286 normal = repo.dirstate.normallookup
3287 else:
3288 normal = repo.dirstate.normal
3289
3290 newlyaddedandmodifiedfiles = set()
3291 if interactive:
3292 # Prompt the user for changes to revert
3293 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3294 m = scmutil.match(ctx, torevert, matcher_opts)
3295 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3296 diffopts.nodates = True
3297 diffopts.git = True
3298 reversehunks = repo.ui.configbool('experimental',
3299 'revertalternateinteractivemode',
3300 True)
3301 if reversehunks:
3302 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3303 else:
3304 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3305 originalchunks = patch.parsepatch(diff)
3306 operation = 'discard' if node == parent else 'revert'
3307
3308 try:
3309
3310 chunks, opts = recordfilter(repo.ui, originalchunks,
3311 operation=operation)
3312 if reversehunks:
3313 chunks = patch.reversehunks(chunks)
3314
3315 except patch.PatchError as err:
3316 raise error.Abort(_('error parsing patch: %s') % err)
3317
3318 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3319 if tobackup is None:
3320 tobackup = set()
3321 # Apply changes
3322 fp = stringio()
3323 for c in chunks:
3324 # Create a backup file only if this hunk should be backed up
3325 if ishunk(c) and c.header.filename() in tobackup:
3326 abs = c.header.filename()
3327 target = repo.wjoin(abs)
3328 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3329 util.copyfile(target, bakname)
3330 tobackup.remove(abs)
3331 c.write(fp)
3332 dopatch = fp.tell()
3333 fp.seek(0)
3334 if dopatch:
3335 try:
3336 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3337 except patch.PatchError as err:
3338 raise error.Abort(str(err))
3339 del fp
3340 else:
3341 for f in actions['revert'][0]:
3342 checkout(f)
3343 if normal:
3344 normal(f)
3345
3346 for f in actions['add'][0]:
3347 # Don't checkout modified files, they are already created by the diff
3348 if f not in newlyaddedandmodifiedfiles:
3349 checkout(f)
3350 repo.dirstate.add(f)
3351
3352 normal = repo.dirstate.normallookup
3353 if node == parent and p2 == nullid:
3354 normal = repo.dirstate.normal
3355 for f in actions['undelete'][0]:
3356 checkout(f)
3357 normal(f)
3358
3359 copied = copies.pathcopies(repo[parent], ctx)
3360
3361 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3362 if f in copied:
3363 repo.dirstate.copy(copied[f], f)
3364
3365 def command(table):
3366 """Returns a function object to be used as a decorator for making commands.
3367
3368 This function receives a command table as its argument. The table should
3369 be a dict.
3370
3371 The returned function can be used as a decorator for adding commands
3372 to that command table. This function accepts multiple arguments to define
3373 a command.
3374
3375 The first argument is the command name.
3376
3377 The options argument is an iterable of tuples defining command arguments.
3378 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3379
3380 The synopsis argument defines a short, one line summary of how to use the
3381 command. This shows up in the help output.
3382
3383 The norepo argument defines whether the command does not require a
3384 local repository. Most commands operate against a repository, thus the
3385 default is False.
3386
3387 The optionalrepo argument defines whether the command optionally requires
3388 a local repository.
3389
3390 The inferrepo argument defines whether to try to find a repository from the
3391 command line arguments. If True, arguments will be examined for potential
3392 repository locations. See ``findrepo()``. If a repository is found, it
3393 will be used.
3394 """
3395 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3396 inferrepo=False):
3397 def decorator(func):
3398 func.norepo = norepo
3399 func.optionalrepo = optionalrepo
3400 func.inferrepo = inferrepo
3401 if synopsis:
3402 table[name] = func, list(options), synopsis
3403 else:
3404 table[name] = func, list(options)
3405 return func
3406 return decorator
3407
3408 return cmd
3409
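A minimal sketch of the decorator as an extension would use it; the command name, option tuple and function body below are purely illustrative:

    from mercurial import cmdutil

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    @command('hello',
             [('g', 'greeting', 'Hello', 'greeting to use')],
             'hg hello [-g TEXT] NAME')
    def hello(ui, repo, name, **opts):
        """print a greeting for NAME"""
        ui.write("%s, %s!\n" % (opts['greeting'], name))

    # cmdtable now maps 'hello' to (hello, [option tuple], synopsis)
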
3410 def checkunresolved(ms):
3411 if list(ms.unresolved()):
@@ -3414,117 +20,3 b' def checkunresolved(ms):'
3414 if ms.mdstate() != 's' or list(ms.driverresolved()):
3415 raise error.Abort(_('driver-resolved merge conflicts'),
3416 hint=_('run "hg resolve --all" to resolve'))
3417
3418 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3419 # commands.outgoing. "missing" is "missing" of the result of
3420 # "findcommonoutgoing()"
3421 outgoinghooks = util.hooks()
3422
3423 # a list of (ui, repo) functions called by commands.summary
3424 summaryhooks = util.hooks()
3425
3426 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3427 #
3428 # functions should return tuple of booleans below, if 'changes' is None:
3429 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3430 #
3431 # otherwise, 'changes' is a tuple of tuples below:
3432 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3433 # - (desturl, destbranch, destpeer, outgoing)
3434 summaryremotehooks = util.hooks()
3435
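For example, an extension can register a callback on one of these hook points during its uisetup(); per the comment above, summaryhooks callbacks receive (ui, repo). Everything named 'myext' here is hypothetical:

    from mercurial import cmdutil

    def summaryhook(ui, repo):
        ui.write("myext: one extra line for 'hg summary'\n")

    def uisetup(ui):
        cmdutil.summaryhooks.add('myext', summaryhook)
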
3436 # A list of state files kept by multistep operations like graft.
3437 # Since graft cannot be aborted, it is considered 'clearable' by update.
3438 # note: bisect is intentionally excluded
3439 # (state file, clearable, allowcommit, error, hint)
3440 unfinishedstates = [
3441 ('graftstate', True, False, _('graft in progress'),
3442 _("use 'hg graft --continue' or 'hg update' to abort")),
3443 ('updatestate', True, False, _('last update was interrupted'),
3444 _("use 'hg update' to get a consistent checkout"))
3445 ]
3446
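An extension that keeps its own multistep state can append an entry in the same (state file, clearable, allowcommit, error, hint) shape; the state file name and hints below are illustrative:

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        ('myextstate', False, False, _('myext operation in progress'),
         _("use 'hg myext --continue' or 'hg myext --abort'")))
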
3447 def checkunfinished(repo, commit=False):
3448 '''Look for an unfinished multistep operation, like graft, and abort
3449 if found. It's probably good to check this right before
3450 bailifchanged().
3451 '''
3452 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3453 if commit and allowcommit:
3454 continue
3455 if repo.vfs.exists(f):
3456 raise error.Abort(msg, hint=hint)
3457
3458 def clearunfinished(repo):
3459 '''Check for unfinished operations (as above), and clear the ones
3460 that are clearable.
3461 '''
3462 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3463 if not clearable and repo.vfs.exists(f):
3464 raise error.Abort(msg, hint=hint)
3465 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3466 if clearable and repo.vfs.exists(f):
3467 util.unlink(repo.join(f))
3468
3469 afterresolvedstates = [
3470 ('graftstate',
3471 _('hg graft --continue')),
3472 ]
3473
3474 def howtocontinue(repo):
3475 '''Check for an unfinished operation and return the command to finish
3476 it.
3477
3478 afterresolvedstates tuples define a .hg/{file} and the corresponding
3479 command needed to finish it.
3480
3481 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3482 a boolean.
3483 '''
3484 contmsg = _("continue: %s")
3485 for f, msg in afterresolvedstates:
3486 if repo.vfs.exists(f):
3487 return contmsg % msg, True
3488 workingctx = repo[None]
3489 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3490 for s in workingctx.substate)
3491 if dirty:
3492 return contmsg % _("hg commit"), False
3493 return None, None
3494
3495 def checkafterresolved(repo):
3496 '''Inform the user about the next action after completing hg resolve
3497
3498 If there's a matching afterresolvedstates, howtocontinue will yield
3499 repo.ui.warn as the reporter.
3500
3501 Otherwise, it will yield repo.ui.note.
3502 '''
3503 msg, warning = howtocontinue(repo)
3504 if msg is not None:
3505 if warning:
3506 repo.ui.warn("%s\n" % msg)
3507 else:
3508 repo.ui.note("%s\n" % msg)
3509
3510 def wrongtooltocontinue(repo, task):
3511 '''Raise an abort suggesting how to properly continue if there is an
3512 active task.
3513
3514 Uses howtocontinue() to find the active task.
3515
3516 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3517 a hint.
3518 '''
3519 after = howtocontinue(repo)
3520 hint = None
3521 if after[1]:
3522 hint = after[0]
3523 raise error.Abort(_('no %s in progress') % task, hint=hint)
3524
3525 class dirstateguard(dirstateguardmod.dirstateguard):
3526 def __init__(self, repo, name):
3527 dirstateguardmod.dirstateguard.__init__(self, repo, name)
3528 repo.ui.deprecwarn(
3529 'dirstateguard has moved from cmdutil to dirstateguard',
3530 '4.1')