##// END OF EJS Templates
formatter: wrap (tmpl, mapfile) by named tuple...
Yuya Nishihara -
r32838:615ec3f1 default
parent child Browse files
Show More
@@ -1,3587 +1,3590
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 encoding,
30 30 error,
31 31 formatter,
32 32 graphmod,
33 33 lock as lockmod,
34 34 match as matchmod,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 registrar,
41 41 repair,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 smartset,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
stringio = util.stringio

# templates of common command options
#
# each entry is (shortopt, longopt, default, help[, metavar]); lists are
# concatenated into a command's option table by the command definitions

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
163 163
def ishunk(x):
    """Report whether x is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
167 167
def newandmodified(chunks, originalchunks):
    """Return the filenames of hunks that create a new file and are not
    present verbatim in originalchunks (i.e. were modified while
    recording)."""
    touched = set()
    for c in chunks:
        if (ishunk(c) and c.header.isnewfile()
                and c not in originalchunks):
            touched.add(c.header.filename())
    return touched
175 175
def parsealiases(cmd):
    """Split a command table key into its list of alias names.

    The leading '^' marker (flagging frequently-used commands) is
    stripped before splitting on '|'.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
178 178
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original (unwrapped) write method so callers can
    restore it when done.
    """
    realwrite = ui.write

    def labeledwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    def wrapped(*args, **kwargs):
        return labeledwrite(realwrite, *args, **kwargs)

    setattr(ui, 'write', wrapped)
    return realwrite
191 191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user select hunks, via curses when enabled.

    When usecurses is false this falls straight through to the plain
    text-prompt implementation in patch.filterpatch.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # test mode: drive the chunk selector from a script file
        selector = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        selector = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, selector, operation)
204 204
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks; return (chunks, opts).

    *operation* is used to build ui messages indicating what kind of
    filtering the user is doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    wantcurses = crecordmod.checkcurses(ui)
    testpath = ui.config('experimental', 'crecordtest', None)
    origwrite = setupwrapcolorwrite(ui)
    try:
        chunks, newopts = filterchunks(ui, originalhunks, wantcurses,
                                       testpath, operation)
    finally:
        # always unwrap ui.write, even if filtering aborted
        ui.write = origwrite
    return chunks, newopts
221 221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via commitfunc.

    'filterfn' is called with (ui, originalchunks) and returns the
    selected chunks plus updated options; 'backupall' forces backing up
    every changed file rather than only those being recorded;
    'cmdsuggest' is the command name suggested in the abort message when
    the ui is non-interactive.  Delegates the actual commit to
    commitfunc(ui, repo, *files, **opts) under the wlock.
    """
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # always use git-style diffs with function context so the
        # record UI has maximum information to present
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers without files() (e.g. bare hunks) are skipped
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # write only the selected chunks into an in-memory patch
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup: a failure to restore/remove backups
                # must not mask the primary result
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # the actual filtering/commit runs under the working-dir lock
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
404 404
def findpossible(cmd, table, strict=False):
    """Return cmd -> (aliases, command table entry) for each match.

    Debug commands (or their aliases) are returned only when no normal
    command matches.  Also returns the flat list of all alias names
    considered.
    """
    normal = {}
    debug = {}

    # short-circuit exact matches, "log" alias beats "^log|history"
    keys = [cmd] if cmd in table else table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        hit = None
        if cmd in aliases:
            hit = cmd
        elif not strict:
            # unambiguous-prefix matching
            for alias in aliases:
                if alias.startswith(cmd):
                    hit = alias
                    break
        if hit is None:
            continue
        if aliases[0].startswith("debug") or hit.startswith("debug"):
            debug[hit] = (aliases, table[entry])
        else:
            normal[hit] = (aliases, table[entry])

    if not normal and debug:
        normal = debug

    return normal, allcmds
442 442
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins outright
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        # single prefix match
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
458 458
def findrepo(p):
    """Walk upward from p looking for a directory containing '.hg'.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # hit the filesystem root
            return None
        p = parent
    return p
466 466
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory is clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to the Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
484 484
def logmessage(ui, opts):
    """Get the log message according to the -m and -l options."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                # normalize line endings while reading the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
503 503
def mergeeditform(ctxorbool, baseformname):
    """Return the appropriate editform name (referencing a committemplate).

    'ctxorbool' is either a ctx to be committed, or a bool indicating
    whether merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    return baseformname + (".merge" if ismerge else ".normal")
520 520
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Get the appropriate commit message editor according to '--edit'.

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before the emptiness check.  It returns the actual text to be
    stored in history, which allows changing the description before
    storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line; the 'HG: ' prefix and
    EOL are added automatically.

    'editform' is a dot-separated list of names, to distinguish the
    purpose of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever one of
    'finishdesc' or 'extramsg' is specified, because they are specific
    to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
551 551
def loglimit(opts):
    """Get the log limit according to option -l/--limit."""
    raw = opts.get('limit')
    if not raw:
        # absent or empty: no limit
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
565 565
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand '%'-format specifiers in an output filename pattern.

    Supported specifiers (each available only when its source datum is
    provided):
      %%  literal '%'             %b  basename of the repository root
      %H  full changeset hash     %h  short changeset hash
      %R  changeset revision      %r  zero-padded revision (revwidth wide)
      %m  desc with non-word characters replaced by '_'
      %N  total number of patches (total)
      %n  zero-padded sequence number (seqno; padded to len(str(total))
          when total is also given)
      %s  basename of pathname    %d  dirname of pathname or '.'
      %p  pathname

    Raises error.Abort when pat uses a specifier that is unknown or
    whose datum was not supplied.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }
    try:
        if node:
            # merged the previously duplicated 'if node:' guards: all
            # node-dependent specifiers are registered together
            expander.update({
                'H': lambda: hex(node),
                'R': lambda: str(repo.changelog.rev(node)),
                'h': lambda: short(node),
                # raw string avoids an invalid escape-sequence warning
                'm': lambda: re.sub(r'[^\w]', '_', str(desc)),
                'r': lambda: str(repo.changelog.rev(node)).zfill(
                    revwidth or 0),
            })
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # both known: pad the sequence number to the width of the
            # total (e.g. '03' of 12)
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
611 611
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
615 615
616 616 class _unclosablefile(object):
617 617 def __init__(self, fp):
618 618 self._fp = fp
619 619
620 620 def close(self):
621 621 pass
622 622
623 623 def __iter__(self):
624 624 return iter(self._fp)
625 625
626 626 def __getattr__(self, attr):
627 627 return getattr(self._fp, attr)
628 628
629 629 def __enter__(self):
630 630 return self
631 631
632 632 def __exit__(self, exc_type, exc_value, exc_tb):
633 633 pass
634 634
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the output file described by pat, expanding % specifiers.

    A stdio pattern ('-' or empty) yields an unclosable wrapper around
    ui.fout/ui.fin.  'modemap' lets successive opens of the same file
    switch from truncate to append.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # subsequent writes to this file should append
            modemap[fn] = 'ab'
    return open(fn, mode)
653 653
def openrevlog(repo, cmd, file_, opts):
    """Open the changelog, manifest, a filelog or a given revlog.

    Raises Abort for conflicting/incomplete option combinations and
    CommandError when no revlog can be determined.
    """
    wantcl = opts['changelog']
    wantmf = opts['manifest']
    wantdir = opts['dir']

    # validate the option combination up front
    problem = None
    if wantcl and wantmf:
        problem = _('cannot specify --changelog and --manifest at the same '
                    'time')
    elif wantcl and wantdir:
        problem = _('cannot specify --changelog and --dir at the same time')
    elif wantcl or wantmf or wantdir:
        if file_:
            problem = _('cannot specify filename with --changelog or '
                        '--manifest')
        elif not repo:
            problem = _('cannot specify --changelog or --manifest or --dir '
                        'without a repository')
    if problem:
        raise error.Abort(problem)

    r = None
    if repo:
        if wantcl:
            r = repo.unfiltered().changelog
        elif wantdir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(wantdir)
            if len(dirlog):
                r = dirlog
        elif wantmf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening the .i file directly from disk
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
698 698
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, when rename=True, move) files matched by pats.

    The last element of pats is the destination; the rest are sources.
    Honors opts 'after' (record a copy/move that already happened on
    disk), 'dry_run' and 'force'.  Returns True when any per-file copy
    reported a failure, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # resolve one source pattern to a list of (abs, rel, exact)
        # tuples, warning about unmanaged/removed files
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform (or record) a single copy; returns True on failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename on a case-insensitive filesystem
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename via a temporary name so only the case changes
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets already exist for
                    # the given prefix length
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
944 944
## facility to let extension process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
965 965
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (msg, node, rejects) tuple: a status message (or None when
    the patch carried no data), the new node (or None when no commit was
    made), and whether any hunks were rejected in --partial mode.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    # no filename means the patch carried no actual diff data
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        # resolve the parents the new changeset will be committed against
        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit an in-memory changeset without touching the
            # working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1135 1135
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1143 1143
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" through write().

    The header lines (user, date, branch, node, parents), any
    extension-supplied extra headers, the description and finally the diff
    against the selected parent are all pushed through the write callback.
    """
    node = scmutil.binnode(ctx)
    branch = ctx.branch()
    parents = [p.node() for p in ctx.parents() if p]
    if switch_parent:
        # diff against the second recorded parent instead of the first
        parents.reverse()
    prev = parents[0] if parents else nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # extensions may contribute extra header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1176 1176
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # choose the writer up front when it does not vary per revision
    dest = '<unnamed>'
    write = None
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            # Commit always has a first line.
            desc = ctx.description().rstrip().split('\n')[0]
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1235 1235
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Output goes to fp when given, otherwise through ui.write().  When
    'stat' is true a diffstat (with zero context) is rendered instead of
    the full diff.  'root' restricts the diff to paths under that
    directory; 'listsubrepos' recurses into subrepositories afterwards.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # warn about match patterns that can never hit under the root
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1292 1292
1293 1293 def _changesetlabels(ctx):
1294 1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1295 1295 if ctx.obsolete():
1296 1296 labels.append('changeset.obsolete')
1297 1297 if ctx.troubled():
1298 1298 labels.append('changeset.troubled')
1299 1299 for trouble in ctx.troubles():
1300 1300 labels.append('trouble.%s' % trouble)
1301 1301 return ' '.join(labels)
1302 1302
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, output per rev is stored in self.hunk/self.header
        # until flush() is called
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        """Write buffered header/hunk data for ctx; return 1 if a hunk
        was written, 0 otherwise."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only re-emit the header when it changed
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # emit any accumulated footer at end of output
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        """Render ctx, buffering the output per rev when self.buffered."""
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if ctx.troubled():
            # i18n: column positioning for "hg log"
            self.ui.write(_("trouble:     %s\n") % ', '.join(ctx.troubles()),
                          label='log.trouble')

        if self.ui.debugflag:
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        """Show the diff and/or diffstat of ctx against its first parent,
        depending on the 'stat' and 'patch' options."""
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1478 1478
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening '[' has been written yet
        self._first = True

    def close(self):
        # close the JSON array; emit '[]' when nothing was shown at all
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # the working directory context has no rev/node; render as null
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture the diffstat output and embed it as a JSON string
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1577 1577
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        tmplspec = logtemplatespec(tmpl, mapfile)
        self.t = formatter.loadtemplater(ui, 'changeset', tmplspec,
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # later entries in tmplmodes override earlier ones, so the most
        # specific active mode wins for each part
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document footer before the base class flushes it
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1648 1649
logtemplatespec = formatter.templatespec

def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    Returns a logtemplatespec (tmpl, mapfile) named tuple.  See
    formatter.lookuptemplate() for details.
    """

    # fall back to ui settings; templates are stronger than styles
    if not tmpl and not style:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # a bare name refers to a shipped map-cmdline.* style file
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    if not tmpl:
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1676 1679
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'

    The templater is built with no file matcher and empty diff options;
    'buffered' is forwarded so output can be collected per revision
    instead of being written immediately.
    """
    return changeset_templater(ui, repo, matchfn=None, diffopts={},
                               tmpl=tmpl, mapfile=None, buffered=buffered)
1681 1684
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when a patch or diffstat is requested
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = _lookuplogtemplate(ui, opts.get('template'),
                                       opts.get('style'))
    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)
    return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1708 1711
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    succs = marker.succnodes()
    # successors only shown when the marker has any (condwrite)
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is rendered separately above, so drop it from metadata
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1729 1732
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    m = scmutil.matchall(repo)
    matched = {}

    # collect the dates of all changesets matching the spec
    def prep(ctx, fns):
        d = ctx.date()
        if datematch(d[0]):
            matched[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1750 1753
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes, doubling each step until capped at sizelimit."""
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
1756 1759
class FileWalkError(Exception):
    """Raised when the file history cannot be walked via filelogs alone."""
    pass
1759 1762
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    'fncache' is populated as a side effect: it maps each wanted rev to
    the list of matched files changed there.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        # local list of tuples; deliberately shadows the outer 'revs' argument
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # Yield (filename, filenode-or-None) pairs for every file to walk.
        # Rename sources discovered during the walk below are appended to
        # 'copies' and picked up here as well.
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1856 1859
class _followfilter(object):
    """Incrementally decide whether revisions belong to a follow walk.

    The first rev passed to match() becomes the anchor. Afterwards,
    revisions above the anchor match when they descend from it, and
    revisions below the anchor match when they are ancestors of it.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            parents = self.repo.changelog.parentrevs(rev)
            if self.onlyfirst:
                return parents[0:1]
            return [p for p in parents if p != nullrev]

        if self.startrev == nullrev:
            # first call anchors the walk
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: rev matches if one of its parents already matched
            if not self.roots:
                self.roots.add(self.startrev)
            if any(p in self.roots for p in realparents(rev)):
                self.roots.add(rev)
                return True
        else:
            # backwards: rev matches if it is on the known ancestor frontier
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1894 1897
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # The slow path reads every changeset: needed when patterns (rather
    # than plain file names) are involved, or when --removed asks for
    # revisions that only remove files.
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): assumes 'wanted' supports subtraction by a
                # list here (a plain set or lazywantedset would not) —
                # confirm --prune cannot be combined with the paths that
                # produce those types.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            # --follow without file arguments: filter by ancestry instead
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # call prepare() on each context in forward (ascending) order
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # then yield contexts in the window's original order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2032 2035
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a rev -> matcher callable for --patch/--stat with --follow.

    The matcher for a revision selects the ancestor names of the followed
    files at that revision. The rev -> names mapping is built lazily, on
    the first call, by replaying the graph traversal the --follow revset
    already performed (not strictly "correct", but good enough).
    """
    revtonames = {}
    populated = [False]  # mutable cell: py2-compatible "nonlocal" flag
    pctx = repo['.']

    def _populate():
        for fn in files:
            fctx = pctx[fn]
            revtonames.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                revtonames.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        if not populated[0]:
            # lazy initialization on first use
            populated[0] = True
            _populate()
        return scmutil.matchfiles(repo, revtonames.get(rev, []))

    return filematcher
2060 2063
2061 2064 def _makenofollowlogfilematcher(repo, pats, opts):
2062 2065 '''hook for extensions to override the filematcher for non-follow cases'''
2063 2066 return None
2064 2067
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option name to (revset template, operator used to join
    # list values). '%(val)s'/'%(val)r' is substituted with the option
    # value when the expression is assembled at the bottom.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # True when revs ascend from startrev: --follow then means descendants
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # assemble the final revset expression from the options gathered above
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2216 2219
def _logrevs(repo, opts):
    """Resolve the set of revisions a log-family command should visit."""
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # an explicit user-specified set wins
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # following from an empty working directory: nothing to show
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2231 2234
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for a graph log command.

    'revs' is an iterable of revision numbers; 'expr' is the revset
    string built from the log options and file patterns (or None) that
    was used to filter 'revs'. 'filematcher' is None unless --stat or
    --patch was given, in which case it maps a revision number to the
    match object selecting the files detailed for that revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honour --limit by truncating to the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))

    return revs, expr, filematcher
2262 2265
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for a plain log command.

    'revs' is an iterable of revision numbers; 'expr' is the revset
    string built from the log options and file patterns (or None) that
    was used to filter 'revs'. 'filematcher' is None unless --stat or
    --patch was given, in which case it maps a revision number to the
    match object selecting the files detailed for that revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honour --limit by truncating to the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))

    return revs, expr, filematcher
2288 2291
def _graphnodeformatter(ui, displayer):
    """Return a (repo, ctx) -> str callable rendering the graph node glyph."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # no template configured: use the default {graphnode} keyword
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache

    def formatnode(repo, ctx):
        # fill in the per-revision template properties and render
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2309 2312
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Show revisions from 'dag' next to an ASCII revision graph.

    'dag' yields (rev, type, ctx, parents) tuples; 'edgefn' (e.g.
    graphmod.asciiedges) maps each entry to drawable edge data.
    'getrenamed', when given, is used to report copies; 'filematcher'
    maps a rev to the match object used when showing that revision.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            # collect (destination, source) pairs for copied/renamed files
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # the displayer buffered its output; pop it and draw the graph rows
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2357 2360
def graphlog(ui, repo, pats, opts):
    """Show revision history alongside an ASCII revision graph.

    Parameters are identical to the log command's.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    if opts.get('copies'):
        # resolve renames up to the newest requested revision (if any)
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        else:
            endrev = None
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    else:
        getrenamed = None

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2374 2377
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph was supplied."""
    for badflag in ["newest_first"]:
        if opts.get(badflag):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % badflag.replace("_", "-"))
2380 2383
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' (in place), apply any --limit, and return a DAG
    walker over the result via graphmod.nodes()."""
    limit = loglimit(opts)
    nodes.reverse()
    if limit is None:
        selected = nodes
    else:
        selected = nodes[:limit]
    return graphmod.nodes(repo, selected)
2387 2390
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by 'match' for addition.

    Returns the list of bad (unaddable) file names; subrepo failures are
    reported relative to 'prefix'.
    """
    bad = []

    def join(f):
        return os.path.join(prefix, f)

    def badfn(f, msg):
        bad.append(f)
        return match.bad(f, msg)

    names = []
    wctx = repo[None]
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    else:
        cca = None

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot
    # of clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        wanted = (exact
                  or not explicitonly and f not in wctx
                  and repo.wvfs.lexists(f))
        if wanted:
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            # without --subrepos, ask the subrepo to add explicit names only
            explicitinsub = not opts.get(r'subrepos')
            bad.extend(sub.add(ui, submatch, prefix, explicitinsub, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2430 2433
def addwebdirpath(repo, serverpath, webconf):
    """Register 'repo' (and any subrepos recorded in .hgsub history)
    under 'serverpath' in the hgweb config mapping 'webconf'."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2439 2442
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the files matched by 'match' without deleting them.

    Returns a pair (bad, forgot): names that could not be forgotten and
    names that were; subrepo failures are reported relative to 'prefix'.
    """
    def join(f):
        return os.path.join(prefix, f)

    bad = []

    def badfn(f, msg):
        bad.append(f)
        return match.bad(f, msg)

    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend(subpath + '/' + f for f in subbad)
            forgot.extend(subpath + '/' + f for f in subforgot)
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f in repo.dirstate or repo.wvfs.isdir(f) or f in forgot:
                continue
            if repo.wvfs.exists(f):
                # Don't complain if the exact case match wasn't given.
                # But don't do this until after checking 'forgot', so
                # that subrepo files aren't normalized, and this op is
                # purely from data cached by the status walk above.
                if repo.dirstate.normalize(f) in repo.dirstate:
                    continue
            ui.warn(_('not removing %s: '
                      'file is already untracked\n')
                    % match.rel(f))
            bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2487 2490
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files in 'ctx' matched by 'm' through formatter 'fm'.

    Returns 0 if at least one file was printed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        recurse = m.exact(subpath) or subrepos
        if not (recurse or any(submatch.files())):
            continue
        sub = ctx.sub(subpath)
        try:
            if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                ret = 0
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % m.abs(subpath))

    return ret
2517 2520
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Remove the files matched by 'm' from the repository.

    'after' only records files already deleted from disk; 'force' also
    removes added/modified files; 'subrepos' recurses into subrepos.
    Returns 0 on success, 1 if any file could not be removed.

    When 'warnings' is given (as in recursive subrepo calls), warning
    messages are accumulated there instead of being printed here.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    # status tuple indices: 0=modified, 1=added, 3=deleted, 6=clean
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True  # top-level call: print accumulated warnings ourselves
    else:
        warn = False  # recursive call: the caller prints them

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # select which tracked files to drop, warning about the rest
    if force:
        list = modified + deleted + clean + added
    elif after:
        # --after: only forget files already gone from disk
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2635 2638
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Dump the contents of files in ctx selected by matcher.

    Output goes through the formatter basefm, or through a per-file
    formatter derived from fntemplate when one is given.  prefix is
    prepended to paths when recursing into subrepos.  Returns 0 if at
    least one file was written, 1 otherwise.
    """
    err = 1

    def emit(path):
        outname = None
        if fntemplate:
            outname = makefilename(repo, fntemplate, ctx.node(),
                                   pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, outname, opts) as fm:
            content = ctx[path].data()
            if opts.get('decode'):
                content = repo.wwritedata(path, content)
            fm.startitem()
            fm.write('data', '%s', content)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        single = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(single)[0]:
                emit(single)
                return 0
        except KeyError:
            pass

    for abspath in ctx.walk(matcher):
        emit(abspath)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2682 2685
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2699 2702
def samefile(f, ctx1, ctx2):
    """Report whether file f is identical in ctx1 and ctx2.

    "Identical" means same content and same flags.  A file absent from
    both manifests counts as the same; a file present in only one of
    them counts as different.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # same only if the other side doesn't have it either
        return not in2
    if not in2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2711 2714
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset ``old`` folding in current working-dir changes.

    A temporary intermediate commit is made first (when the working
    directory is dirty), then a replacement changeset is committed on top
    of ``old``'s parent.  Depending on configuration the rewritten
    changesets are either obsoleted or stripped.  Returns the node of the
    amended changeset (or ``old``'s node when nothing changed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # BUG FIX: the original tested the bound method ``old.p2``,
                # which is always truthy; the call is needed so copies from
                # the second parent are only merged in for real merges.
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2894 2897
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, invoking the editor only when empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2900 2903
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor to obtain a commit message for ctx.

    The initial editor buffer comes from the most specific matching
    [committemplate] template for ``editform`` (dotted names are tried
    from most to least specific), falling back to the plain text built
    by buildcommittext().  ``finishdesc``, if given, post-processes the
    edited text.  Raises error.Abort on an empty or (when
    ``unchangedmessagedetection`` is set) unmodified message.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try 'changeset.<editform parts>' from most to least specific
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        # no template configured at any specificity level
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop all remaining "HG:" helper lines
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2951 2954
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text for ctx from template tmpl."""
    ui = repo.ui
    tmpl, mapfile = _lookuplogtemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose every other [committemplate] key to the template engine
    for key, value in repo.ui.configitems('committemplate'):
        if key == 'changeset':
            continue
        t.t.cache[key] = value

    # the template expects a string, never None
    extramsg = extramsg or ''

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2968 2971
def hgprefix(msg):
    """Prefix each non-empty line of msg with "HG: ", dropping blank lines."""
    lines = msg.split("\n")
    return "\n".join("HG: %s" % line for line in lines if line)
2971 2974
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the plain-text (non-template) commit editor buffer for ctx."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("")  # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        add(hgprefix(_("subrepo %s") % s))
    for f in added:
        add(hgprefix(_("added %s") % f))
    for f in modified:
        add(hgprefix(_("changed %s") % f))
    for f in removed:
        add(hgprefix(_("removed %s") % f))
    if not added and not modified and not removed:
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2999 3002
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print informational messages after committing ``node`` on ``branch``.

    Warns when the commit created a new head and when it reopened a
    closed branch head; in debug/verbose mode also echoes the committed
    changeset.  ``bheads`` is the list of branch head nodes before the
    commit, if known.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if not opts.get('amend') and bheads and node not in bheads:
        # a new head was created unless one of the parents already was a
        # head of this branch
        headparents = [p for p in parents
                       if p.node() in bheads and p.branch() == branch]
        if not headparents:
            repo.ui.status(_('created new head\n'))
            # The message is not printed for initial roots. For the other
            # changesets, it is printed in the following situations:
            #
            # Par column: for the 2 parents with ...
            #   N: null or no parent
            #   B: parent is on another named branch
            #   C: parent is a regular non head changeset
            #   H: parent was a branch head of the current branch
            # Msg column: whether we print "created new head" message
            # In the following, it is assumed that there already exists some
            # initial branch heads of the current branch, otherwise nothing is
            # printed anyway.
            #
            # Par Msg Comment
            # N N  y  additional topo root
            #
            # B N  y  additional branch root
            # C N  y  additional topo head
            # H N  n  usual case
            #
            # B B  y  weird additional branch root
            # C B  y  branch merge
            # H B  n  merge with named branch
            #
            # C C  y  additional head from merge
            # C H  n  merge with a head
            #
            # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3047 3050
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status right after a commit."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3050 3053
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matched by pats to their state in changeset ctx.

    ``parents`` is the (p1, p2) node pair of the working directory.
    Classifies every matched file by comparing the target revision, the
    working directory parent and the dirstate, then dispatches each file
    to an action (revert/add/remove/drop/forget/undelete/noop/unknown)
    and finally applies the actions via _performrevert().  Matching
    subrepos are reverted recursively.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        # reverting to the second parent: treat it as "the" parent
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress "not found" warnings for paths already collected,
                # subrepos, and directories that contain collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3333 3336
3334 3337 def _revertprefetch(repo, ctx, *files):
3335 3338 """Let extension changing the storage layer prefetch content"""
3336 3339 pass
3337 3340
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """Actually perform all the actions computed for revert.

    This is an independent function so extensions can plug in and react
    to the imminent revert.

    ``actions`` maps action names to ([files], message) pairs as built by
    revert(); ``tobackup`` is the set of files to back up before an
    interactive modification.

    Make sure you have the working directory locked when calling this
    function.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user excluded at interactive prompts; fed to the matcher
    # (the matcher holds a reference, so appending below is enough)
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content and flags from the target revision
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # best-effort unlink, then mark as removed in the dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        # reversehunks: diff target->wdir and reverse selected hunks,
        # rather than diffing wdir->target directly
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode',
                True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy records for everything we rewrote
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3476 3479
class command(registrar.command):
    """command registration decorator exported through cmdutil.

    Functions registered through this entry point are tagged so that
    extensions.py can emit a deprecation warning for them.
    """
    def _doregister(self, func, name, *args, **kwargs):
        # flag for deprecwarn in extensions.py
        func._deprecatedregistrar = True
        parent = super(command, self)
        return parent._doregister(func, name, *args, **kwargs)
3481 3484
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#   (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl,   destbranch,   destpeer,   outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
# consumed by checkunfinished()/clearunfinished() below
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3510 3513
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation (e.g. graft) is in progress.

    With commit=True, states that explicitly allow committing are not
    treated as blockers. It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        skip = commit and allowcommit
        if not skip and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3521 3524
def clearunfinished(repo):
    '''Abort on any non-clearable unfinished operation, then remove the
    state files of the clearable ones.
    '''
    # first pass: a non-clearable state blocks everything
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: drop whatever may be cleared
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3532 3535
# (.hg/ state file, command that finishes the operation) pairs, consumed
# by howtocontinue() below to suggest the next step after 'hg resolve'
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3537 3540
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean; both are None when there is nothing to continue.
    '''
    contmsg = _("continue: %s")
    for statefile, cmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmd, True
    wctx = repo[None]
    # dirty working directory (including subrepos) means an uncommitted
    # change is the thing to continue with
    if any(repo.status()) or any(wctx.sub(s).dirty() for s in wctx.substate):
        return contmsg % _("hg commit"), False
    return None, None
3558 3561
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, the message goes through
    repo.ui.warn; otherwise through repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3573 3576
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (howtocontinue() reported the non-warning
    'hg commit' case), no hint is attached.
    '''
    msg, warning = howtocontinue(repo)
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,483 +1,486
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally which isn't
22 22 supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.fout = sys.stdout # redirect to doctest
49 49 ... ui.verbose = verbose
50 50 ... return fn(ui, ui.formatter(fn.__name__, opts))
51 51
52 52 Basic example:
53 53
54 54 >>> def files(ui, fm):
55 55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
56 56 ... for f in files:
57 57 ... fm.startitem()
58 58 ... fm.write('path', '%s', f[0])
59 59 ... fm.condwrite(ui.verbose, 'date', ' %s',
60 60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
61 61 ... fm.data(size=f[1])
62 62 ... fm.plain('\\n')
63 63 ... fm.end()
64 64 >>> show(files)
65 65 foo
66 66 bar
67 67 >>> show(files, verbose=True)
68 68 foo 1970-01-01 00:00:00
69 69 bar 1970-01-01 00:00:01
70 70 >>> show(files, template='json')
71 71 [
72 72 {
73 73 "date": [0, 0],
74 74 "path": "foo",
75 75 "size": 123
76 76 },
77 77 {
78 78 "date": [1, 0],
79 79 "path": "bar",
80 80 "size": 456
81 81 }
82 82 ]
83 83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
84 84 path: foo
85 85 date: 1970-01-01T00:00:00+00:00
86 86 path: bar
87 87 date: 1970-01-01T00:00:01+00:00
88 88
89 89 Nested example:
90 90
91 91 >>> def subrepos(ui, fm):
92 92 ... fm.startitem()
93 93 ... fm.write('repo', '[%s]\\n', 'baz')
94 94 ... files(ui, fm.nested('files'))
95 95 ... fm.end()
96 96 >>> show(subrepos)
97 97 [baz]
98 98 foo
99 99 bar
100 100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
101 101 baz: foo, bar
102 102 """
103 103
104 104 from __future__ import absolute_import
105 105
106 import collections
106 107 import contextlib
107 108 import itertools
108 109 import os
109 110
110 111 from .i18n import _
111 112 from .node import (
112 113 hex,
113 114 short,
114 115 )
115 116
116 117 from . import (
117 118 error,
118 119 pycompat,
119 120 templatefilters,
120 121 templatekw,
121 122 templater,
122 123 util,
123 124 )
124 125
125 126 pickle = util.pickle
126 127
127 128 class _nullconverter(object):
128 129 '''convert non-primitive data types to be processed by formatter'''
129 130 @staticmethod
130 131 def formatdate(date, fmt):
131 132 '''convert date tuple to appropriate format'''
132 133 return date
133 134 @staticmethod
134 135 def formatdict(data, key, value, fmt, sep):
135 136 '''convert dict or key-value pairs to appropriate dict format'''
136 137 # use plain dict instead of util.sortdict so that data can be
137 138 # serialized as a builtin dict in pickle output
138 139 return dict(data)
139 140 @staticmethod
140 141 def formatlist(data, name, fmt, sep):
141 142 '''convert iterable to appropriate list format'''
142 143 return list(data)
143 144
class baseformatter(object):
    '''common machinery for all formatter backends

    Items are accumulated one dict at a time via startitem()/write()/
    data(); _showitem() (overridden by subclasses) renders the finished
    item. The 'converter' object decides how non-primitive values are
    represented.
    '''
    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._style = opts.get("style")
        self._template = opts.get("template")
        self._converter = converter
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex
    def __enter__(self):
        return self
    def __exit__(self, exctype, excvalue, traceback):
        # flush pending output only on clean exit
        if exctype is None:
            self.end()
    def _showitem(self):
        '''show a formatted item once all data is collected'''
        pass
    def startitem(self):
        '''begin an item in the format list'''
        # render the previous item, if any, before opening a new one
        if self._item is not None:
            self._showitem()
        self._item = {}
    def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)
    def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)
    def formatlist(self, data, name, fmt='%s', sep=' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        pass
    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        data = pycompat.byteskwargs(data)
        self._item.update(data)
    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # data is stored unconditionally; only plain output honors 'cond'
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''
        pass
    def isplain(self):
        '''check for plain formatter usage'''
        return False
    def nested(self, field):
        '''sub formatter to store nested data in the specified field'''
        self._item[field] = data = []
        return _nestedformatter(self._ui, self._converter, data)
    def end(self):
        '''end output for the formatter'''
        if self._item is not None:
            self._showitem()
209 210
def nullformatter(ui, topic):
    '''build a formatter that renders nothing at all'''
    # baseformatter._showitem() is a no-op, so items are simply dropped
    return baseformatter(ui, topic, converter=_nullconverter, opts={})
213 214
class _nestedformatter(baseformatter):
    '''accumulate sub items into a list owned by the parent formatter'''
    def __init__(self, ui, converter, data):
        super(_nestedformatter, self).__init__(ui, topic='', opts={},
                                               converter=converter)
        self._data = data
    def _showitem(self):
        # hand the finished item back to the parent's container
        self._data.append(self._item)
221 222
222 223 def _iteritems(data):
223 224 '''iterate key-value pairs in stable order'''
224 225 if isinstance(data, dict):
225 226 return sorted(data.iteritems())
226 227 return data
227 228
228 229 class _plainconverter(object):
229 230 '''convert non-primitive data types to text'''
230 231 @staticmethod
231 232 def formatdate(date, fmt):
232 233 '''stringify date tuple in the given format'''
233 234 return util.datestr(date, fmt)
234 235 @staticmethod
235 236 def formatdict(data, key, value, fmt, sep):
236 237 '''stringify key-value pairs separated by sep'''
237 238 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
238 239 @staticmethod
239 240 def formatlist(data, name, fmt, sep):
240 241 '''stringify iterable separated by sep'''
241 242 return sep.join(fmt % e for e in data)
242 243
class plainformatter(baseformatter):
    '''the default, unstructured text output scheme'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # full hashes in debug mode, short ones otherwise
        self.hexfunc = hex if ui.debugflag else short
        if ui is out:
            self._write = ui.write
        else:
            self._write = lambda s, **opts: out.write(s)
    def startitem(self):
        # plain output has no item boundaries
        pass
    def data(self, **data):
        # extra data is invisible in plain mode
        pass
    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''write only when the caller's condition holds'''
        if cond:
            self._write(deftext % fielddata, **opts)
    def plain(self, text, **opts):
        self._write(text, **opts)
    def isplain(self):
        return True
    def nested(self, field):
        # nested data will be directly written to ui
        return self
    def end(self):
        pass
274 275
class debugformatter(baseformatter):
    '''dump items as a Python-ish list of reprs (debug output)'''
    def __init__(self, ui, out, topic, opts):
        super(debugformatter, self).__init__(ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("%s = [\n" % self._topic)
    def _showitem(self):
        self._out.write(" %s,\n" % repr(self._item))
    def end(self):
        super(debugformatter, self).end()
        # close the list opened in __init__
        self._out.write("]\n")
285 286
class pickleformatter(baseformatter):
    '''collect all items and emit them as one pickle blob at end()'''
    def __init__(self, ui, out, topic, opts):
        super(pickleformatter, self).__init__(ui, topic, opts,
                                              _nullconverter)
        self._out = out
        self._data = []
    def _showitem(self):
        self._data.append(self._item)
    def end(self):
        super(pickleformatter, self).end()
        self._out.write(pickle.dumps(self._data))
296 297
class jsonformatter(baseformatter):
    '''stream items as a JSON array of objects'''
    def __init__(self, ui, out, topic, opts):
        super(jsonformatter, self).__init__(ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("[")
        self._first = True
    def _showitem(self):
        # a comma is needed before every item except the first
        if self._first:
            self._first = False
        else:
            self._out.write(",")

        self._out.write("\n {\n")
        entries = []
        for k, v in sorted(self._item.items()):
            u = templatefilters.json(v, paranoid=False)
            entries.append(' "%s": %s' % (k, u))
        self._out.write(",\n".join(entries))
        self._out.write("\n }")
    def end(self):
        super(jsonformatter, self).end()
        self._out.write("\n]\n")
322 323
class _templateconverter(object):
    '''convert non-primitive data types to be processed by templater'''
    @staticmethod
    def formatdate(date, fmt):
        '''return date tuple'''
        return date
    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''build object that can be evaluated as either plain string or dict'''
        data = util.sortdict(_iteritems(data))
        def gen():
            # lazy plain-text rendering, produced only when stringified
            yield _plainconverter.formatdict(data, key, value, fmt, sep)
        return templatekw.hybriddict(data, key=key, value=value, fmt=fmt,
                                     gen=gen())
    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''build object that can be evaluated as either plain string or list'''
        data = list(data)
        def gen():
            # lazy plain-text rendering, produced only when stringified
            yield _plainconverter.formatlist(data, name, fmt, sep)
        return templatekw.hybridlist(data, name=name, fmt=fmt, gen=gen())
344 345
class templateformatter(baseformatter):
    '''render each item through a user-specified template'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        self._topic = topic
        # resolve the -T/--template spec, then build the template engine
        spec = lookuptemplate(ui, topic, opts.get('template', ''))
        self._t = loadtemplater(ui, topic, spec, cache=templatekw.defaulttempl)
        # per-item index counter exposed as {index}
        self._counter = itertools.count()
        self._cache = {} # for templatekw/funcs to store reusable data
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        assert all(k == 'ctx' for k in ctxs)
        self._item.update(ctxs)
    def _showitem(self):
        # TODO: add support for filectx. probably each template keyword or
        # function will have to declare dependent resources. e.g.
        # @templatekeyword(..., requires=('ctx',))
        props = {}
        if 'ctx' in self._item:
            props.update(templatekw.keywords)
        props['index'] = next(self._counter)
        # explicitly-defined fields precede templatekw
        props.update(self._item)
        if 'ctx' in self._item:
            # but template resources must be always available
            props['templ'] = self._t
            props['repo'] = props['ctx'].repo()
            props['revcache'] = {}
        g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
        self._out.write(templater.stringify(g))
375 376
# specification of the template to use, as resolved by lookuptemplate():
# either a literal template string ('tmpl') or the path of a map file
# ('mapfile') — at most one of the two is set, the other is None
templatespec = collections.namedtuple(r'templatespec',
                                      r'tmpl mapfile')
379
def lookuptemplate(ui, topic, tmpl):
    """Find the template matching the given -T/--template spec 'tmpl'

    'tmpl' can be any of the following:

    - a literal template (e.g. '{rev}')
    - a map-file name or path (e.g. 'changelog')
    - a reference to [templates] in config file
    - a path to raw template file

    A map file defines a stand-alone template environment. If a map file
    selected, all templates defined in the file will be loaded, and the
    template matching the given topic will be rendered. No aliases will be
    loaded from user config.
    """

    # anything containing '{' is taken to be a literal template
    if '{' in tmpl:
        return templatespec(tmpl, None)

    # a bare name may be a stock style shipped with Mercurial
    if not os.path.split(tmpl)[0]:
        stylepath = (templater.templatepath('map-cmdline.' + tmpl)
                     or templater.templatepath(tmpl))
        if stylepath and os.path.isfile(stylepath):
            return templatespec(None, stylepath)

    # or a name registered in the [templates] config section
    configured = ui.config('templates', tmpl)
    if configured:
        return templatespec(templater.unquotestring(configured), None)

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise error.Abort(_("specify a template"))

    # or an explicit path to a map file or raw template file
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return templatespec(None, os.path.realpath(tmpl))
        with util.posixfile(tmpl, 'rb') as fp:
            tmpl = fp.read()
        return templatespec(tmpl, None)

    # otherwise, fall back to treating it as a constant string
    return templatespec(tmpl, None)
423 427
def loadtemplater(ui, topic, spec, cache=None):
    """Create a templater from either a literal template or loading from
    a map file

    'spec' is a templatespec; exactly one of its fields may be set.
    """
    assert not (spec.tmpl and spec.mapfile)
    if not spec.mapfile:
        return maketemplater(ui, topic, spec.tmpl, cache=cache)
    # a map file supplies its own complete template environment
    return templater.templater.frommapfile(spec.mapfile, cache=cache)
432 435
def maketemplater(ui, topic, tmpl, cache=None):
    """Create a templater from a string template 'tmpl'

    User-configured [templatealias] entries are applied.
    """
    engine = templater.templater(cache=cache,
                                 aliases=ui.configitems('templatealias'))
    if tmpl:
        # register the literal template under the caller's topic
        engine.cache[topic] = tmpl
    return engine
440 443
def formatter(ui, out, topic, opts):
    """Pick the formatter backend matching the -T/--template option.

    Named built-in formats take priority, then any other non-empty
    template string, then debug/json config knobs, then plain output.
    """
    template = opts.get("template", "")
    byname = {
        "json": jsonformatter,
        "pickle": pickleformatter,
        "debug": debugformatter,
    }
    if template in byname:
        return byname[template](ui, out, topic, opts)
    if template != "":
        return templateformatter(ui, out, topic, opts)
    # developer config: ui.formatdebug
    if ui.configbool('ui', 'formatdebug'):
        return debugformatter(ui, out, topic, opts)
    # deprecated config: ui.formatjson
    if ui.configbool('ui', 'formatjson'):
        return jsonformatter(ui, out, topic, opts)
    return plainformatter(ui, out, topic, opts)
458 461
@contextlib.contextmanager
def openformatter(ui, filename, topic, opts):
    """Create a formatter that writes outputs to the specified file

    Must be invoked using the 'with' statement. Both the file and the
    formatter are closed when the block exits.
    """
    with util.posixfile(filename, 'wb') as out, \
            formatter(ui, out, topic, opts) as fm:
        yield fm
468 471
@contextlib.contextmanager
def _neverending(fm):
    # wrap an existing formatter without ever calling fm.end() on exit;
    # used by maybereopen() when no new file-backed formatter is opened
    yield fm
472 475
def maybereopen(fm, filename, opts):
    """Create a formatter backed by file if filename specified, else return
    the given formatter

    Must be invoked using the 'with' statement. This will never call fm.end()
    of the given formatter.
    """
    if not filename:
        # keep using the caller's formatter; never end() it here
        return _neverending(fm)
    return openformatter(fm._ui, filename, fm._topic, opts)
General Comments 0
You need to be logged in to leave comments. Login now