##// END OF EJS Templates
formatter: factor out function to create templater from literal or map file...
Yuya Nishihara -
r32831:11e667a8 default
parent child Browse files
Show More
@@ -1,3587 +1,3580 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23
24 24 from . import (
25 25 bookmarks,
26 26 changelog,
27 27 copies,
28 28 crecord as crecordmod,
29 29 encoding,
30 30 error,
31 31 formatter,
32 32 graphmod,
33 33 lock as lockmod,
34 34 match as matchmod,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 registrar,
41 41 repair,
42 42 revlog,
43 43 revset,
44 44 scmutil,
45 45 smartset,
46 46 templatekw,
47 47 templater,
48 48 util,
49 49 vfs as vfsmod,
50 50 )
# BytesIO/StringIO factory chosen by util for this Python version
stringio = util.stringio

# templates of common command options
#
# Each entry is a fancyopts-style tuple:
#   (shortname, longname, default, help[, metavar])
# Commands concatenate these lists into their own option tables.

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

# whitespace options are sandwiched between the other diff flags so that
# they show up in a sensible place in --help output
diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
163 163
def ishunk(x):
    """Report whether x is a hunk object (curses or plain record flavor)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
167 167
def newandmodified(chunks, originalchunks):
    """Return the set of filenames for hunks in *chunks* that introduce a
    new file and do not appear in *originalchunks*."""
    return {c.header.filename()
            for c in chunks
            if ishunk(c) and c.header.isnewfile() and c not in originalchunks}
175 175
def parsealiases(cmd):
    """Split a command-table key into its list of command aliases.

    Keys look like "^log|history"; any leading '^' markers are stripped
    and the remainder is split on '|'.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
178 178
def setupwrapcolorwrite(ui):
    """Monkeypatch ui.write so that diff output gets labeled/colorized.

    Returns the original ui.write so the caller can restore it later.
    """
    origwrite = ui.write

    def labeledwrite(*args, **kwargs):
        baselabel = kwargs.pop('label', '')
        # difflabel yields (chunk, label) pairs for the diff text
        for chunk, sublabel in patch.difflabel(lambda: args):
            origwrite(chunk, label=baselabel + sublabel)

    setattr(ui, 'write', labeledwrite)
    return origwrite
191 191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter originalhunks, via curses when enabled."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # test mode: drive the curses chunk selector from a script file
        chunkselector = crecordmod.testdecorator(testfile,
                                                 crecordmod.testchunkselector)
    else:
        chunkselector = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, chunkselector, operation)
204 204
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter the originalhunks and return a list of
    selected hunks.

    *operation* is used to build ui messages indicating what kind of
    filtering the user is doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    testfile = ui.config('experimental', 'crecordtest', None)
    usecurses = crecordmod.checkcurses(ui)
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile, operation)
    finally:
        # always undo the ui.write monkeypatching
        ui.write = oldwrite
221 221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via *commitfunc*.

    *filterfn* filters the hunks the user selects; *backupall* backs up
    every changed file (not just the selected ones); *cmdsuggest* is the
    command name to suggest when not running interactively.  The working
    directory is restored to its original state afterwards.
    """
    # deferred import (see the similar 'avoid cycle' imports in this module)
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # force a full git-style diff with function context so the
        # interactive hunk selection has everything it needs
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # not every chunk type exposes files(); skip those
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup dir from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # remove newly-added files so the revert below yields a clean tree
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftover backups are harmless
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-directory lock around the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
404 404
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    matches = {}
    debugmatches = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        candidates = [cmd]
    else:
        candidates = table.keys()

    allcmds = []
    for key in candidates:
        aliases = parsealiases(key)
        allcmds.extend(aliases)
        hit = None
        if cmd in aliases:
            hit = cmd
        elif not strict:
            # prefix matching is allowed in non-strict mode
            for alias in aliases:
                if alias.startswith(cmd):
                    hit = alias
                    break
        if hit is None:
            continue
        if aliases[0].startswith("debug") or hit.startswith("debug"):
            debugmatches[hit] = (aliases, table[key])
        else:
            matches[hit] = (aliases, table[key])

    if not matches and debugmatches:
        matches = debugmatches

    return matches, allcmds
442 442
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    try:
        # exact name wins outright
        return choice[cmd]
    except KeyError:
        pass

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    # exactly one (prefix) match left, if any
    for entry in choice.values():
        return entry

    raise error.UnknownCommand(cmd, allcmds)
458 458
def findrepo(p):
    """Walk upward from *p* looking for a directory containing '.hg'.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        prev, p = p, os.path.dirname(p)
        if p == prev:
            # dirname() reached a fixed point: filesystem root
            return None
    return p
466 466
def bailifchanged(repo, merge=True, hint=None):
    """enforce the precondition that working directory must be clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # status()[:4] is (modified, added, removed, deleted)
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
484 484
def logmessage(ui, opts):
    """get the log message according to -m and -l option"""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message
    # no -m given: read the message from the -l file (or stdin for '-')
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
503 503
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a ctx with more than one parent is a merge commit
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
520 520
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Pick the commit message editor implied by the '--edit' option.

    'finishdesc' is called with the edited commit message (the new
    changeset's description) right after editing but before the
    empty-ness check; its return value is the text actually stored in
    history, which allows the description to be rewritten.

    'extramsg' replaces the 'Leave message empty to abort commit' line
    shown in the editor; the 'HG: ' prefix and EOL are added
    automatically.

    'editform' is a dot-separated list of names distinguishing the
    purpose of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever one of
    'finishdesc' or 'extramsg' is given, because those are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
551 551
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no (or empty) limit means unlimited
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
565 565
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output filename pattern *pat*.

    Escapes are only enabled when the matching argument is supplied:
    %% literal '%'; %b basename of the repo root; %H full hex node;
    %R changelog revision of node; %h short hex node; %m description
    with non-word characters replaced by '_'; %r revision zero-padded
    to *revwidth*; %N *total*; %n *seqno* (zero-padded to the width of
    *total* when both are given); %s basename of *pathname*; %d dirname
    of *pathname* (or '.'); %p *pathname* itself.

    Raises error.Abort for an escape that is unknown or not enabled.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update(node_expander)
        # NOTE(review): duplicate 'if node' check — could be folded into
        # the branch above without changing behavior
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # when both are known, pad %n to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                # consume the escape character and expand it
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
611 611
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
615 615
616 616 class _unclosablefile(object):
617 617 def __init__(self, fp):
618 618 self._fp = fp
619 619
620 620 def close(self):
621 621 pass
622 622
623 623 def __iter__(self):
624 624 return iter(self._fp)
625 625
626 626 def __getattr__(self, attr):
627 627 return getattr(self._fp, attr)
628 628
629 629 def __enter__(self):
630 630 return self
631 631
632 632 def __exit__(self, exc_type, exc_value, exc_tb):
633 633 pass
634 634
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the file described by the %-escape pattern *pat*.

    '-' (or an empty pattern) maps to the ui's stdio streams, wrapped so
    that close() is a no-op.  *modemap*, when given, remembers per-file
    modes so a second write to the same expanded name appends.
    """
    if isstdiofilename(pat):
        writable = mode not in ('r', 'rb')
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates; subsequent writes append
            modemap[fn] = 'ab'
    return open(fn, mode)
653 653
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate mutually-exclusive / context-dependent flag combinations
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # open the changelog from the unfiltered repo view
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening the revlog index file directly from disk
        # (file_ is expected to end in '.i' or '.d' — the '.i' index is used)
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
698 698
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) the files matched by *pats*.

    The last element of *pats* is the destination; the rest are sources.
    Returns True when any individual copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged/removed files that were named exactly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy a single file; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # same file under two casings: rename via a temp name
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped source paths already exist
                    # under dest; the higher-scoring strip length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
944 944
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
965 965
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple: a status message (or None when
    the patch carried no data), the committed node (or None), and whether
    hunks were rejected during a --partial apply.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    # command-line options win over values parsed out of the patch header
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract() found no actual patch content
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            # --exact requires the node/parent info recorded in the header
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # normal path: apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a partial import may legitimately change nothing
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass path: commit straight to the store via memctx,
            # leaving the working directory untouched
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1135 1135
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1143 1143
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" through *write*.

    Writes the header lines, the description, then the diff against the
    selected parent (the second parent when *switch_parent* is set).
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    prev = parents[0] if parents else nullid

    # assemble the header, then emit it line by line
    headerlines = ["# HG changeset patch\n",
                   "# User %s\n" % ctx.user(),
                   "# Date %d %d\n" % ctx.date(),
                   "# %s\n" % util.datestr(ctx.date())]
    if branch and branch != 'default':
        headerlines.append("# Branch %s\n" % branch)
    headerlines.append("# Node ID %s\n" % hex(node))
    headerlines.append("# Parent  %s\n" % hex(prev))
    if len(parents) > 1:
        headerlines.append("# Parent  %s\n" % hex(parents[1]))
    for line in headerlines:
        write(line)

    # let extensions append extra header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1176 1176
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                            the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # 'write' is rebound below depending on the chosen destination
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            # one output file per revision, named from the template
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
            if not dest.startswith('<'):
                repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1235 1235
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff between node1 and node2 (restricted by *match*) to *fp*,
    or to the ui when fp is None.  With stat=True a diffstat summary is
    produced instead of the full patch.  When *root* is given, paths are
    shown relative to that subdirectory; listsubrepos additionally recurses
    into subrepositories.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1292 1292
1293 1293 def _changesetlabels(ctx):
1294 1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1295 1295 if ctx.obsolete():
1296 1296 labels.append('changeset.obsolete')
1297 1297 if ctx.troubled():
1298 1298 labels.append('changeset.troubled')
1299 1299 for trouble in ctx.troubles():
1300 1300 labels.append('trouble.%s' % trouble)
1301 1301 return ' '.join(labels)
1302 1302
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # rev -> rendered text, populated when output is buffered
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        # Emit any buffered header/hunk for ctx; return 1 if a hunk was
        # written, 0 otherwise.  A header identical to the last one written
        # is suppressed.
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # In buffered mode, capture the rendered output into self.hunk so
        # flush() can emit it later (used by graph log, for instance).
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if ctx.troubled():
            # i18n: column positioning for "hg log"
            self.ui.write(_("trouble:     %s\n") % ', '.join(ctx.troubles()),
                          label='log.trouble')

        if self.ui.debugflag:
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        # Write a diffstat and/or full patch for ctx, as selected by the
        # --stat/--patch display options.
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1478 1478
class jsonchangeset(changeset_printer):
    '''format changeset information as a JSON list of objects.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening '[' has been emitted yet
        self._first = True

    def close(self):
        # terminate the JSON array; emit '[]' if nothing was shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working directory context: no rev/node to report
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # render the diffstat into a buffer so it can be escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1577 1577
class changeset_templater(changeset_printer):
    '''format changeset information using a template.

    *tmpl* is a literal template string and *mapfile* a style map file path;
    the two are mutually exclusive alternatives.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # Fix: the pasted source contained unresolved diff residue -- both
        # the old frommapfile/maketemplater branches and the refactored
        # call were present.  Keep the refactored form: loadtemplater()
        # accepts a (literal, mapfile) pair and builds the right templater.
        self.t = formatter.loadtemplater(ui, 'changeset', (tmpl, mapfile),
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # each part may be overridden by a mode-specific variant, e.g.
        # 'changeset_debug' when --debug is in effect
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document footer (if any) before the base class emits it
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1655 1648
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to a (template, mapfile) pair.

    Exactly one element of the returned pair is set (or both are None when
    neither a template nor a style could be determined).
    """
    # fall back to ui settings when neither option was given; an explicit
    # template is stronger than a style
    if not tmpl and not style:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if style and not tmpl:
        mapfile = style
        # a bare name refers to a shipped map-cmdline.* style file
        if not os.path.split(mapfile)[0]:
            located = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if located:
                mapfile = located
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1682 1675
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when a diff or diffstat will be shown
    patchmatcher = None
    if opts.get('patch') or opts.get('stat'):
        patchmatcher = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, patchmatcher, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, patchmatcher, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, patchmatcher, opts, buffered)
1708 1701
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    successors = marker.succnodes()
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentnodes), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # 'date' is shown separately above, so drop it from the metadata dict
    metadata = marker.metadata().copy()
    metadata.pop('date', None)
    fm.write('metadata', '{%s}',
             fm.formatdict(metadata, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1729 1722
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    datematch = util.matchdate(date)
    m = scmutil.matchall(repo)
    matched = {}

    # record the date of every changeset whose timestamp satisfies the spec
    def prep(ctx, fns):
        d = ctx.date()
        if datematch(d[0]):
            matched[ctx.rev()] = d

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1750 1743
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes, doubling each time until *sizelimit* is reached.

    Once the size reaches the limit it is yielded forever; the generator
    never terminates on its own.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
1756 1749
class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone."""
    pass
1759 1752
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: fncache is populated with {rev: [filename, ...]} for
    every wanted rev encountered.
    '''
    # changelog revisions touched by the matched files
    wanted = set()
    # rename sources discovered while walking; iterfiles() yields them
    # as additional (filename, node) pairs so their history is walked too
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            # third element is the rename source (or False/None) when
            # following copies; filelog.renamed(n) is only consulted
            # if follow is set
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yields (filename, node) pairs: node pins the exact file
        # revision in the parent when following, else None means
        # "walk the whole filelog"
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        # rename sources appended to 'copies' during the main loop below
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1856 1849
class _followfilter(object):
    """Incrementally decide whether revisions are related to a start rev.

    The first revision passed to match() becomes the start revision.
    Subsequent match() calls report whether their rev is connected to it:
    revs above startrev are matched as descendants, revs below as
    ancestors.  The 'roots' set is the frontier updated as revs are fed
    in, so callers are expected to feed revs moving monotonically away
    from startrev (walkchangerevs feeds a decreasing range) — TODO
    confirm no caller mixes directions.

    With onlyfirst=True only first parents are followed.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev  # set lazily by the first match() call
        self.roots = set()       # frontier of revs known to be related
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # parents to consider, excluding nullrev; first parent only
            # when onlyfirst is set
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first call: remember the anchor and trivially match it
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace rev by its parents in the frontier
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1894 1887
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path: patterns, or exact/prefix matches combined with
    # --removed, cannot be resolved through filelogs alone
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # first time this rev is probed: check its files
                    # against the matcher, caching the outcome
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): when the fast path ran, 'wanted' is a plain
                # set here, and set - list is a TypeError in Python; this
                # only works for smartset/lazywantedset-like operands —
                # confirm which types can reach --prune on this path.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # forward pass: let the caller gather data on each ctx
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # then yield in the original (usually reverse) order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2032 2025
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a filematcher(rev) for --patch/--stat with --follow FILE.

    The returned callable maps a revision number to a matcher selecting
    the ancestor names of 'files' in that revision.
    """
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    # one-element list so the closure below can mutate the flag
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        # walk each file's ancestry once, recording {rev: {path, ...}}
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
2060 2053
2061 2054 def _makenofollowlogfilematcher(repo, pats, opts):
2062 2055 '''hook for extensions to override the filematcher for non-follow cases'''
2063 2056 return None
2064 2057
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # maps an option name to (revset template, join operator for lists)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # copy so the synthetic '_'-prefixed keys added below do not leak
    # back to the caller
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # ascending revs mean --follow walks descendants instead of ancestors
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    # option keys selected by (followfirst,) / (followdescendants,
    # followfirst) below
    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # combine every active option's revset fragment with 'and'
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2216 2209
def _logrevs(repo, opts):
    """Return the revisions log should consider, newest first."""
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if follow:
        if repo.dirstate.p1() == nullid:
            # following from the null revision: nothing to walk
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    all_revs = smartset.spanset(repo)
    all_revs.reverse()
    return all_revs
2231 2224
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # keep only the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))

    return revs, expr, filematcher
2262 2255
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # keep only the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))

    return revs, expr, filematcher
2288 2281
def _graphnodeformatter(ui, displayer):
    """Return a function(repo, ctx) rendering the graph node symbol.

    Honors the ui.graphnodetemplate config; without it, the built-in
    {graphnode} keyword implementation is returned directly.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        # 'props' is shared across calls; only the per-ctx entries are
        # overwritten each time
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode
2309 2302
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render 'dag' as an ASCII graph through 'displayer'.

    dag yields (rev, type, ctx, parents) tuples; edgefn (e.g.
    graphmod.asciiedges) turns each displayed entry into edge data for
    graphmod.ascii.  getrenamed, when given, supplies copy information
    per file; filematcher, when given, restricts --patch/--stat output
    per revision.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # ctx.rev() is falsy for rev 0/wdir; copies are skipped there
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        # displayer buffers its output in 'hunk'; pop it to splice the
        # text into the graph columns
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2357 2350
def graphlog(ui, repo, pats, opts):
    """Show the revision DAG as an ASCII graph (hg log -G)."""
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # bound the rename search to the highest requested revision
        endrev = None
        revspec = opts.get('rev')
        if revspec:
            endrev = scmutil.revrange(repo, revspec).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2374 2367
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph is set in opts."""
    for name in ("newest_first",):
        if opts.get(name):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % name.replace("_", "-"))
2380 2373
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, apply --limit, and return a graph dag.

    Note: mutates the passed-in 'nodes' list.
    """
    nodes.reverse()
    limit = loglimit(opts)
    trimmed = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, trimmed)
2387 2380
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matching 'match' for addition, recursing into subrepos.

    prefix is prepended to names in messages and passed down to subrepos.
    With explicitonly, only exactly-named files are added.  Returns the
    list of files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # collect names the matcher rejects while still delegating to the
    # original bad-file callback
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # detect case-collisions on case-insensitive filesystems
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # without --subrepos, only add files named explicitly
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2430 2423
def addwebdirpath(repo, serverpath, webconf):
    """Register repo under serverpath in webconf, including its subrepos.

    Revisions that touched .hgsub are scanned so every subrepo ever
    declared gets registered too.
    """
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2439 2432
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matching 'match', recursing into subrepos.

    Returns (bad, forgot): names that could not be forgotten and names
    actually forgotten.  prefix is prepended to names in messages and
    passed down to subrepos.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # collect names the matcher rejects while still delegating to the
    # original bad-file callback
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2487 2480
def files(ui, ctx, m, fm, fmt, subrepos):
    """List files of 'ctx' matching 'm' through formatter 'fm'.

    fmt is the format string for the path field.  Returns 0 if at least
    one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when requested, named exactly, or when any pattern
        # reaches inside the subrepo
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2517 2510
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Remove files matching 'm' from tracking (and disk unless 'after').

    after: only record files already deleted from disk; force: remove
    regardless of local modifications; subrepos: recurse into subrepos.
    When 'warnings' (a list) is passed by a recursive call, messages are
    appended to it and printing is left to the outermost caller.
    Returns 1 if any warning was issued, else 0.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    # status tuple: take modified, added, deleted, clean by position
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True  # outermost call prints accumulated warnings
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # decide which tracked files to forget (and warn about the rest)
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2635 2628
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write data of files in ctx selected by matcher through basefm.

    When fntemplate is given, output is reopened per file with a name
    built from the template (see makefilename). Recurses into
    subrepositories listed in ctx.substate. Returns 0 if at least one
    file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # emit one file's data through the (possibly per-file) formatter
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # fast path failed (e.g. file not in manifest); fall through
            # to the generic walk below
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            # sub.cat() returns 0 on success, mirroring this function
            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2682 2675
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # probe addremove with .get() -- this function can be called from a
    # command that doesn't support the addremove option at all
    if opts.get('addremove'):
        status = scmutil.addremove(repo, matcher, "", opts)
        if status != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2699 2692
def samefile(f, ctx1, ctx2):
    """Return True if file f is identical (content and flags) in ctx1 and
    ctx2, including the case where it is absent from both."""
    if f not in ctx1.manifest():
        # absent in ctx1: identical only if also absent in ctx2
        return f not in ctx2.manifest()
    a = ctx1.filectx(f)
    if f not in ctx2.manifest():
        # present only in ctx1
        return False
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2711 2704
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset `old`, folding in current working directory changes.

    A temporary intermediate commit is created first (with hooks and the
    active bookmark disabled) to capture working directory changes; the
    amended changeset is then committed on top of old's first parent,
    with `extra` augmented by the extras of both changesets.

    Returns the node of the new changeset, or old.node() when nothing
    changed (no file changes and identical message/user/date/extra).
    Obsolescence markers are recorded when enabled; otherwise the
    superseded changesets are stripped.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway. (default None keeps this safe for callers
            # that did not populate the option)
            opts.pop('logfile', None)
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # BUGFIX: this used to read `if old.p2:` -- a bound method,
                # which is always truthy, so the second-parent copies were
                # computed even for non-merge changesets. Only a real merge
                # has a non-null (truthy) second parent context.
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        # returning None signals file removal to memctx
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                # no message given: open the editor pre-filled with the old
                # description
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                # always restore the configured new-commit phase
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            # commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2894 2887
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, falling back to an interactive editor
    (with unchanged-message detection) when the description is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2900 2893
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the commit message editor for ctx and return the edited text.

    The initial buffer is built from the most specific matching
    [committemplate] configuration (found by stripping trailing
    components from editform), or from the plain-text skeleton built by
    buildcommittext(). 'HG:' lines and anything below the special diff
    separator are stripped from the result.

    finishdesc, if given, post-processes the text before validation.
    Aborts on an empty message, and, when unchangedmessagedetection is
    set, on a message identical to the rendered template.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try 'changeset.commit.amend', then 'changeset.commit', ... 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    # (_linebelow is a module-level pattern defined elsewhere in this file)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2951 2944
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text for ctx from template string tmpl."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose the other [committemplate] entries as named template fragments
    for name, body in ui.configitems('committemplate'):
        if name != 'changeset':
            t.t.cache[name] = body

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2968 2961
def hgprefix(msg):
    """Prefix every non-empty line of msg with "HG: ", dropping empty lines."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2971 2964
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text commit editor skeleton for ctx."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    edittext = []
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                      " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    edittext.extend(hgprefix(_("added %s") % f) for f in added)
    edittext.extend(hgprefix(_("changed %s") % f) for f in modified)
    edittext.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)
2999 2992
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status for the newly committed changeset `node`:
    'created new head', reopened-branch notes, and the changeset id at
    verbose/debug levels."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    parentheads = [p for p in parents
                   if p.node() in (bheads or ()) and p.branch() == branch]
    if (bheads and not opts.get('amend')
        and node not in bheads and not parentheads):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N y additional topo root
        #
        # B N y additional branch root
        # C N y additional topo head
        # H N n usual case
        #
        # B B y weird additional branch root
        # C B y branch merge
        # H B n merge with named branch
        #
        # C C y additional head from merge
        # C H n merge with a head
        #
        # H H n head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3047 3040
def postcommitstatus(repo, pats, opts):
    """Return the working directory status for files matching pats/opts."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3050 3043
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matching pats to their state in target revision ctx.

    parents is the (parent, p2) pair of the working directory. Files are
    classified into per-action lists (revert/add/remove/drop/forget/
    undelete) via the dispatch table below, with optional .orig backups,
    then handed to _performrevert(). Also reverts matching subrepos.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        # reverting to the second merge parent: use it as "the" parent below
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress warnings for paths already collected, subrepos,
                # and directories that contain collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2 # unconditionally do backup
        check = 1 # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3333 3326
def _revertprefetch(repo, ctx, *files):
    """Let extension changing the storage layer prefetch content

    No-op hook point: extensions may wrap this to prefetch the file
    contents (lists from revert's 'revert'/'add'/'undelete' actions)
    before _performrevert() reads them.
    """
    pass
3337 3330
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user declined to touch in interactive mode; excluded from
    # the matcher used for the interactive diff below
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content and flags from ctx into the working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # remove f from disk (best-effort) and mark it removed in dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode',
                True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    # files restored from a removed state become clean only when reverting
    # to the sole working directory parent
    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3476 3469
class command(registrar.command):
    """registrar.command subclass that tags functions registered through
    this deprecated cmdutil-level entry point."""
    def _doregister(self, func, name, *args, **kwargs):
        # flag for deprecwarn in extensions.py
        func._deprecatedregistrar = True
        return super(command, self)._doregister(func, name, *args, **kwargs)
3481 3474
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# Each entry is (state file, clearable, allowcommit, error, hint);
# consulted by checkunfinished()/clearunfinished() below.
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3510 3503
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, _clearable, allowcommit, msg, hint in unfinishedstates:
        # a state that permits committing does not block 'hg commit'
        if commit and allowcommit:
            continue
        if repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3521 3514
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: any non-clearable in-progress state blocks clearing
    for statefile, clearable, _allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: remove the clearable state files
    for statefile, clearable, _allowcommit, _msg, _hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3532 3525
# (state file, command to continue the interrupted operation) pairs,
# consulted by howtocontinue() after 'hg resolve' completes
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3537 3530
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _("continue: %s")
    for statefile, cmdmsg in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmdmsg, True
    wctx = repo[None]
    # dirty working directory or dirty subrepo suggests 'hg commit' next
    dirty = any(repo.status()) or any(
        wctx.sub(s).dirty() for s in wctx.substate)
    if dirty:
        return contmsg % _("hg commit"), False
    return None, None
3558 3551
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3573 3566
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only warnings carry an actionable continuation hint
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,466 +1,472 b''
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally which isn't
22 22 supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.fout = sys.stdout # redirect to doctest
49 49 ... ui.verbose = verbose
50 50 ... return fn(ui, ui.formatter(fn.__name__, opts))
51 51
52 52 Basic example:
53 53
54 54 >>> def files(ui, fm):
55 55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
56 56 ... for f in files:
57 57 ... fm.startitem()
58 58 ... fm.write('path', '%s', f[0])
59 59 ... fm.condwrite(ui.verbose, 'date', ' %s',
60 60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
61 61 ... fm.data(size=f[1])
62 62 ... fm.plain('\\n')
63 63 ... fm.end()
64 64 >>> show(files)
65 65 foo
66 66 bar
67 67 >>> show(files, verbose=True)
68 68 foo 1970-01-01 00:00:00
69 69 bar 1970-01-01 00:00:01
70 70 >>> show(files, template='json')
71 71 [
72 72 {
73 73 "date": [0, 0],
74 74 "path": "foo",
75 75 "size": 123
76 76 },
77 77 {
78 78 "date": [1, 0],
79 79 "path": "bar",
80 80 "size": 456
81 81 }
82 82 ]
83 83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
84 84 path: foo
85 85 date: 1970-01-01T00:00:00+00:00
86 86 path: bar
87 87 date: 1970-01-01T00:00:01+00:00
88 88
89 89 Nested example:
90 90
91 91 >>> def subrepos(ui, fm):
92 92 ... fm.startitem()
93 93 ... fm.write('repo', '[%s]\\n', 'baz')
94 94 ... files(ui, fm.nested('files'))
95 95 ... fm.end()
96 96 >>> show(subrepos)
97 97 [baz]
98 98 foo
99 99 bar
100 100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
101 101 baz: foo, bar
102 102 """
103 103
104 104 from __future__ import absolute_import
105 105
106 106 import contextlib
107 107 import itertools
108 108 import os
109 109
110 110 from .i18n import _
111 111 from .node import (
112 112 hex,
113 113 short,
114 114 )
115 115
116 116 from . import (
117 117 error,
118 118 pycompat,
119 119 templatefilters,
120 120 templatekw,
121 121 templater,
122 122 util,
123 123 )
124 124
125 125 pickle = util.pickle
126 126
127 127 class _nullconverter(object):
128 128 '''convert non-primitive data types to be processed by formatter'''
129 129 @staticmethod
130 130 def formatdate(date, fmt):
131 131 '''convert date tuple to appropriate format'''
132 132 return date
133 133 @staticmethod
134 134 def formatdict(data, key, value, fmt, sep):
135 135 '''convert dict or key-value pairs to appropriate dict format'''
136 136 # use plain dict instead of util.sortdict so that data can be
137 137 # serialized as a builtin dict in pickle output
138 138 return dict(data)
139 139 @staticmethod
140 140 def formatlist(data, name, fmt, sep):
141 141 '''convert iterable to appropriate list format'''
142 142 return list(data)
143 143
class baseformatter(object):
    '''common machinery shared by all formatter backends'''
    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._style = opts.get("style")
        self._template = opts.get("template")
        self._converter = converter
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex
    def __enter__(self):
        return self
    def __exit__(self, exctype, excvalue, traceback):
        # flush pending output only on a clean exit
        if exctype is None:
            self.end()
    def _showitem(self):
        '''show a formatted item once all data is collected'''
        pass
    def startitem(self):
        '''begin an item in the format list'''
        if self._item is not None:
            self._showitem()
        self._item = {}
    def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)
    def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)
    def formatlist(self, data, name, fmt='%s', sep=' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        pass
    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        self._item.update(pycompat.byteskwargs(data))
    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        keys = fields.split()
        assert len(keys) == len(fielddata)
        self._item.update(zip(keys, fielddata))
    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''
        pass
    def isplain(self):
        '''check for plain formatter usage'''
        return False
    def nested(self, field):
        '''sub formatter to store nested data in the specified field'''
        data = self._item[field] = []
        return _nestedformatter(self._ui, self._converter, data)
    def end(self):
        '''end output for the formatter'''
        if self._item is not None:
            self._showitem()
209 209
def nullformatter(ui, topic):
    '''formatter that prints nothing'''
    # baseformatter with the null converter collects items but never
    # shows them
    return baseformatter(ui, topic, opts={}, converter=_nullconverter)
213 213
class _nestedformatter(baseformatter):
    '''build sub items and store them in the parent formatter'''
    def __init__(self, ui, converter, data):
        baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
        # list owned by the parent formatter; finished items land here
        self._data = data
    def _showitem(self):
        self._data.append(self._item)
221 221
222 222 def _iteritems(data):
223 223 '''iterate key-value pairs in stable order'''
224 224 if isinstance(data, dict):
225 225 return sorted(data.iteritems())
226 226 return data
227 227
228 228 class _plainconverter(object):
229 229 '''convert non-primitive data types to text'''
230 230 @staticmethod
231 231 def formatdate(date, fmt):
232 232 '''stringify date tuple in the given format'''
233 233 return util.datestr(date, fmt)
234 234 @staticmethod
235 235 def formatdict(data, key, value, fmt, sep):
236 236 '''stringify key-value pairs separated by sep'''
237 237 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
238 238 @staticmethod
239 239 def formatlist(data, name, fmt, sep):
240 240 '''stringify iterable separated by sep'''
241 241 return sep.join(fmt % e for e in data)
242 242
class plainformatter(baseformatter):
    '''the default text output scheme'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # full hashes in debug mode, short ones otherwise
        self.hexfunc = hex if ui.debugflag else short
        if ui is out:
            self._write = ui.write
        else:
            self._write = lambda s, **opts: out.write(s)
    def startitem(self):
        pass
    def data(self, **data):
        pass
    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write'''
        if cond:
            self._write(deftext % fielddata, **opts)
    def plain(self, text, **opts):
        self._write(text, **opts)
    def isplain(self):
        return True
    def nested(self, field):
        # nested data will be directly written to ui
        return self
    def end(self):
        pass
274 274
class debugformatter(baseformatter):
    '''dump items as a Python-ish "topic = [...]" listing'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("%s = [\n" % self._topic)
    def _showitem(self):
        self._out.write(" " + repr(self._item) + ",\n")
    def end(self):
        baseformatter.end(self)
        self._out.write("]\n")
285 285
class pickleformatter(baseformatter):
    '''accumulate items and emit one pickled list at end()'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._data = []
    def _showitem(self):
        self._data.append(self._item)
    def end(self):
        baseformatter.end(self)
        self._out.write(pickle.dumps(self._data))
296 296
class jsonformatter(baseformatter):
    '''stream items as a JSON array of objects'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("[")
        self._first = True
    def _showitem(self):
        # comma-separate items after the first
        if not self._first:
            self._out.write(",")
        self._first = False

        self._out.write("\n {\n")
        needsep = False
        for k, v in sorted(self._item.items()):
            if needsep:
                self._out.write(",\n")
            needsep = True
            u = templatefilters.json(v, paranoid=False)
            self._out.write(' "%s": %s' % (k, u))
        self._out.write("\n }")
    def end(self):
        baseformatter.end(self)
        self._out.write("\n]\n")
322 322
class _templateconverter(object):
    '''convert non-primitive data types to be processed by templater'''
    @staticmethod
    def formatdate(date, fmt):
        '''return date tuple'''
        return date
    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''build object that can be evaluated as either plain string or dict'''
        data = util.sortdict(_iteritems(data))
        def gen():
            # lazy plain-text rendering used when the hybrid is stringified
            yield _plainconverter.formatdict(data, key, value, fmt, sep)
        return templatekw.hybriddict(data, key=key, value=value, fmt=fmt,
                                     gen=gen())
    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''build object that can be evaluated as either plain string or list'''
        data = list(data)
        def gen():
            # lazy plain-text rendering used when the hybrid is stringified
            yield _plainconverter.formatlist(data, name, fmt, sep)
        return templatekw.hybridlist(data, name=name, fmt=fmt, gen=gen())
344 344
class templateformatter(baseformatter):
    '''render each item through a user-specified template'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        self._topic = topic
        self._t = gettemplater(ui, topic, opts.get('template', ''),
                               cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self._cache = {}  # for templatekw/funcs to store reusable data
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        assert all(k == 'ctx' for k in ctxs)
        self._item.update(ctxs)
    def _showitem(self):
        # TODO: add support for filectx. probably each template keyword or
        # function will have to declare dependent resources. e.g.
        # @templatekeyword(..., requires=('ctx',))
        props = {}
        hasctx = 'ctx' in self._item
        if hasctx:
            props.update(templatekw.keywords)
        props['index'] = next(self._counter)
        # explicitly-defined fields precede templatekw
        props.update(self._item)
        if hasctx:
            # but template resources must be always available
            props['templ'] = self._t
            props['repo'] = props['ctx'].repo()
            props['revcache'] = {}
        g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
        self._out.write(templater.stringify(g))
375 375
def lookuptemplate(ui, topic, tmpl):
    """Resolve a user-supplied template spec to (template, mapfile)

    The spec is tried, in order, as a literal template, a stock style
    name, a [templates] config reference, and a filesystem path.
    """
    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        stylepath = (templater.templatepath('map-cmdline.' + tmpl)
                     or templater.templatepath(tmpl))
        if stylepath and os.path.isfile(stylepath):
            return None, stylepath

    # perhaps it's a reference to [templates]
    configured = ui.config('templates', tmpl)
    if configured:
        return templater.unquotestring(configured), None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise error.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        with util.posixfile(tmpl, 'rb') as fp:
            tmpl = fp.read()
        return tmpl, None

    # constant string?
    return tmpl, None
408 408
def gettemplater(ui, topic, spec, cache=None):
    """Create a templater from a template spec string (literal template,
    style name, config reference or path)"""
    tmpl, mapfile = lookuptemplate(ui, topic, spec)
    return loadtemplater(ui, topic, (tmpl, mapfile), cache=cache)
412
def loadtemplater(ui, topic, spec, cache=None):
    """Create a templater from either a literal template or loading from
    a map file"""
    tmpl, mapfile = spec
    # a spec carries a literal template or a mapfile, never both
    assert not (tmpl and mapfile)
    if mapfile:
        return templater.templater.frommapfile(mapfile, cache=cache)
    return maketemplater(ui, topic, tmpl, cache=cache)
415 421
def maketemplater(ui, topic, tmpl, cache=None):
    """Create a templater from a string template 'tmpl'"""
    aliases = ui.configitems('templatealias')
    templ = templater.templater(cache=cache, aliases=aliases)
    # an empty template leaves the cache untouched
    if tmpl:
        templ.cache[topic] = tmpl
    return templ
423 429
def formatter(ui, out, topic, opts):
    '''pick a formatter implementation from --template and config'''
    template = opts.get("template", "")
    # fixed template names select a structured backend
    structured = {
        "json": jsonformatter,
        "pickle": pickleformatter,
        "debug": debugformatter,
    }
    if template in structured:
        return structured[template](ui, out, topic, opts)
    if template:
        return templateformatter(ui, out, topic, opts)
    # developer config: ui.formatdebug
    if ui.configbool('ui', 'formatdebug'):
        return debugformatter(ui, out, topic, opts)
    # deprecated config: ui.formatjson
    if ui.configbool('ui', 'formatjson'):
        return jsonformatter(ui, out, topic, opts)
    return plainformatter(ui, out, topic, opts)
441 447
@contextlib.contextmanager
def openformatter(ui, filename, topic, opts):
    """Create a formatter that writes outputs to the specified file

    Must be invoked using the 'with' statement.
    """
    with util.posixfile(filename, 'wb') as out, \
            formatter(ui, out, topic, opts) as fm:
        yield fm
451 457
452 458 @contextlib.contextmanager
453 459 def _neverending(fm):
454 460 yield fm
455 461
def maybereopen(fm, filename, opts):
    """Create a formatter backed by file if filename specified, else return
    the given formatter

    Must be invoked using the 'with' statement. This will never call fm.end()
    of the given formatter.
    """
    if not filename:
        return _neverending(fm)
    return openformatter(fm._ui, filename, fm._topic, opts)
General Comments 0
You need to be logged in to leave comments. Login now