cmdutil: extract closure that performs the actual export formatting...
Augie Fackler
r32433:7feaf555 default
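
This changeset moves the per-revision body of export() into a new module-level helper, _exportsingle(), so that export() itself only resolves the output destination (ui, an explicit fp, or a per-revision file built from fntemplate) and loops over the revisions. A condensed sketch of the resulting shape, paraphrasing the hunks below (destination selection simplified to ui output):

    def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
        # writes the '# HG changeset patch' headers, the description and the
        # diff of a single revision through the supplied write() callback
        ...

    def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
               opts=None, match=None):
        for seqno, rev in enumerate(revs, 1):
            write = repo.ui.write  # or fp.write, or a per-revision file's write
            _exportsingle(repo, repo[rev], match, switch_parent, rev, seqno,
                          write, opts)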
@@ -1,3579 +1,3581 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import itertools
12 12 import os
13 13 import re
14 14 import tempfile
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 bin,
19 19 hex,
20 20 nullid,
21 21 nullrev,
22 22 short,
23 23 )
24 24
25 25 from . import (
26 26 bookmarks,
27 27 changelog,
28 28 copies,
29 29 crecord as crecordmod,
30 30 encoding,
31 31 error,
32 32 formatter,
33 33 graphmod,
34 34 lock as lockmod,
35 35 match as matchmod,
36 36 obsolete,
37 37 patch,
38 38 pathutil,
39 39 phases,
40 40 pycompat,
41 41 registrar,
42 42 repair,
43 43 revlog,
44 44 revset,
45 45 scmutil,
46 46 smartset,
47 47 templatekw,
48 48 templater,
49 49 util,
50 50 vfs as vfsmod,
51 51 )
52 52 stringio = util.stringio
53 53
54 54 # templates of common command options
55 55
56 56 dryrunopts = [
57 57 ('n', 'dry-run', None,
58 58 _('do not perform actions, just print output')),
59 59 ]
60 60
61 61 remoteopts = [
62 62 ('e', 'ssh', '',
63 63 _('specify ssh command to use'), _('CMD')),
64 64 ('', 'remotecmd', '',
65 65 _('specify hg command to run on the remote side'), _('CMD')),
66 66 ('', 'insecure', None,
67 67 _('do not verify server certificate (ignoring web.cacerts config)')),
68 68 ]
69 69
70 70 walkopts = [
71 71 ('I', 'include', [],
72 72 _('include names matching the given patterns'), _('PATTERN')),
73 73 ('X', 'exclude', [],
74 74 _('exclude names matching the given patterns'), _('PATTERN')),
75 75 ]
76 76
77 77 commitopts = [
78 78 ('m', 'message', '',
79 79 _('use text as commit message'), _('TEXT')),
80 80 ('l', 'logfile', '',
81 81 _('read commit message from file'), _('FILE')),
82 82 ]
83 83
84 84 commitopts2 = [
85 85 ('d', 'date', '',
86 86 _('record the specified date as commit date'), _('DATE')),
87 87 ('u', 'user', '',
88 88 _('record the specified user as committer'), _('USER')),
89 89 ]
90 90
91 91 # hidden for now
92 92 formatteropts = [
93 93 ('T', 'template', '',
94 94 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
95 95 ]
96 96
97 97 templateopts = [
98 98 ('', 'style', '',
99 99 _('display using template map file (DEPRECATED)'), _('STYLE')),
100 100 ('T', 'template', '',
101 101 _('display with template'), _('TEMPLATE')),
102 102 ]
103 103
104 104 logopts = [
105 105 ('p', 'patch', None, _('show patch')),
106 106 ('g', 'git', None, _('use git extended diff format')),
107 107 ('l', 'limit', '',
108 108 _('limit number of changes displayed'), _('NUM')),
109 109 ('M', 'no-merges', None, _('do not show merges')),
110 110 ('', 'stat', None, _('output diffstat-style summary of changes')),
111 111 ('G', 'graph', None, _("show the revision DAG")),
112 112 ] + templateopts
113 113
114 114 diffopts = [
115 115 ('a', 'text', None, _('treat all files as text')),
116 116 ('g', 'git', None, _('use git extended diff format')),
117 117 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
118 118 ('', 'nodates', None, _('omit dates from diff headers'))
119 119 ]
120 120
121 121 diffwsopts = [
122 122 ('w', 'ignore-all-space', None,
123 123 _('ignore white space when comparing lines')),
124 124 ('b', 'ignore-space-change', None,
125 125 _('ignore changes in the amount of white space')),
126 126 ('B', 'ignore-blank-lines', None,
127 127 _('ignore changes whose lines are all blank')),
128 128 ]
129 129
130 130 diffopts2 = [
131 131 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
132 132 ('p', 'show-function', None, _('show which function each change is in')),
133 133 ('', 'reverse', None, _('produce a diff that undoes the changes')),
134 134 ] + diffwsopts + [
135 135 ('U', 'unified', '',
136 136 _('number of lines of context to show'), _('NUM')),
137 137 ('', 'stat', None, _('output diffstat-style summary of changes')),
138 138 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
139 139 ]
140 140
141 141 mergetoolopts = [
142 142 ('t', 'tool', '', _('specify merge tool')),
143 143 ]
144 144
145 145 similarityopts = [
146 146 ('s', 'similarity', '',
147 147 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
148 148 ]
149 149
150 150 subrepoopts = [
151 151 ('S', 'subrepos', None,
152 152 _('recurse into subrepositories'))
153 153 ]
154 154
155 155 debugrevlogopts = [
156 156 ('c', 'changelog', False, _('open changelog')),
157 157 ('m', 'manifest', False, _('open manifest')),
158 158 ('', 'dir', '', _('open directory manifest')),
159 159 ]
160 160
161 161 # special string such that everything below this line will be ignored in the
162 162 # editor text
163 163 _linebelow = "^HG: ------------------------ >8 ------------------------$"
164 164
165 165 def ishunk(x):
166 166 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
167 167 return isinstance(x, hunkclasses)
168 168
169 169 def newandmodified(chunks, originalchunks):
170 170 newlyaddedandmodifiedfiles = set()
171 171 for chunk in chunks:
172 172 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
173 173 originalchunks:
174 174 newlyaddedandmodifiedfiles.add(chunk.header.filename())
175 175 return newlyaddedandmodifiedfiles
176 176
177 177 def parsealiases(cmd):
178 178 return cmd.lstrip("^").split("|")
179 179
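
# Editor's sketch (illustration, not part of the upstream diff): a command-table
# key such as "^log|history" carries a '^' priority marker plus '|'-separated
# aliases; parsealiases() above reduces it to the plain list of names.
def _parsealiases_demo():
    assert parsealiases("^log|history") == ["log", "history"]
    assert parsealiases("status") == ["status"]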
180 180 def setupwrapcolorwrite(ui):
181 181 # wrap ui.write so diff output can be labeled/colorized
182 182 def wrapwrite(orig, *args, **kw):
183 183 label = kw.pop('label', '')
184 184 for chunk, l in patch.difflabel(lambda: args):
185 185 orig(chunk, label=label + l)
186 186
187 187 oldwrite = ui.write
188 188 def wrap(*args, **kwargs):
189 189 return wrapwrite(oldwrite, *args, **kwargs)
190 190 setattr(ui, 'write', wrap)
191 191 return oldwrite
192 192
193 193 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
194 194 if usecurses:
195 195 if testfile:
196 196 recordfn = crecordmod.testdecorator(testfile,
197 197 crecordmod.testchunkselector)
198 198 else:
199 199 recordfn = crecordmod.chunkselector
200 200
201 201 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
202 202
203 203 else:
204 204 return patch.filterpatch(ui, originalhunks, operation)
205 205
206 206 def recordfilter(ui, originalhunks, operation=None):
207 207     """ Prompts the user to filter the originalhunks and returns a list of
208 208     selected hunks.
209 209     *operation* is used to build ui messages to indicate to the user what
210 210     kind of filtering they are doing: reverting, committing, shelving, etc.
211 211 (see patch.filterpatch).
212 212 """
213 213 usecurses = crecordmod.checkcurses(ui)
214 214 testfile = ui.config('experimental', 'crecordtest', None)
215 215 oldwrite = setupwrapcolorwrite(ui)
216 216 try:
217 217 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
218 218 testfile, operation)
219 219 finally:
220 220 ui.write = oldwrite
221 221 return newchunks, newopts
222 222
223 223 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
224 224 filterfn, *pats, **opts):
225 225 from . import merge as mergemod
226 226 opts = pycompat.byteskwargs(opts)
227 227 if not ui.interactive():
228 228 if cmdsuggest:
229 229 msg = _('running non-interactively, use %s instead') % cmdsuggest
230 230 else:
231 231 msg = _('running non-interactively')
232 232 raise error.Abort(msg)
233 233
234 234 # make sure username is set before going interactive
235 235 if not opts.get('user'):
236 236 ui.username() # raise exception, username not provided
237 237
238 238 def recordfunc(ui, repo, message, match, opts):
239 239         """This is the generic record driver.
240 240
241 241         Its job is to interactively filter local changes, and
242 242         accordingly prepare the working directory into a state in which the
243 243 job can be delegated to a non-interactive commit command such as
244 244 'commit' or 'qrefresh'.
245 245
246 246 After the actual job is done by non-interactive command, the
247 247 working directory is restored to its original state.
248 248
249 249 In the end we'll record interesting changes, and everything else
250 250 will be left in place, so the user can continue working.
251 251 """
252 252
253 253 checkunfinished(repo, commit=True)
254 254 wctx = repo[None]
255 255 merge = len(wctx.parents()) > 1
256 256 if merge:
257 257 raise error.Abort(_('cannot partially commit a merge '
258 258 '(use "hg commit" instead)'))
259 259
260 260 def fail(f, msg):
261 261 raise error.Abort('%s: %s' % (f, msg))
262 262
263 263 force = opts.get('force')
264 264 if not force:
265 265 vdirs = []
266 266 match.explicitdir = vdirs.append
267 267 match.bad = fail
268 268
269 269 status = repo.status(match=match)
270 270 if not force:
271 271 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
272 272 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
273 273 diffopts.nodates = True
274 274 diffopts.git = True
275 275 diffopts.showfunc = True
276 276 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
277 277 originalchunks = patch.parsepatch(originaldiff)
278 278
279 279         # 1. filter patch, since we are intending to apply a subset of it
280 280 try:
281 281 chunks, newopts = filterfn(ui, originalchunks)
282 282 except patch.PatchError as err:
283 283 raise error.Abort(_('error parsing patch: %s') % err)
284 284 opts.update(newopts)
285 285
286 286 # We need to keep a backup of files that have been newly added and
287 287 # modified during the recording process because there is a previous
288 288 # version without the edit in the workdir
289 289 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
290 290 contenders = set()
291 291 for h in chunks:
292 292 try:
293 293 contenders.update(set(h.files()))
294 294 except AttributeError:
295 295 pass
296 296
297 297 changed = status.modified + status.added + status.removed
298 298 newfiles = [f for f in changed if f in contenders]
299 299 if not newfiles:
300 300 ui.status(_('no changes to record\n'))
301 301 return 0
302 302
303 303 modified = set(status.modified)
304 304
305 305 # 2. backup changed files, so we can restore them in the end
306 306
307 307 if backupall:
308 308 tobackup = changed
309 309 else:
310 310 tobackup = [f for f in newfiles if f in modified or f in \
311 311 newlyaddedandmodifiedfiles]
312 312 backups = {}
313 313 if tobackup:
314 314 backupdir = repo.vfs.join('record-backups')
315 315 try:
316 316 os.mkdir(backupdir)
317 317 except OSError as err:
318 318 if err.errno != errno.EEXIST:
319 319 raise
320 320 try:
321 321 # backup continues
322 322 for f in tobackup:
323 323 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
324 324 dir=backupdir)
325 325 os.close(fd)
326 326 ui.debug('backup %r as %r\n' % (f, tmpname))
327 327 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
328 328 backups[f] = tmpname
329 329
330 330 fp = stringio()
331 331 for c in chunks:
332 332 fname = c.filename()
333 333 if fname in backups:
334 334 c.write(fp)
335 335 dopatch = fp.tell()
336 336 fp.seek(0)
337 337
338 338 # 2.5 optionally review / modify patch in text editor
339 339 if opts.get('review', False):
340 340 patchtext = (crecordmod.diffhelptext
341 341 + crecordmod.patchhelptext
342 342 + fp.read())
343 343 reviewedpatch = ui.edit(patchtext, "",
344 344 extra={"suffix": ".diff"},
345 345 repopath=repo.path)
346 346 fp.truncate(0)
347 347 fp.write(reviewedpatch)
348 348 fp.seek(0)
349 349
350 350 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
351 351 # 3a. apply filtered patch to clean repo (clean)
352 352 if backups:
353 353 # Equivalent to hg.revert
354 354 m = scmutil.matchfiles(repo, backups.keys())
355 355 mergemod.update(repo, repo.dirstate.p1(),
356 356 False, True, matcher=m)
357 357
358 358 # 3b. (apply)
359 359 if dopatch:
360 360 try:
361 361 ui.debug('applying patch\n')
362 362 ui.debug(fp.getvalue())
363 363 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
364 364 except patch.PatchError as err:
365 365 raise error.Abort(str(err))
366 366 del fp
367 367
368 368 # 4. We prepared working directory according to filtered
369 369 # patch. Now is the time to delegate the job to
370 370 # commit/qrefresh or the like!
371 371
372 372 # Make all of the pathnames absolute.
373 373 newfiles = [repo.wjoin(nf) for nf in newfiles]
374 374 return commitfunc(ui, repo, *newfiles, **opts)
375 375 finally:
376 376 # 5. finally restore backed-up files
377 377 try:
378 378 dirstate = repo.dirstate
379 379 for realname, tmpname in backups.iteritems():
380 380 ui.debug('restoring %r to %r\n' % (tmpname, realname))
381 381
382 382 if dirstate[realname] == 'n':
383 383 # without normallookup, restoring timestamp
384 384 # may cause partially committed files
385 385 # to be treated as unmodified
386 386 dirstate.normallookup(realname)
387 387
388 388 # copystat=True here and above are a hack to trick any
389 389                     # editors that have f open into thinking we haven't modified it.
390 390 #
391 391                     # Also note that this is racy, as an editor could notice the
392 392 # file's mtime before we've finished writing it.
393 393 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
394 394 os.unlink(tmpname)
395 395 if tobackup:
396 396 os.rmdir(backupdir)
397 397 except OSError:
398 398 pass
399 399
400 400 def recordinwlock(ui, repo, message, match, opts):
401 401 with repo.wlock():
402 402 return recordfunc(ui, repo, message, match, opts)
403 403
404 404 return commit(ui, repo, recordinwlock, pats, opts)
405 405
406 406 def findpossible(cmd, table, strict=False):
407 407 """
408 408 Return cmd -> (aliases, command table entry)
409 409 for each matching command.
410 410 Return debug commands (or their aliases) only if no normal command matches.
411 411 """
412 412 choice = {}
413 413 debugchoice = {}
414 414
415 415 if cmd in table:
416 416 # short-circuit exact matches, "log" alias beats "^log|history"
417 417 keys = [cmd]
418 418 else:
419 419 keys = table.keys()
420 420
421 421 allcmds = []
422 422 for e in keys:
423 423 aliases = parsealiases(e)
424 424 allcmds.extend(aliases)
425 425 found = None
426 426 if cmd in aliases:
427 427 found = cmd
428 428 elif not strict:
429 429 for a in aliases:
430 430 if a.startswith(cmd):
431 431 found = a
432 432 break
433 433 if found is not None:
434 434 if aliases[0].startswith("debug") or found.startswith("debug"):
435 435 debugchoice[found] = (aliases, table[e])
436 436 else:
437 437 choice[found] = (aliases, table[e])
438 438
439 439 if not choice and debugchoice:
440 440 choice = debugchoice
441 441
442 442 return choice, allcmds
443 443
444 444 def findcmd(cmd, table, strict=True):
445 445 """Return (aliases, command table entry) for command string."""
446 446 choice, allcmds = findpossible(cmd, table, strict)
447 447
448 448 if cmd in choice:
449 449 return choice[cmd]
450 450
451 451 if len(choice) > 1:
452 452 clist = choice.keys()
453 453 clist.sort()
454 454 raise error.AmbiguousCommand(cmd, clist)
455 455
456 456 if choice:
457 457 return choice.values()[0]
458 458
459 459 raise error.UnknownCommand(cmd, allcmds)
460 460
461 461 def findrepo(p):
462 462 while not os.path.isdir(os.path.join(p, ".hg")):
463 463 oldp, p = p, os.path.dirname(p)
464 464 if p == oldp:
465 465 return None
466 466
467 467 return p
468 468
469 469 def bailifchanged(repo, merge=True, hint=None):
470 470 """ enforce the precondition that working directory must be clean.
471 471
472 472 'merge' can be set to false if a pending uncommitted merge should be
473 473 ignored (such as when 'update --check' runs).
474 474
475 475 'hint' is the usual hint given to Abort exception.
476 476 """
477 477
478 478 if merge and repo.dirstate.p2() != nullid:
479 479 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
480 480 modified, added, removed, deleted = repo.status()[:4]
481 481 if modified or added or removed or deleted:
482 482 raise error.Abort(_('uncommitted changes'), hint=hint)
483 483 ctx = repo[None]
484 484 for s in sorted(ctx.substate):
485 485 ctx.sub(s).bailifchanged(hint=hint)
486 486
487 487 def logmessage(ui, opts):
488 488 """ get the log message according to -m and -l option """
489 489 message = opts.get('message')
490 490 logfile = opts.get('logfile')
491 491
492 492 if message and logfile:
493 493 raise error.Abort(_('options --message and --logfile are mutually '
494 494 'exclusive'))
495 495 if not message and logfile:
496 496 try:
497 497 if logfile == '-':
498 498 message = ui.fin.read()
499 499 else:
500 500 message = '\n'.join(util.readfile(logfile).splitlines())
501 501 except IOError as inst:
502 502 raise error.Abort(_("can't read commit message '%s': %s") %
503 503 (logfile, inst.strerror))
504 504 return message
505 505
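
# Editor's sketch (illustration, not part of the upstream diff): logmessage()
# resolves -m/-l into a single message; passing both raises Abort, and an empty
# --logfile value simply falls through to the -m text.
def _logmessage_demo(ui):
    assert logmessage(ui, {'message': 'fix bug', 'logfile': ''}) == 'fix bug'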
506 506 def mergeeditform(ctxorbool, baseformname):
507 507 """return appropriate editform name (referencing a committemplate)
508 508
509 509 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
510 510 merging is committed.
511 511
512 512 This returns baseformname with '.merge' appended if it is a merge,
513 513 otherwise '.normal' is appended.
514 514 """
515 515 if isinstance(ctxorbool, bool):
516 516 if ctxorbool:
517 517 return baseformname + ".merge"
518 518 elif 1 < len(ctxorbool.parents()):
519 519 return baseformname + ".merge"
520 520
521 521 return baseformname + ".normal"
522 522
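
# Editor's sketch (illustration, not part of the upstream diff): mergeeditform()
# accepts either a bool or a ctx; a truthy bool (or a two-parent ctx) selects
# the '.merge' committemplate form, anything else the '.normal' form.
def _mergeeditform_demo():
    assert mergeeditform(True, 'commit') == 'commit.merge'
    assert mergeeditform(False, 'commit') == 'commit.normal'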
523 523 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
524 524 editform='', **opts):
525 525 """get appropriate commit message editor according to '--edit' option
526 526
527 527     'finishdesc' is a function to be called with the edited commit message
528 528     (= 'description' of the new changeset) just after editing, but
529 529     before checking emptiness. It should return the actual text to be
530 530     stored into history. This allows changing the description before
531 531     storing.
532 532
533 533     'extramsg' is an extra message to be shown in the editor instead of
534 534     the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
535 535     are automatically added.
536 536
537 537 'editform' is a dot-separated list of names, to distinguish
538 538 the purpose of commit text editing.
539 539
540 540 'getcommiteditor' returns 'commitforceeditor' regardless of
541 541 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
542 542     they are specific to MQ usage.
543 543 """
544 544 if edit or finishdesc or extramsg:
545 545 return lambda r, c, s: commitforceeditor(r, c, s,
546 546 finishdesc=finishdesc,
547 547 extramsg=extramsg,
548 548 editform=editform)
549 549 elif editform:
550 550 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
551 551 else:
552 552 return commiteditor
553 553
554 554 def loglimit(opts):
555 555 """get the log limit according to option -l/--limit"""
556 556 limit = opts.get('limit')
557 557 if limit:
558 558 try:
559 559 limit = int(limit)
560 560 except ValueError:
561 561 raise error.Abort(_('limit must be a positive integer'))
562 562 if limit <= 0:
563 563 raise error.Abort(_('limit must be positive'))
564 564 else:
565 565 limit = None
566 566 return limit
567 567
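
# Editor's sketch (illustration, not part of the upstream diff): loglimit()
# treats a missing/empty --limit as "no limit" and aborts on non-positive or
# non-integer values.
def _loglimit_demo():
    assert loglimit({'limit': '3'}) == 3
    assert loglimit({'limit': ''}) is None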
568 568 def makefilename(repo, pat, node, desc=None,
569 569 total=None, seqno=None, revwidth=None, pathname=None):
570 570 node_expander = {
571 571 'H': lambda: hex(node),
572 572 'R': lambda: str(repo.changelog.rev(node)),
573 573 'h': lambda: short(node),
574 574 'm': lambda: re.sub('[^\w]', '_', str(desc))
575 575 }
576 576 expander = {
577 577 '%': lambda: '%',
578 578 'b': lambda: os.path.basename(repo.root),
579 579 }
580 580
581 581 try:
582 582 if node:
583 583 expander.update(node_expander)
584 584 if node:
585 585 expander['r'] = (lambda:
586 586 str(repo.changelog.rev(node)).zfill(revwidth or 0))
587 587 if total is not None:
588 588 expander['N'] = lambda: str(total)
589 589 if seqno is not None:
590 590 expander['n'] = lambda: str(seqno)
591 591 if total is not None and seqno is not None:
592 592 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
593 593 if pathname is not None:
594 594 expander['s'] = lambda: os.path.basename(pathname)
595 595 expander['d'] = lambda: os.path.dirname(pathname) or '.'
596 596 expander['p'] = lambda: pathname
597 597
598 598 newname = []
599 599 patlen = len(pat)
600 600 i = 0
601 601 while i < patlen:
602 602 c = pat[i:i + 1]
603 603 if c == '%':
604 604 i += 1
605 605 c = pat[i:i + 1]
606 606 c = expander[c]()
607 607 newname.append(c)
608 608 i += 1
609 609 return ''.join(newname)
610 610 except KeyError as inst:
611 611 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
612 612 inst.args[0])
613 613
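
# Editor's sketch (illustration, not part of the upstream diff): makefilename()
# resolves '%'-specifiers such as %h (short hash), %R (rev), %n (zero-padded
# sequence number) and %b (repo basename). A self-contained re-implementation
# of just the expansion loop, driven by a hypothetical value table:
def _expand_demo(pat, table):
    newname, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = table[pat[i]]()   # same lookup-and-call as the expander dict
        newname.append(c)
        i += 1
    return ''.join(newname)

# _expand_demo('hg-%h.patch', {'h': lambda: '7feaf555'}) == 'hg-7feaf555.patch'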
614 614 class _unclosablefile(object):
615 615 def __init__(self, fp):
616 616 self._fp = fp
617 617
618 618 def close(self):
619 619 pass
620 620
621 621 def __iter__(self):
622 622 return iter(self._fp)
623 623
624 624 def __getattr__(self, attr):
625 625 return getattr(self._fp, attr)
626 626
627 627 def __enter__(self):
628 628 return self
629 629
630 630 def __exit__(self, exc_type, exc_value, exc_tb):
631 631 pass
632 632
633 633 def makefileobj(repo, pat, node=None, desc=None, total=None,
634 634 seqno=None, revwidth=None, mode='wb', modemap=None,
635 635 pathname=None):
636 636
637 637 writable = mode not in ('r', 'rb')
638 638
639 639 if not pat or pat == '-':
640 640 if writable:
641 641 fp = repo.ui.fout
642 642 else:
643 643 fp = repo.ui.fin
644 644 return _unclosablefile(fp)
645 645 if util.safehasattr(pat, 'write') and writable:
646 646 return pat
647 647 if util.safehasattr(pat, 'read') and 'r' in mode:
648 648 return pat
649 649 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
650 650 if modemap is not None:
651 651 mode = modemap.get(fn, mode)
652 652 if mode == 'wb':
653 653 modemap[fn] = 'ab'
654 654 return open(fn, mode)
655 655
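
# Editor's sketch (illustration, not part of the upstream diff): with an empty
# pattern or '-', makefileobj() hands back ui.fout/ui.fin wrapped in
# _unclosablefile so callers may close() unconditionally; any other pattern is
# expanded by makefilename() and opened on disk.
def _makefileobj_demo(repo):
    fp = makefileobj(repo, '-', mode='wb')   # writable -> wraps repo.ui.fout
    fp.write('hello\n')
    fp.close()                               # no-op for the stdout wrapper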
656 656 def openrevlog(repo, cmd, file_, opts):
657 657 """opens the changelog, manifest, a filelog or a given revlog"""
658 658 cl = opts['changelog']
659 659 mf = opts['manifest']
660 660 dir = opts['dir']
661 661 msg = None
662 662 if cl and mf:
663 663 msg = _('cannot specify --changelog and --manifest at the same time')
664 664 elif cl and dir:
665 665 msg = _('cannot specify --changelog and --dir at the same time')
666 666 elif cl or mf or dir:
667 667 if file_:
668 668 msg = _('cannot specify filename with --changelog or --manifest')
669 669 elif not repo:
670 670 msg = _('cannot specify --changelog or --manifest or --dir '
671 671 'without a repository')
672 672 if msg:
673 673 raise error.Abort(msg)
674 674
675 675 r = None
676 676 if repo:
677 677 if cl:
678 678 r = repo.unfiltered().changelog
679 679 elif dir:
680 680 if 'treemanifest' not in repo.requirements:
681 681 raise error.Abort(_("--dir can only be used on repos with "
682 682 "treemanifest enabled"))
683 683 dirlog = repo.manifestlog._revlog.dirlog(dir)
684 684 if len(dirlog):
685 685 r = dirlog
686 686 elif mf:
687 687 r = repo.manifestlog._revlog
688 688 elif file_:
689 689 filelog = repo.file(file_)
690 690 if len(filelog):
691 691 r = filelog
692 692 if not r:
693 693 if not file_:
694 694 raise error.CommandError(cmd, _('invalid arguments'))
695 695 if not os.path.isfile(file_):
696 696 raise error.Abort(_("revlog '%s' not found") % file_)
697 697 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
698 698 file_[:-2] + ".i")
699 699 return r
700 700
701 701 def copy(ui, repo, pats, opts, rename=False):
702 702 # called with the repo lock held
703 703 #
704 704 # hgsep => pathname that uses "/" to separate directories
705 705 # ossep => pathname that uses os.sep to separate directories
706 706 cwd = repo.getcwd()
707 707 targets = {}
708 708 after = opts.get("after")
709 709 dryrun = opts.get("dry_run")
710 710 wctx = repo[None]
711 711
712 712 def walkpat(pat):
713 713 srcs = []
714 714 if after:
715 715 badstates = '?'
716 716 else:
717 717 badstates = '?r'
718 718 m = scmutil.match(wctx, [pat], opts, globbed=True)
719 719 for abs in wctx.walk(m):
720 720 state = repo.dirstate[abs]
721 721 rel = m.rel(abs)
722 722 exact = m.exact(abs)
723 723 if state in badstates:
724 724 if exact and state == '?':
725 725 ui.warn(_('%s: not copying - file is not managed\n') % rel)
726 726 if exact and state == 'r':
727 727 ui.warn(_('%s: not copying - file has been marked for'
728 728 ' remove\n') % rel)
729 729 continue
730 730 # abs: hgsep
731 731 # rel: ossep
732 732 srcs.append((abs, rel, exact))
733 733 return srcs
734 734
735 735 # abssrc: hgsep
736 736 # relsrc: ossep
737 737 # otarget: ossep
738 738 def copyfile(abssrc, relsrc, otarget, exact):
739 739 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
740 740 if '/' in abstarget:
741 741 # We cannot normalize abstarget itself, this would prevent
742 742 # case only renames, like a => A.
743 743 abspath, absname = abstarget.rsplit('/', 1)
744 744 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
745 745 reltarget = repo.pathto(abstarget, cwd)
746 746 target = repo.wjoin(abstarget)
747 747 src = repo.wjoin(abssrc)
748 748 state = repo.dirstate[abstarget]
749 749
750 750 scmutil.checkportable(ui, abstarget)
751 751
752 752 # check for collisions
753 753 prevsrc = targets.get(abstarget)
754 754 if prevsrc is not None:
755 755 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
756 756 (reltarget, repo.pathto(abssrc, cwd),
757 757 repo.pathto(prevsrc, cwd)))
758 758 return
759 759
760 760 # check for overwrites
761 761 exists = os.path.lexists(target)
762 762 samefile = False
763 763 if exists and abssrc != abstarget:
764 764 if (repo.dirstate.normalize(abssrc) ==
765 765 repo.dirstate.normalize(abstarget)):
766 766 if not rename:
767 767 ui.warn(_("%s: can't copy - same file\n") % reltarget)
768 768 return
769 769 exists = False
770 770 samefile = True
771 771
772 772 if not after and exists or after and state in 'mn':
773 773 if not opts['force']:
774 774 if state in 'mn':
775 775 msg = _('%s: not overwriting - file already committed\n')
776 776 if after:
777 777 flags = '--after --force'
778 778 else:
779 779 flags = '--force'
780 780 if rename:
781 781 hint = _('(hg rename %s to replace the file by '
782 782 'recording a rename)\n') % flags
783 783 else:
784 784 hint = _('(hg copy %s to replace the file by '
785 785 'recording a copy)\n') % flags
786 786 else:
787 787 msg = _('%s: not overwriting - file exists\n')
788 788 if rename:
789 789 hint = _('(hg rename --after to record the rename)\n')
790 790 else:
791 791 hint = _('(hg copy --after to record the copy)\n')
792 792 ui.warn(msg % reltarget)
793 793 ui.warn(hint)
794 794 return
795 795
796 796 if after:
797 797 if not exists:
798 798 if rename:
799 799 ui.warn(_('%s: not recording move - %s does not exist\n') %
800 800 (relsrc, reltarget))
801 801 else:
802 802 ui.warn(_('%s: not recording copy - %s does not exist\n') %
803 803 (relsrc, reltarget))
804 804 return
805 805 elif not dryrun:
806 806 try:
807 807 if exists:
808 808 os.unlink(target)
809 809 targetdir = os.path.dirname(target) or '.'
810 810 if not os.path.isdir(targetdir):
811 811 os.makedirs(targetdir)
812 812 if samefile:
813 813 tmp = target + "~hgrename"
814 814 os.rename(src, tmp)
815 815 os.rename(tmp, target)
816 816 else:
817 817 util.copyfile(src, target)
818 818 srcexists = True
819 819 except IOError as inst:
820 820 if inst.errno == errno.ENOENT:
821 821 ui.warn(_('%s: deleted in working directory\n') % relsrc)
822 822 srcexists = False
823 823 else:
824 824 ui.warn(_('%s: cannot copy - %s\n') %
825 825 (relsrc, inst.strerror))
826 826 return True # report a failure
827 827
828 828 if ui.verbose or not exact:
829 829 if rename:
830 830 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
831 831 else:
832 832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
833 833
834 834 targets[abstarget] = abssrc
835 835
836 836 # fix up dirstate
837 837 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
838 838 dryrun=dryrun, cwd=cwd)
839 839 if rename and not dryrun:
840 840 if not after and srcexists and not samefile:
841 841 repo.wvfs.unlinkpath(abssrc)
842 842 wctx.forget([abssrc])
843 843
844 844 # pat: ossep
845 845 # dest ossep
846 846 # srcs: list of (hgsep, hgsep, ossep, bool)
847 847 # return: function that takes hgsep and returns ossep
848 848 def targetpathfn(pat, dest, srcs):
849 849 if os.path.isdir(pat):
850 850 abspfx = pathutil.canonpath(repo.root, cwd, pat)
851 851 abspfx = util.localpath(abspfx)
852 852 if destdirexists:
853 853 striplen = len(os.path.split(abspfx)[0])
854 854 else:
855 855 striplen = len(abspfx)
856 856 if striplen:
857 857 striplen += len(pycompat.ossep)
858 858 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
859 859 elif destdirexists:
860 860 res = lambda p: os.path.join(dest,
861 861 os.path.basename(util.localpath(p)))
862 862 else:
863 863 res = lambda p: dest
864 864 return res
865 865
866 866 # pat: ossep
867 867 # dest ossep
868 868 # srcs: list of (hgsep, hgsep, ossep, bool)
869 869 # return: function that takes hgsep and returns ossep
870 870 def targetpathafterfn(pat, dest, srcs):
871 871 if matchmod.patkind(pat):
872 872 # a mercurial pattern
873 873 res = lambda p: os.path.join(dest,
874 874 os.path.basename(util.localpath(p)))
875 875 else:
876 876 abspfx = pathutil.canonpath(repo.root, cwd, pat)
877 877 if len(abspfx) < len(srcs[0][0]):
878 878 # A directory. Either the target path contains the last
879 879 # component of the source path or it does not.
880 880 def evalpath(striplen):
881 881 score = 0
882 882 for s in srcs:
883 883 t = os.path.join(dest, util.localpath(s[0])[striplen:])
884 884 if os.path.lexists(t):
885 885 score += 1
886 886 return score
887 887
888 888 abspfx = util.localpath(abspfx)
889 889 striplen = len(abspfx)
890 890 if striplen:
891 891 striplen += len(pycompat.ossep)
892 892 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
893 893 score = evalpath(striplen)
894 894 striplen1 = len(os.path.split(abspfx)[0])
895 895 if striplen1:
896 896 striplen1 += len(pycompat.ossep)
897 897 if evalpath(striplen1) > score:
898 898 striplen = striplen1
899 899 res = lambda p: os.path.join(dest,
900 900 util.localpath(p)[striplen:])
901 901 else:
902 902 # a file
903 903 if destdirexists:
904 904 res = lambda p: os.path.join(dest,
905 905 os.path.basename(util.localpath(p)))
906 906 else:
907 907 res = lambda p: dest
908 908 return res
909 909
910 910 pats = scmutil.expandpats(pats)
911 911 if not pats:
912 912 raise error.Abort(_('no source or destination specified'))
913 913 if len(pats) == 1:
914 914 raise error.Abort(_('no destination specified'))
915 915 dest = pats.pop()
916 916 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
917 917 if not destdirexists:
918 918 if len(pats) > 1 or matchmod.patkind(pats[0]):
919 919 raise error.Abort(_('with multiple sources, destination must be an '
920 920 'existing directory'))
921 921 if util.endswithsep(dest):
922 922 raise error.Abort(_('destination %s is not a directory') % dest)
923 923
924 924 tfn = targetpathfn
925 925 if after:
926 926 tfn = targetpathafterfn
927 927 copylist = []
928 928 for pat in pats:
929 929 srcs = walkpat(pat)
930 930 if not srcs:
931 931 continue
932 932 copylist.append((tfn(pat, dest, srcs), srcs))
933 933 if not copylist:
934 934 raise error.Abort(_('no files to copy'))
935 935
936 936 errors = 0
937 937 for targetpath, srcs in copylist:
938 938 for abssrc, relsrc, exact in srcs:
939 939 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
940 940 errors += 1
941 941
942 942 if errors:
943 943 ui.warn(_('(consider using --after)\n'))
944 944
945 945 return errors != 0
946 946
947 947 ## facility to let extensions process additional data into an import patch
948 948 # list of identifiers to be executed in order
949 949 extrapreimport = [] # run before commit
950 950 extrapostimport = [] # run after commit
951 951 # mapping from identifier to actual import function
952 952 #
953 953 # 'preimport' are run before the commit is made and are provided the following
954 954 # arguments:
955 955 # - repo: the localrepository instance,
956 956 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
957 957 # - extra: the future extra dictionary of the changeset, please mutate it,
958 958 # - opts: the import options.
959 959 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
960 960 # mutation of in memory commit and more. Feel free to rework the code to get
961 961 # there.
962 962 extrapreimportmap = {}
963 963 # 'postimport' are run after the commit is made and are provided the following
964 964 # argument:
965 965 # - ctx: the changectx created by import.
966 966 extrapostimportmap = {}
967 967
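
# Editor's sketch (illustration, not part of the upstream diff): how an
# extension might hook the import machinery described above. The 'topic'
# patch-header field and the 'import_topic' extra key are hypothetical.
def _copytopic(repo, patchdata, extra, opts):
    # 'preimport' contract: mutate 'extra' before the changeset is committed
    if 'topic' in patchdata:
        extra['import_topic'] = patchdata['topic']

# registration, as an extension would do at load time:
#     extrapreimport.append('topic')
#     extrapreimportmap['topic'] = _copytopic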
968 968 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
969 969 """Utility function used by commands.import to import a single patch
970 970
971 971 This function is explicitly defined here to help the evolve extension to
972 972 wrap this part of the import logic.
973 973
974 974     The API is currently a bit ugly because it is a simple code translation from
975 975 the import command. Feel free to make it better.
976 976
977 977 :hunk: a patch (as a binary string)
978 978 :parents: nodes that will be parent of the created commit
979 979     :opts: the full dict of options passed to the import command
980 980 :msgs: list to save commit message to.
981 981 (used in case we need to save it when failing)
982 982     :updatefunc: a function that updates a repo to a given node
983 983 updatefunc(<repo>, <node>)
984 984 """
985 985 # avoid cycle context -> subrepo -> cmdutil
986 986 from . import context
987 987 extractdata = patch.extract(ui, hunk)
988 988 tmpname = extractdata.get('filename')
989 989 message = extractdata.get('message')
990 990 user = opts.get('user') or extractdata.get('user')
991 991 date = opts.get('date') or extractdata.get('date')
992 992 branch = extractdata.get('branch')
993 993 nodeid = extractdata.get('nodeid')
994 994 p1 = extractdata.get('p1')
995 995 p2 = extractdata.get('p2')
996 996
997 997 nocommit = opts.get('no_commit')
998 998 importbranch = opts.get('import_branch')
999 999 update = not opts.get('bypass')
1000 1000 strip = opts["strip"]
1001 1001 prefix = opts["prefix"]
1002 1002 sim = float(opts.get('similarity') or 0)
1003 1003 if not tmpname:
1004 1004 return (None, None, False)
1005 1005
1006 1006 rejects = False
1007 1007
1008 1008 try:
1009 1009 cmdline_message = logmessage(ui, opts)
1010 1010 if cmdline_message:
1011 1011 # pickup the cmdline msg
1012 1012 message = cmdline_message
1013 1013 elif message:
1014 1014 # pickup the patch msg
1015 1015 message = message.strip()
1016 1016 else:
1017 1017 # launch the editor
1018 1018 message = None
1019 1019 ui.debug('message:\n%s\n' % message)
1020 1020
1021 1021 if len(parents) == 1:
1022 1022 parents.append(repo[nullid])
1023 1023 if opts.get('exact'):
1024 1024 if not nodeid or not p1:
1025 1025 raise error.Abort(_('not a Mercurial patch'))
1026 1026 p1 = repo[p1]
1027 1027 p2 = repo[p2 or nullid]
1028 1028 elif p2:
1029 1029 try:
1030 1030 p1 = repo[p1]
1031 1031 p2 = repo[p2]
1032 1032 # Without any options, consider p2 only if the
1033 1033 # patch is being applied on top of the recorded
1034 1034 # first parent.
1035 1035 if p1 != parents[0]:
1036 1036 p1 = parents[0]
1037 1037 p2 = repo[nullid]
1038 1038 except error.RepoError:
1039 1039 p1, p2 = parents
1040 1040 if p2.node() == nullid:
1041 1041 ui.warn(_("warning: import the patch as a normal revision\n"
1042 1042 "(use --exact to import the patch as a merge)\n"))
1043 1043 else:
1044 1044 p1, p2 = parents
1045 1045
1046 1046 n = None
1047 1047 if update:
1048 1048 if p1 != parents[0]:
1049 1049 updatefunc(repo, p1.node())
1050 1050 if p2 != parents[1]:
1051 1051 repo.setparents(p1.node(), p2.node())
1052 1052
1053 1053 if opts.get('exact') or importbranch:
1054 1054 repo.dirstate.setbranch(branch or 'default')
1055 1055
1056 1056 partial = opts.get('partial', False)
1057 1057 files = set()
1058 1058 try:
1059 1059 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1060 1060 files=files, eolmode=None, similarity=sim / 100.0)
1061 1061 except patch.PatchError as e:
1062 1062 if not partial:
1063 1063 raise error.Abort(str(e))
1064 1064 if partial:
1065 1065 rejects = True
1066 1066
1067 1067 files = list(files)
1068 1068 if nocommit:
1069 1069 if message:
1070 1070 msgs.append(message)
1071 1071 else:
1072 1072 if opts.get('exact') or p2:
1073 1073 # If you got here, you either use --force and know what
1074 1074 # you are doing or used --exact or a merge patch while
1075 1075 # being updated to its first parent.
1076 1076 m = None
1077 1077 else:
1078 1078 m = scmutil.matchfiles(repo, files or [])
1079 1079 editform = mergeeditform(repo[None], 'import.normal')
1080 1080 if opts.get('exact'):
1081 1081 editor = None
1082 1082 else:
1083 1083 editor = getcommiteditor(editform=editform, **opts)
1084 1084 extra = {}
1085 1085 for idfunc in extrapreimport:
1086 1086 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1087 1087 overrides = {}
1088 1088 if partial:
1089 1089 overrides[('ui', 'allowemptycommit')] = True
1090 1090 with repo.ui.configoverride(overrides, 'import'):
1091 1091 n = repo.commit(message, user,
1092 1092 date, match=m,
1093 1093 editor=editor, extra=extra)
1094 1094 for idfunc in extrapostimport:
1095 1095 extrapostimportmap[idfunc](repo[n])
1096 1096 else:
1097 1097 if opts.get('exact') or importbranch:
1098 1098 branch = branch or 'default'
1099 1099 else:
1100 1100 branch = p1.branch()
1101 1101 store = patch.filestore()
1102 1102 try:
1103 1103 files = set()
1104 1104 try:
1105 1105 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1106 1106 files, eolmode=None)
1107 1107 except patch.PatchError as e:
1108 1108 raise error.Abort(str(e))
1109 1109 if opts.get('exact'):
1110 1110 editor = None
1111 1111 else:
1112 1112 editor = getcommiteditor(editform='import.bypass')
1113 1113 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1114 1114 message,
1115 1115 user,
1116 1116 date,
1117 1117 branch, files, store,
1118 1118 editor=editor)
1119 1119 n = memctx.commit()
1120 1120 finally:
1121 1121 store.close()
1122 1122 if opts.get('exact') and nocommit:
1123 1123 # --exact with --no-commit is still useful in that it does merge
1124 1124 # and branch bits
1125 1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1126 1126 elif opts.get('exact') and hex(n) != nodeid:
1127 1127 raise error.Abort(_('patch is damaged or loses information'))
1128 1128 msg = _('applied to working directory')
1129 1129 if n:
1130 1130 # i18n: refers to a short changeset id
1131 1131 msg = _('created %s') % short(n)
1132 1132 return (msg, n, rejects)
1133 1133 finally:
1134 1134 os.unlink(tmpname)
1135 1135
1136 1136 # facility to let extensions include additional data in an exported patch
1137 1137 # list of identifiers to be executed in order
1138 1138 extraexport = []
1139 1139 # mapping from identifier to actual export function
1140 1140 # function has to return a string to be added to the header or None
1141 1141 # it is given two arguments (sequencenumber, changectx)
1142 1142 extraexportmap = {}
1143 1143
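
# Editor's sketch (illustration, not part of the upstream diff): an export
# header hook following the contract above -- it is called with (seqno, ctx)
# and returns a string (emitted as '# <string>') or None. The 'topic' extra
# key is hypothetical.
def _topicheader(seqno, ctx):
    topic = ctx.extra().get('topic')
    return 'Topic %s' % topic if topic else None

# registration: extraexport.append('topic'); extraexportmap['topic'] = _topicheader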
1144 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1145 node = ctx.node()
1146 parents = [p.node() for p in ctx.parents() if p]
1147 branch = ctx.branch()
1148 if switch_parent:
1149 parents.reverse()
1150
1151 if parents:
1152 prev = parents[0]
1153 else:
1154 prev = nullid
1155
1156 write("# HG changeset patch\n")
1157 write("# User %s\n" % ctx.user())
1158 write("# Date %d %d\n" % ctx.date())
1159 write("# %s\n" % util.datestr(ctx.date()))
1160 if branch and branch != 'default':
1161 write("# Branch %s\n" % branch)
1162 write("# Node ID %s\n" % hex(node))
1163 write("# Parent %s\n" % hex(prev))
1164 if len(parents) > 1:
1165 write("# Parent %s\n" % hex(parents[1]))
1166
1167 for headerid in extraexport:
1168 header = extraexportmap[headerid](seqno, ctx)
1169 if header is not None:
1170 write('# %s\n' % header)
1171 write(ctx.description().rstrip())
1172 write("\n\n")
1173
1174 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1175 write(chunk, label=label)
1176
1144 1177 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1145 1178 opts=None, match=None):
1146 1179 '''export changesets as hg patches
1147 1180
1148 1181 Args:
1149 1182 repo: The repository from which we're exporting revisions.
1150 1183 revs: A list of revisions to export as revision numbers.
1151 1184 fntemplate: An optional string to use for generating patch file names.
1152 1185 fp: An optional file-like object to which patches should be written.
1153 1186 switch_parent: If True, show diffs against second parent when not nullid.
1154 1187 Default is false, which always shows diff against p1.
1155 1188 opts: diff options to use for generating the patch.
1156 1189 match: If specified, only export changes to files matching this matcher.
1157 1190
1158 1191 Returns:
1159 1192 Nothing.
1160 1193
1161 1194 Side Effect:
1162 1195 "HG Changeset Patch" data is emitted to one of the following
1163 1196 destinations:
1164 1197 fp is specified: All revs are written to the specified
1165 1198 file-like object.
1166 1199 fntemplate specified: Each rev is written to a unique file named using
1167 1200 the given template.
1168 1201 Neither fp nor template specified: All revs written to repo.ui.write()
1169 1202 '''
1170 1203
1171 1204 total = len(revs)
1172 1205 revwidth = max(len(str(rev)) for rev in revs)
1173 1206 filemode = {}
1174 1207
1175 def single(rev, seqno, fp):
1208 for seqno, rev in enumerate(revs, 1):
1176 1209 ctx = repo[rev]
1177 node = ctx.node()
1178 parents = [p.node() for p in ctx.parents() if p]
1179 branch = ctx.branch()
1180 if switch_parent:
1181 parents.reverse()
1182
1183 if parents:
1184 prev = parents[0]
1185 else:
1186 prev = nullid
1187
1188 shouldclose = False
1210 fo = None
1211 dest = '<unnamed>'
1189 1212 if not fp and len(fntemplate) > 0:
1190 1213 desc_lines = ctx.description().rstrip().split('\n')
1191 1214 desc = desc_lines[0] #Commit always has a first line.
1192 fp = makefileobj(repo, fntemplate, node, desc=desc, total=total,
1193 seqno=seqno, revwidth=revwidth, mode='wb',
1194 modemap=filemode)
1195 shouldclose = True
1196 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1197 repo.ui.note("%s\n" % fp.name)
1198
1199 if not fp:
1200 write = repo.ui.write
1201 else:
1215 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1216 total=total, seqno=seqno, revwidth=revwidth,
1217 mode='wb', modemap=filemode)
1218 dest = fo.name
1219 def write(s, **kw):
1220 fo.write(s)
1221 elif fp:
1222 dest = getattr(fp, 'name', dest)
1202 1223 def write(s, **kw):
1203 1224 fp.write(s)
1204
1205 write("# HG changeset patch\n")
1206 write("# User %s\n" % ctx.user())
1207 write("# Date %d %d\n" % ctx.date())
1208 write("# %s\n" % util.datestr(ctx.date()))
1209 if branch and branch != 'default':
1210 write("# Branch %s\n" % branch)
1211 write("# Node ID %s\n" % hex(node))
1212 write("# Parent %s\n" % hex(prev))
1213 if len(parents) > 1:
1214 write("# Parent %s\n" % hex(parents[1]))
1215
1216 for headerid in extraexport:
1217 header = extraexportmap[headerid](seqno, ctx)
1218 if header is not None:
1219 write('# %s\n' % header)
1220 write(ctx.description().rstrip())
1221 write("\n\n")
1222
1223 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1224 write(chunk, label=label)
1225
1226 if shouldclose:
1227 fp.close()
1228
1229 for seqno, rev in enumerate(revs):
1230 single(rev, seqno + 1, fp)
1225 else:
1226 write = repo.ui.write
1227 if not dest.startswith('<'):
1228 repo.ui.note("%s\n" % dest)
1229 _exportsingle(
1230 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1231 if fo is not None:
1232 fo.close()
1231 1233
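
# Editor's usage sketch (illustration, not part of the upstream diff): after the
# refactor above, export() only chooses the per-revision destination and
# delegates the formatting to _exportsingle(). Roughly what
# 'hg export -o hg-%h.patch 1 2' boils down to:
def _export_demo(ui, repo):
    export(repo, [1, 2], fntemplate='hg-%h.patch',
           opts=patch.diffallopts(ui))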
1232 1234 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1233 1235 changes=None, stat=False, fp=None, prefix='',
1234 1236 root='', listsubrepos=False):
1235 1237 '''show diff or diffstat.'''
1236 1238 if fp is None:
1237 1239 write = ui.write
1238 1240 else:
1239 1241 def write(s, **kw):
1240 1242 fp.write(s)
1241 1243
1242 1244 if root:
1243 1245 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1244 1246 else:
1245 1247 relroot = ''
1246 1248 if relroot != '':
1247 1249 # XXX relative roots currently don't work if the root is within a
1248 1250 # subrepo
1249 1251 uirelroot = match.uipath(relroot)
1250 1252 relroot += '/'
1251 1253 for matchroot in match.files():
1252 1254 if not matchroot.startswith(relroot):
1253 1255 ui.warn(_('warning: %s not inside relative root %s\n') % (
1254 1256 match.uipath(matchroot), uirelroot))
1255 1257
1256 1258 if stat:
1257 1259 diffopts = diffopts.copy(context=0)
1258 1260 width = 80
1259 1261 if not ui.plain():
1260 1262 width = ui.termwidth()
1261 1263 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1262 1264 prefix=prefix, relroot=relroot)
1263 1265 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1264 1266 width=width):
1265 1267 write(chunk, label=label)
1266 1268 else:
1267 1269 for chunk, label in patch.diffui(repo, node1, node2, match,
1268 1270 changes, diffopts, prefix=prefix,
1269 1271 relroot=relroot):
1270 1272 write(chunk, label=label)
1271 1273
1272 1274 if listsubrepos:
1273 1275 ctx1 = repo[node1]
1274 1276 ctx2 = repo[node2]
1275 1277 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1276 1278 tempnode2 = node2
1277 1279 try:
1278 1280 if node2 is not None:
1279 1281 tempnode2 = ctx2.substate[subpath][1]
1280 1282 except KeyError:
1281 1283 # A subrepo that existed in node1 was deleted between node1 and
1282 1284 # node2 (inclusive). Thus, ctx2's substate won't contain that
1283 1285 # subpath. The best we can do is to ignore it.
1284 1286 tempnode2 = None
1285 1287 submatch = matchmod.subdirmatcher(subpath, match)
1286 1288 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1287 1289 stat=stat, fp=fp, prefix=prefix)
1288 1290
1289 1291 def _changesetlabels(ctx):
1290 1292 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1291 1293 if ctx.obsolete():
1292 1294 labels.append('changeset.obsolete')
1293 1295 if ctx.troubled():
1294 1296 labels.append('changeset.troubled')
1295 1297 for trouble in ctx.troubles():
1296 1298 labels.append('trouble.%s' % trouble)
1297 1299 return ' '.join(labels)
1298 1300
1299 1301 class changeset_printer(object):
1300 1302 '''show changeset information when templating not requested.'''
1301 1303
1302 1304 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1303 1305 self.ui = ui
1304 1306 self.repo = repo
1305 1307 self.buffered = buffered
1306 1308 self.matchfn = matchfn
1307 1309 self.diffopts = diffopts
1308 1310 self.header = {}
1309 1311 self.hunk = {}
1310 1312 self.lastheader = None
1311 1313 self.footer = None
1312 1314
1313 1315 def flush(self, ctx):
1314 1316 rev = ctx.rev()
1315 1317 if rev in self.header:
1316 1318 h = self.header[rev]
1317 1319 if h != self.lastheader:
1318 1320 self.lastheader = h
1319 1321 self.ui.write(h)
1320 1322 del self.header[rev]
1321 1323 if rev in self.hunk:
1322 1324 self.ui.write(self.hunk[rev])
1323 1325 del self.hunk[rev]
1324 1326 return 1
1325 1327 return 0
1326 1328
1327 1329 def close(self):
1328 1330 if self.footer:
1329 1331 self.ui.write(self.footer)
1330 1332
1331 1333 def show(self, ctx, copies=None, matchfn=None, **props):
1332 1334 if self.buffered:
1333 1335 self.ui.pushbuffer(labeled=True)
1334 1336 self._show(ctx, copies, matchfn, props)
1335 1337 self.hunk[ctx.rev()] = self.ui.popbuffer()
1336 1338 else:
1337 1339 self._show(ctx, copies, matchfn, props)
1338 1340
1339 1341 def _show(self, ctx, copies, matchfn, props):
1340 1342 '''show a single changeset or file revision'''
1341 1343 changenode = ctx.node()
1342 1344 rev = ctx.rev()
1343 1345 if self.ui.debugflag:
1344 1346 hexfunc = hex
1345 1347 else:
1346 1348 hexfunc = short
1347 1349 # as of now, wctx.node() and wctx.rev() return None, but we want to
1348 1350 # show the same values as {node} and {rev} templatekw
1349 1351 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1350 1352
1351 1353 if self.ui.quiet:
1352 1354 self.ui.write("%d:%s\n" % revnode, label='log.node')
1353 1355 return
1354 1356
1355 1357 date = util.datestr(ctx.date())
1356 1358
1357 1359 # i18n: column positioning for "hg log"
1358 1360 self.ui.write(_("changeset: %d:%s\n") % revnode,
1359 1361 label=_changesetlabels(ctx))
1360 1362
1361 1363 # branches are shown first before any other names due to backwards
1362 1364 # compatibility
1363 1365 branch = ctx.branch()
1364 1366 # don't show the default branch name
1365 1367 if branch != 'default':
1366 1368 # i18n: column positioning for "hg log"
1367 1369 self.ui.write(_("branch: %s\n") % branch,
1368 1370 label='log.branch')
1369 1371
1370 1372 for nsname, ns in self.repo.names.iteritems():
1371 1373 # branches has special logic already handled above, so here we just
1372 1374 # skip it
1373 1375 if nsname == 'branches':
1374 1376 continue
1375 1377 # we will use the templatename as the color name since those two
1376 1378 # should be the same
1377 1379 for name in ns.names(self.repo, changenode):
1378 1380 self.ui.write(ns.logfmt % name,
1379 1381 label='log.%s' % ns.colorname)
1380 1382 if self.ui.debugflag:
1381 1383 # i18n: column positioning for "hg log"
1382 1384 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1383 1385 label='log.phase')
1384 1386 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1385 1387 label = 'log.parent changeset.%s' % pctx.phasestr()
1386 1388 # i18n: column positioning for "hg log"
1387 1389 self.ui.write(_("parent: %d:%s\n")
1388 1390 % (pctx.rev(), hexfunc(pctx.node())),
1389 1391 label=label)
1390 1392
1391 1393 if self.ui.debugflag and rev is not None:
1392 1394 mnode = ctx.manifestnode()
1393 1395 # i18n: column positioning for "hg log"
1394 1396 self.ui.write(_("manifest: %d:%s\n") %
1395 1397 (self.repo.manifestlog._revlog.rev(mnode),
1396 1398 hex(mnode)),
1397 1399 label='ui.debug log.manifest')
1398 1400 # i18n: column positioning for "hg log"
1399 1401 self.ui.write(_("user: %s\n") % ctx.user(),
1400 1402 label='log.user')
1401 1403 # i18n: column positioning for "hg log"
1402 1404 self.ui.write(_("date: %s\n") % date,
1403 1405 label='log.date')
1404 1406
1405 1407 if ctx.troubled():
1406 1408 # i18n: column positioning for "hg log"
1407 1409 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1408 1410 label='log.trouble')
1409 1411
1410 1412 if self.ui.debugflag:
1411 1413 files = ctx.p1().status(ctx)[:3]
1412 1414 for key, value in zip([# i18n: column positioning for "hg log"
1413 1415 _("files:"),
1414 1416 # i18n: column positioning for "hg log"
1415 1417 _("files+:"),
1416 1418 # i18n: column positioning for "hg log"
1417 1419 _("files-:")], files):
1418 1420 if value:
1419 1421 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1420 1422 label='ui.debug log.files')
1421 1423 elif ctx.files() and self.ui.verbose:
1422 1424 # i18n: column positioning for "hg log"
1423 1425 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1424 1426 label='ui.note log.files')
1425 1427 if copies and self.ui.verbose:
1426 1428 copies = ['%s (%s)' % c for c in copies]
1427 1429 # i18n: column positioning for "hg log"
1428 1430 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1429 1431 label='ui.note log.copies')
1430 1432
1431 1433 extra = ctx.extra()
1432 1434 if extra and self.ui.debugflag:
1433 1435 for key, value in sorted(extra.items()):
1434 1436 # i18n: column positioning for "hg log"
1435 1437 self.ui.write(_("extra: %s=%s\n")
1436 1438 % (key, util.escapestr(value)),
1437 1439 label='ui.debug log.extra')
1438 1440
1439 1441 description = ctx.description().strip()
1440 1442 if description:
1441 1443 if self.ui.verbose:
1442 1444 self.ui.write(_("description:\n"),
1443 1445 label='ui.note log.description')
1444 1446 self.ui.write(description,
1445 1447 label='ui.note log.description')
1446 1448 self.ui.write("\n\n")
1447 1449 else:
1448 1450 # i18n: column positioning for "hg log"
1449 1451 self.ui.write(_("summary: %s\n") %
1450 1452 description.splitlines()[0],
1451 1453 label='log.summary')
1452 1454 self.ui.write("\n")
1453 1455
1454 1456 self.showpatch(ctx, matchfn)
1455 1457
1456 1458 def showpatch(self, ctx, matchfn):
1457 1459 if not matchfn:
1458 1460 matchfn = self.matchfn
1459 1461 if matchfn:
1460 1462 stat = self.diffopts.get('stat')
1461 1463 diff = self.diffopts.get('patch')
1462 1464 diffopts = patch.diffallopts(self.ui, self.diffopts)
1463 1465 node = ctx.node()
1464 1466 prev = ctx.p1().node()
1465 1467 if stat:
1466 1468 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1467 1469 match=matchfn, stat=True)
1468 1470 if diff:
1469 1471 if stat:
1470 1472 self.ui.write("\n")
1471 1473 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1472 1474 match=matchfn, stat=False)
1473 1475 self.ui.write("\n")
1474 1476
1475 1477 class jsonchangeset(changeset_printer):
1476 1478 '''format changeset information.'''
1477 1479
1478 1480 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1479 1481 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1480 1482 self.cache = {}
1481 1483 self._first = True
1482 1484
1483 1485 def close(self):
1484 1486 if not self._first:
1485 1487 self.ui.write("\n]\n")
1486 1488 else:
1487 1489 self.ui.write("[]\n")
1488 1490
1489 1491 def _show(self, ctx, copies, matchfn, props):
1490 1492 '''show a single changeset or file revision'''
1491 1493 rev = ctx.rev()
1492 1494 if rev is None:
1493 1495 jrev = jnode = 'null'
1494 1496 else:
1495 1497 jrev = '%d' % rev
1496 1498 jnode = '"%s"' % hex(ctx.node())
1497 1499 j = encoding.jsonescape
1498 1500
1499 1501 if self._first:
1500 1502 self.ui.write("[\n {")
1501 1503 self._first = False
1502 1504 else:
1503 1505 self.ui.write(",\n {")
1504 1506
1505 1507 if self.ui.quiet:
1506 1508 self.ui.write(('\n "rev": %s') % jrev)
1507 1509 self.ui.write((',\n "node": %s') % jnode)
1508 1510 self.ui.write('\n }')
1509 1511 return
1510 1512
1511 1513 self.ui.write(('\n "rev": %s') % jrev)
1512 1514 self.ui.write((',\n "node": %s') % jnode)
1513 1515 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1514 1516 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1515 1517 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1516 1518 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1517 1519 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1518 1520
1519 1521 self.ui.write((',\n "bookmarks": [%s]') %
1520 1522 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1521 1523 self.ui.write((',\n "tags": [%s]') %
1522 1524 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1523 1525 self.ui.write((',\n "parents": [%s]') %
1524 1526 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1525 1527
1526 1528 if self.ui.debugflag:
1527 1529 if rev is None:
1528 1530 jmanifestnode = 'null'
1529 1531 else:
1530 1532 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1531 1533 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1532 1534
1533 1535 self.ui.write((',\n "extra": {%s}') %
1534 1536 ", ".join('"%s": "%s"' % (j(k), j(v))
1535 1537 for k, v in ctx.extra().items()))
1536 1538
1537 1539 files = ctx.p1().status(ctx)
1538 1540 self.ui.write((',\n "modified": [%s]') %
1539 1541 ", ".join('"%s"' % j(f) for f in files[0]))
1540 1542 self.ui.write((',\n "added": [%s]') %
1541 1543 ", ".join('"%s"' % j(f) for f in files[1]))
1542 1544 self.ui.write((',\n "removed": [%s]') %
1543 1545 ", ".join('"%s"' % j(f) for f in files[2]))
1544 1546
1545 1547 elif self.ui.verbose:
1546 1548 self.ui.write((',\n "files": [%s]') %
1547 1549 ", ".join('"%s"' % j(f) for f in ctx.files()))
1548 1550
1549 1551 if copies:
1550 1552 self.ui.write((',\n "copies": {%s}') %
1551 1553 ", ".join('"%s": "%s"' % (j(k), j(v))
1552 1554 for k, v in copies))
1553 1555
1554 1556 matchfn = self.matchfn
1555 1557 if matchfn:
1556 1558 stat = self.diffopts.get('stat')
1557 1559 diff = self.diffopts.get('patch')
1558 1560 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1559 1561 node, prev = ctx.node(), ctx.p1().node()
1560 1562 if stat:
1561 1563 self.ui.pushbuffer()
1562 1564 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1563 1565 match=matchfn, stat=True)
1564 1566 self.ui.write((',\n "diffstat": "%s"')
1565 1567 % j(self.ui.popbuffer()))
1566 1568 if diff:
1567 1569 self.ui.pushbuffer()
1568 1570 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1569 1571 match=matchfn, stat=False)
1570 1572 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1571 1573
1572 1574 self.ui.write("\n }")
1573 1575
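# Illustrative sketch, not part of the original module: jsonchangeset is the
# printer behind `hg log -Tjson`.  A hypothetical driver, assuming `ui`,
# `repo` and an iterable of revision numbers `revs` come from the caller:
def _sketch_jsonlog(ui, repo, revs):
    # show_changeset() (defined further below) hands back a jsonchangeset
    # whenever opts['template'] == 'json'
    displayer = show_changeset(ui, repo, {'template': 'json'}, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])
    # close() emits the trailing "]" -- or "[]" when nothing was shown
    displayer.close()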
1574 1576 class changeset_templater(changeset_printer):
1575 1577 '''format changeset information.'''
1576 1578
1577 1579 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1578 1580 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1579 1581 assert not (tmpl and mapfile)
1580 1582 defaulttempl = templatekw.defaulttempl
1581 1583 if mapfile:
1582 1584 self.t = templater.templater.frommapfile(mapfile,
1583 1585 cache=defaulttempl)
1584 1586 else:
1585 1587 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1586 1588 cache=defaulttempl)
1587 1589
1588 1590 self._counter = itertools.count()
1589 1591 self.cache = {}
1590 1592
1591 1593 # find correct templates for current mode
1592 1594 tmplmodes = [
1593 1595 (True, None),
1594 1596 (self.ui.verbose, 'verbose'),
1595 1597 (self.ui.quiet, 'quiet'),
1596 1598 (self.ui.debugflag, 'debug'),
1597 1599 ]
1598 1600
1599 1601 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1600 1602 'docheader': '', 'docfooter': ''}
1601 1603 for mode, postfix in tmplmodes:
1602 1604 for t in self._parts:
1603 1605 cur = t
1604 1606 if postfix:
1605 1607 cur += "_" + postfix
1606 1608 if mode and cur in self.t:
1607 1609 self._parts[t] = cur
1608 1610
1609 1611 if self._parts['docheader']:
1610 1612 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1611 1613
1612 1614 def close(self):
1613 1615 if self._parts['docfooter']:
1614 1616 if not self.footer:
1615 1617 self.footer = ""
1616 1618 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1617 1619 return super(changeset_templater, self).close()
1618 1620
1619 1621 def _show(self, ctx, copies, matchfn, props):
1620 1622 '''show a single changeset or file revision'''
1621 1623 props = props.copy()
1622 1624 props.update(templatekw.keywords)
1623 1625 props['templ'] = self.t
1624 1626 props['ctx'] = ctx
1625 1627 props['repo'] = self.repo
1626 1628 props['ui'] = self.repo.ui
1627 1629 props['index'] = next(self._counter)
1628 1630 props['revcache'] = {'copies': copies}
1629 1631 props['cache'] = self.cache
1630 1632 props = pycompat.strkwargs(props)
1631 1633
1632 1634 # write header
1633 1635 if self._parts['header']:
1634 1636 h = templater.stringify(self.t(self._parts['header'], **props))
1635 1637 if self.buffered:
1636 1638 self.header[ctx.rev()] = h
1637 1639 else:
1638 1640 if self.lastheader != h:
1639 1641 self.lastheader = h
1640 1642 self.ui.write(h)
1641 1643
1642 1644 # write changeset metadata, then patch if requested
1643 1645 key = self._parts['changeset']
1644 1646 self.ui.write(templater.stringify(self.t(key, **props)))
1645 1647 self.showpatch(ctx, matchfn)
1646 1648
1647 1649 if self._parts['footer']:
1648 1650 if not self.footer:
1649 1651 self.footer = templater.stringify(
1650 1652 self.t(self._parts['footer'], **props))
1651 1653
1652 1654 def gettemplate(ui, tmpl, style):
1653 1655 """
1654 1656 Find the template matching the given template spec or style.
1655 1657 """
1656 1658
1657 1659 # ui settings
1658 1660 if not tmpl and not style: # templates are stronger than styles
1659 1661 tmpl = ui.config('ui', 'logtemplate')
1660 1662 if tmpl:
1661 1663 return templater.unquotestring(tmpl), None
1662 1664 else:
1663 1665 style = util.expandpath(ui.config('ui', 'style', ''))
1664 1666
1665 1667 if not tmpl and style:
1666 1668 mapfile = style
1667 1669 if not os.path.split(mapfile)[0]:
1668 1670 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1669 1671 or templater.templatepath(mapfile))
1670 1672 if mapname:
1671 1673 mapfile = mapname
1672 1674 return None, mapfile
1673 1675
1674 1676 if not tmpl:
1675 1677 return None, None
1676 1678
1677 1679 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1678 1680
1679 1681 def show_changeset(ui, repo, opts, buffered=False):
1680 1682 """show one changeset using template or regular display.
1681 1683
1682 1684 Display format will be the first non-empty hit of:
1683 1685 1. option 'template'
1684 1686 2. option 'style'
1685 1687 3. [ui] setting 'logtemplate'
1686 1688 4. [ui] setting 'style'
1687 1689 If all of these values are either unset or the empty string,
1688 1690 regular display via changeset_printer() is done.
1689 1691 """
1690 1692 # options
1691 1693 matchfn = None
1692 1694 if opts.get('patch') or opts.get('stat'):
1693 1695 matchfn = scmutil.matchall(repo)
1694 1696
1695 1697 if opts.get('template') == 'json':
1696 1698 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1697 1699
1698 1700 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1699 1701
1700 1702 if not tmpl and not mapfile:
1701 1703 return changeset_printer(ui, repo, matchfn, opts, buffered)
1702 1704
1703 1705 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1704 1706
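# Illustrative sketch, not part of the original module: however the
# template/style precedence above is resolved, the returned displayer is
# driven the same way.  A hypothetical helper printing all heads:
def _sketch_showheads(ui, repo, opts):
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in repo.revs('head()'):
        displayer.show(repo[rev])
    displayer.close()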
1705 1707 def showmarker(fm, marker, index=None):
1706 1708 """utility function to display an obsolescence marker in a readable way
1707 1709
1708 1710 To be used by debug functions."""
1709 1711 if index is not None:
1710 1712 fm.write('index', '%i ', index)
1711 1713 fm.write('precnode', '%s ', hex(marker.precnode()))
1712 1714 succs = marker.succnodes()
1713 1715 fm.condwrite(succs, 'succnodes', '%s ',
1714 1716 fm.formatlist(map(hex, succs), name='node'))
1715 1717 fm.write('flag', '%X ', marker.flags())
1716 1718 parents = marker.parentnodes()
1717 1719 if parents is not None:
1718 1720 fm.write('parentnodes', '{%s} ',
1719 1721 fm.formatlist(map(hex, parents), name='node', sep=', '))
1720 1722 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1721 1723 meta = marker.metadata().copy()
1722 1724 meta.pop('date', None)
1723 1725 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1724 1726 fm.plain('\n')
1725 1727
1726 1728 def finddate(ui, repo, date):
1727 1729 """Find the tipmost changeset that matches the given date spec"""
1728 1730
1729 1731 df = util.matchdate(date)
1730 1732 m = scmutil.matchall(repo)
1731 1733 results = {}
1732 1734
1733 1735 def prep(ctx, fns):
1734 1736 d = ctx.date()
1735 1737 if df(d[0]):
1736 1738 results[ctx.rev()] = d
1737 1739
1738 1740 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1739 1741 rev = ctx.rev()
1740 1742 if rev in results:
1741 1743 ui.status(_("found revision %s from %s\n") %
1742 1744 (rev, util.datestr(results[rev])))
1743 1745 return '%d' % rev
1744 1746
1745 1747 raise error.Abort(_("revision matching date not found"))
1746 1748
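# For illustration only (not part of the original module): finddate() returns
# the matching revision number as a string, so a hypothetical caller would do
# something like
#
#   rev = finddate(ui, repo, '2017-05-01')   # e.g. '42'
#   ctx = repo[rev]
#
# and work with the resulting changectx from there.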
1747 1749 def increasingwindows(windowsize=8, sizelimit=512):
1748 1750 while True:
1749 1751 yield windowsize
1750 1752 if windowsize < sizelimit:
1751 1753 windowsize *= 2
1752 1754
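# A quick illustration (not part of the original module): the generator above
# doubles the window until it reaches the size limit and then keeps yielding
# the limit, e.g.
#
#   >>> list(itertools.islice(increasingwindows(), 9))
#   [8, 16, 32, 64, 128, 256, 512, 512, 512]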
1753 1755 class FileWalkError(Exception):
1754 1756 pass
1755 1757
1756 1758 def walkfilerevs(repo, match, follow, revs, fncache):
1757 1759 '''Walks the file history for the matched files.
1758 1760
1759 1761 Returns the changeset revs that are involved in the file history.
1760 1762
1761 1763 Throws FileWalkError if the file history can't be walked using
1762 1764 filelogs alone.
1763 1765 '''
1764 1766 wanted = set()
1765 1767 copies = []
1766 1768 minrev, maxrev = min(revs), max(revs)
1767 1769 def filerevgen(filelog, last):
1768 1770 """
1769 1771 Only files, no patterns. Check the history of each file.
1770 1772
1771 1773 Examines filelog entries within minrev, maxrev linkrev range
1772 1774 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1773 1775 tuples in backwards order
1774 1776 """
1775 1777 cl_count = len(repo)
1776 1778 revs = []
1777 1779 for j in xrange(0, last + 1):
1778 1780 linkrev = filelog.linkrev(j)
1779 1781 if linkrev < minrev:
1780 1782 continue
1781 1783 # only yield revs for which we have the changelog; it can
1782 1784 # happen while doing "hg log" during a pull or commit
1783 1785 if linkrev >= cl_count:
1784 1786 break
1785 1787
1786 1788 parentlinkrevs = []
1787 1789 for p in filelog.parentrevs(j):
1788 1790 if p != nullrev:
1789 1791 parentlinkrevs.append(filelog.linkrev(p))
1790 1792 n = filelog.node(j)
1791 1793 revs.append((linkrev, parentlinkrevs,
1792 1794 follow and filelog.renamed(n)))
1793 1795
1794 1796 return reversed(revs)
1795 1797 def iterfiles():
1796 1798 pctx = repo['.']
1797 1799 for filename in match.files():
1798 1800 if follow:
1799 1801 if filename not in pctx:
1800 1802 raise error.Abort(_('cannot follow file not in parent '
1801 1803 'revision: "%s"') % filename)
1802 1804 yield filename, pctx[filename].filenode()
1803 1805 else:
1804 1806 yield filename, None
1805 1807 for filename_node in copies:
1806 1808 yield filename_node
1807 1809
1808 1810 for file_, node in iterfiles():
1809 1811 filelog = repo.file(file_)
1810 1812 if not len(filelog):
1811 1813 if node is None:
1812 1814 # A zero count may be a directory or deleted file, so
1813 1815 # try to find matching entries on the slow path.
1814 1816 if follow:
1815 1817 raise error.Abort(
1816 1818 _('cannot follow nonexistent file: "%s"') % file_)
1817 1819 raise FileWalkError("Cannot walk via filelog")
1818 1820 else:
1819 1821 continue
1820 1822
1821 1823 if node is None:
1822 1824 last = len(filelog) - 1
1823 1825 else:
1824 1826 last = filelog.rev(node)
1825 1827
1826 1828 # keep track of all ancestors of the file
1827 1829 ancestors = {filelog.linkrev(last)}
1828 1830
1829 1831 # iterate from latest to oldest revision
1830 1832 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1831 1833 if not follow:
1832 1834 if rev > maxrev:
1833 1835 continue
1834 1836 else:
1835 1837 # Note that last might not be the first interesting
1836 1838 # rev to us:
1837 1839 # if the file has been changed after maxrev, we'll
1838 1840 # have linkrev(last) > maxrev, and we still need
1839 1841 # to explore the file graph
1840 1842 if rev not in ancestors:
1841 1843 continue
1842 1844 # XXX insert 1327 fix here
1843 1845 if flparentlinkrevs:
1844 1846 ancestors.update(flparentlinkrevs)
1845 1847
1846 1848 fncache.setdefault(rev, []).append(file_)
1847 1849 wanted.add(rev)
1848 1850 if copied:
1849 1851 copies.append(copied)
1850 1852
1851 1853 return wanted
1852 1854
1853 1855 class _followfilter(object):
1854 1856 def __init__(self, repo, onlyfirst=False):
1855 1857 self.repo = repo
1856 1858 self.startrev = nullrev
1857 1859 self.roots = set()
1858 1860 self.onlyfirst = onlyfirst
1859 1861
1860 1862 def match(self, rev):
1861 1863 def realparents(rev):
1862 1864 if self.onlyfirst:
1863 1865 return self.repo.changelog.parentrevs(rev)[0:1]
1864 1866 else:
1865 1867 return filter(lambda x: x != nullrev,
1866 1868 self.repo.changelog.parentrevs(rev))
1867 1869
1868 1870 if self.startrev == nullrev:
1869 1871 self.startrev = rev
1870 1872 return True
1871 1873
1872 1874 if rev > self.startrev:
1873 1875 # forward: all descendants
1874 1876 if not self.roots:
1875 1877 self.roots.add(self.startrev)
1876 1878 for parent in realparents(rev):
1877 1879 if parent in self.roots:
1878 1880 self.roots.add(rev)
1879 1881 return True
1880 1882 else:
1881 1883 # backwards: all parents
1882 1884 if not self.roots:
1883 1885 self.roots.update(realparents(self.startrev))
1884 1886 if rev in self.roots:
1885 1887 self.roots.remove(rev)
1886 1888 self.roots.update(realparents(rev))
1887 1889 return True
1888 1890
1889 1891 return False
1890 1892
1891 1893 def walkchangerevs(repo, match, opts, prepare):
1892 1894 '''Iterate over files and the revs in which they changed.
1893 1895
1894 1896 Callers most commonly need to iterate backwards over the history
1895 1897 in which they are interested. Doing so has awful (quadratic-looking)
1896 1898 performance, so we use iterators in a "windowed" way.
1897 1899
1898 1900 We walk a window of revisions in the desired order. Within the
1899 1901 window, we first walk forwards to gather data, then in the desired
1900 1902 order (usually backwards) to display it.
1901 1903
1902 1904 This function returns an iterator yielding contexts. Before
1903 1905 yielding each context, the iterator will first call the prepare
1904 1906 function on each context in the window in forward order.'''
1905 1907
1906 1908 follow = opts.get('follow') or opts.get('follow_first')
1907 1909 revs = _logrevs(repo, opts)
1908 1910 if not revs:
1909 1911 return []
1910 1912 wanted = set()
1911 1913 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1912 1914 opts.get('removed'))
1913 1915 fncache = {}
1914 1916 change = repo.changectx
1915 1917
1916 1918 # First step is to fill wanted, the set of revisions that we want to yield.
1917 1919 # When it does not induce extra cost, we also fill fncache for revisions in
1918 1920 # wanted: a cache of filenames that were changed (ctx.files()) and that
1919 1921 # match the file filtering conditions.
1920 1922
1921 1923 if match.always():
1922 1924 # No files, no patterns. Display all revs.
1923 1925 wanted = revs
1924 1926 elif not slowpath:
1925 1927 # We only have to read through the filelog to find wanted revisions
1926 1928
1927 1929 try:
1928 1930 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1929 1931 except FileWalkError:
1930 1932 slowpath = True
1931 1933
1932 1934 # We decided to fall back to the slowpath because at least one
1933 1935 # of the paths was not a file. Check to see if at least one of them
1934 1936 # existed in history, otherwise simply return
1935 1937 for path in match.files():
1936 1938 if path == '.' or path in repo.store:
1937 1939 break
1938 1940 else:
1939 1941 return []
1940 1942
1941 1943 if slowpath:
1942 1944 # We have to read the changelog to match filenames against
1943 1945 # changed files
1944 1946
1945 1947 if follow:
1946 1948 raise error.Abort(_('can only follow copies/renames for explicit '
1947 1949 'filenames'))
1948 1950
1949 1951 # The slow path checks files modified in every changeset.
1950 1952 # This is really slow on large repos, so compute the set lazily.
1951 1953 class lazywantedset(object):
1952 1954 def __init__(self):
1953 1955 self.set = set()
1954 1956 self.revs = set(revs)
1955 1957
1956 1958 # No need to worry about locality here because it will be accessed
1957 1959 # in the same order as the increasing window below.
1958 1960 def __contains__(self, value):
1959 1961 if value in self.set:
1960 1962 return True
1961 1963 elif value not in self.revs:
1962 1964 return False
1963 1965 else:
1964 1966 self.revs.discard(value)
1965 1967 ctx = change(value)
1966 1968 matches = filter(match, ctx.files())
1967 1969 if matches:
1968 1970 fncache[value] = matches
1969 1971 self.set.add(value)
1970 1972 return True
1971 1973 return False
1972 1974
1973 1975 def discard(self, value):
1974 1976 self.revs.discard(value)
1975 1977 self.set.discard(value)
1976 1978
1977 1979 wanted = lazywantedset()
1978 1980
1979 1981 # it might be worthwhile to do this in the iterator if the rev range
1980 1982 # is descending and the prune args are all within that range
1981 1983 for rev in opts.get('prune', ()):
1982 1984 rev = repo[rev].rev()
1983 1985 ff = _followfilter(repo)
1984 1986 stop = min(revs[0], revs[-1])
1985 1987 for x in xrange(rev, stop - 1, -1):
1986 1988 if ff.match(x):
1987 1989 wanted = wanted - [x]
1988 1990
1989 1991 # Now that wanted is correctly initialized, we can iterate over the
1990 1992 # revision range, yielding only revisions in wanted.
1991 1993 def iterate():
1992 1994 if follow and match.always():
1993 1995 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1994 1996 def want(rev):
1995 1997 return ff.match(rev) and rev in wanted
1996 1998 else:
1997 1999 def want(rev):
1998 2000 return rev in wanted
1999 2001
2000 2002 it = iter(revs)
2001 2003 stopiteration = False
2002 2004 for windowsize in increasingwindows():
2003 2005 nrevs = []
2004 2006 for i in xrange(windowsize):
2005 2007 rev = next(it, None)
2006 2008 if rev is None:
2007 2009 stopiteration = True
2008 2010 break
2009 2011 elif want(rev):
2010 2012 nrevs.append(rev)
2011 2013 for rev in sorted(nrevs):
2012 2014 fns = fncache.get(rev)
2013 2015 ctx = change(rev)
2014 2016 if not fns:
2015 2017 def fns_generator():
2016 2018 for f in ctx.files():
2017 2019 if match(f):
2018 2020 yield f
2019 2021 fns = fns_generator()
2020 2022 prepare(ctx, fns)
2021 2023 for rev in nrevs:
2022 2024 yield change(rev)
2023 2025
2024 2026 if stopiteration:
2025 2027 break
2026 2028
2027 2029 return iterate()
2028 2030
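# Illustrative sketch, not part of the original module: a hypothetical caller
# of walkchangerevs().  prepare() runs on every context of a window in
# forward order before the contexts are yielded in the requested (usually
# reverse) order; finddate() above follows the same pattern.
def _sketch_collecttouched(repo, match, opts):
    touched = {}
    def prep(ctx, fns):
        # fns iterates over the matched files that ctx touches
        touched[ctx.rev()] = sorted(fns)
    for ctx in walkchangerevs(repo, match, opts, prep):
        pass
    return touched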
2029 2031 def _makefollowlogfilematcher(repo, files, followfirst):
2030 2032 # When displaying a revision with --patch --follow FILE, we have
2031 2033 # to know which file of the revision must be diffed. With
2032 2034 # --follow, we want the names of the ancestors of FILE in the
2033 2035 # revision, stored in "fcache". "fcache" is populated by
2034 2036 # reproducing the graph traversal already done by --follow revset
2035 2037 # and relating revs to file names (which is not "correct" but
2036 2038 # good enough).
2037 2039 fcache = {}
2038 2040 fcacheready = [False]
2039 2041 pctx = repo['.']
2040 2042
2041 2043 def populate():
2042 2044 for fn in files:
2043 2045 fctx = pctx[fn]
2044 2046 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2045 2047 for c in fctx.ancestors(followfirst=followfirst):
2046 2048 fcache.setdefault(c.rev(), set()).add(c.path())
2047 2049
2048 2050 def filematcher(rev):
2049 2051 if not fcacheready[0]:
2050 2052 # Lazy initialization
2051 2053 fcacheready[0] = True
2052 2054 populate()
2053 2055 return scmutil.matchfiles(repo, fcache.get(rev, []))
2054 2056
2055 2057 return filematcher
2056 2058
2057 2059 def _makenofollowlogfilematcher(repo, pats, opts):
2058 2060 '''hook for extensions to override the filematcher for non-follow cases'''
2059 2061 return None
2060 2062
2061 2063 def _makelogrevset(repo, pats, opts, revs):
2062 2064 """Return (expr, filematcher) where expr is a revset string built
2063 2065 from log options and file patterns or None. If --stat or --patch
2064 2066 are not passed filematcher is None. Otherwise it is a callable
2065 2067 taking a revision number and returning a match objects filtering
2066 2068 the files to be detailed when displaying the revision.
2067 2069 """
2068 2070 opt2revset = {
2069 2071 'no_merges': ('not merge()', None),
2070 2072 'only_merges': ('merge()', None),
2071 2073 '_ancestors': ('ancestors(%(val)s)', None),
2072 2074 '_fancestors': ('_firstancestors(%(val)s)', None),
2073 2075 '_descendants': ('descendants(%(val)s)', None),
2074 2076 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2075 2077 '_matchfiles': ('_matchfiles(%(val)s)', None),
2076 2078 'date': ('date(%(val)r)', None),
2077 2079 'branch': ('branch(%(val)r)', ' or '),
2078 2080 '_patslog': ('filelog(%(val)r)', ' or '),
2079 2081 '_patsfollow': ('follow(%(val)r)', ' or '),
2080 2082 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2081 2083 'keyword': ('keyword(%(val)r)', ' or '),
2082 2084 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2083 2085 'user': ('user(%(val)r)', ' or '),
2084 2086 }
2085 2087
2086 2088 opts = dict(opts)
2087 2089 # follow or not follow?
2088 2090 follow = opts.get('follow') or opts.get('follow_first')
2089 2091 if opts.get('follow_first'):
2090 2092 followfirst = 1
2091 2093 else:
2092 2094 followfirst = 0
2093 2095 # --follow with FILE behavior depends on revs...
2094 2096 it = iter(revs)
2095 2097 startrev = next(it)
2096 2098 followdescendants = startrev < next(it, startrev)
2097 2099
2098 2100 # branch and only_branch are really aliases and must be handled at
2099 2101 # the same time
2100 2102 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2101 2103 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2102 2104 # pats/include/exclude are passed to match.match() directly in
2103 2105 # _matchfiles() revset but walkchangerevs() builds its matcher with
2104 2106 # scmutil.match(). The difference is that input pats are globbed on
2105 2107 # platforms without shell expansion (windows).
2106 2108 wctx = repo[None]
2107 2109 match, pats = scmutil.matchandpats(wctx, pats, opts)
2108 2110 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2109 2111 opts.get('removed'))
2110 2112 if not slowpath:
2111 2113 for f in match.files():
2112 2114 if follow and f not in wctx:
2113 2115 # If the file exists, it may be a directory, so let it
2114 2116 # take the slow path.
2115 2117 if os.path.exists(repo.wjoin(f)):
2116 2118 slowpath = True
2117 2119 continue
2118 2120 else:
2119 2121 raise error.Abort(_('cannot follow file not in parent '
2120 2122 'revision: "%s"') % f)
2121 2123 filelog = repo.file(f)
2122 2124 if not filelog:
2123 2125 # A zero count may be a directory or deleted file, so
2124 2126 # try to find matching entries on the slow path.
2125 2127 if follow:
2126 2128 raise error.Abort(
2127 2129 _('cannot follow nonexistent file: "%s"') % f)
2128 2130 slowpath = True
2129 2131
2130 2132 # We decided to fall back to the slowpath because at least one
2131 2133 # of the paths was not a file. Check to see if at least one of them
2132 2134 # existed in history - in that case, we'll continue down the
2133 2135 # slowpath; otherwise, we can turn off the slowpath
2134 2136 if slowpath:
2135 2137 for path in match.files():
2136 2138 if path == '.' or path in repo.store:
2137 2139 break
2138 2140 else:
2139 2141 slowpath = False
2140 2142
2141 2143 fpats = ('_patsfollow', '_patsfollowfirst')
2142 2144 fnopats = (('_ancestors', '_fancestors'),
2143 2145 ('_descendants', '_fdescendants'))
2144 2146 if slowpath:
2145 2147 # See walkchangerevs() slow path.
2146 2148 #
2147 2149 # pats/include/exclude cannot be represented as separate
2148 2150 # revset expressions as their filtering logic applies at file
2149 2151 # level. For instance "-I a -X a" matches a revision touching
2150 2152 # "a" and "b" while "file(a) and not file(b)" does
2151 2153 # not. Besides, filesets are evaluated against the working
2152 2154 # directory.
2153 2155 matchargs = ['r:', 'd:relpath']
2154 2156 for p in pats:
2155 2157 matchargs.append('p:' + p)
2156 2158 for p in opts.get('include', []):
2157 2159 matchargs.append('i:' + p)
2158 2160 for p in opts.get('exclude', []):
2159 2161 matchargs.append('x:' + p)
2160 2162 matchargs = ','.join(('%r' % p) for p in matchargs)
2161 2163 opts['_matchfiles'] = matchargs
2162 2164 if follow:
2163 2165 opts[fnopats[0][followfirst]] = '.'
2164 2166 else:
2165 2167 if follow:
2166 2168 if pats:
2167 2169 # follow() revset interprets its file argument as a
2168 2170 # manifest entry, so use match.files(), not pats.
2169 2171 opts[fpats[followfirst]] = list(match.files())
2170 2172 else:
2171 2173 op = fnopats[followdescendants][followfirst]
2172 2174 opts[op] = 'rev(%d)' % startrev
2173 2175 else:
2174 2176 opts['_patslog'] = list(pats)
2175 2177
2176 2178 filematcher = None
2177 2179 if opts.get('patch') or opts.get('stat'):
2178 2180 # When following files, track renames via a special matcher.
2179 2181 # If we're forced to take the slowpath it means we're following
2180 2182 # at least one pattern/directory, so don't bother with rename tracking.
2181 2183 if follow and not match.always() and not slowpath:
2182 2184 # _makefollowlogfilematcher expects its files argument to be
2183 2185 # relative to the repo root, so use match.files(), not pats.
2184 2186 filematcher = _makefollowlogfilematcher(repo, match.files(),
2185 2187 followfirst)
2186 2188 else:
2187 2189 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2188 2190 if filematcher is None:
2189 2191 filematcher = lambda rev: match
2190 2192
2191 2193 expr = []
2192 2194 for op, val in sorted(opts.iteritems()):
2193 2195 if not val:
2194 2196 continue
2195 2197 if op not in opt2revset:
2196 2198 continue
2197 2199 revop, andor = opt2revset[op]
2198 2200 if '%(val)' not in revop:
2199 2201 expr.append(revop)
2200 2202 else:
2201 2203 if not isinstance(val, list):
2202 2204 e = revop % {'val': val}
2203 2205 else:
2204 2206 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2205 2207 expr.append(e)
2206 2208
2207 2209 if expr:
2208 2210 expr = '(' + ' and '.join(expr) + ')'
2209 2211 else:
2210 2212 expr = None
2211 2213 return expr, filematcher
2212 2214
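# For illustration (not part of the original module), the opt2revset table
# above translates log options into revset fragments roughly as follows:
#
#   --no-merges              -> not merge()
#   --keyword bug            -> keyword('bug')
#   --user alice --user bob  -> (user('alice') or user('bob'))
#
# The fragments are then joined with ' and ' into the single parenthesized
# expression that is returned as expr.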
2213 2215 def _logrevs(repo, opts):
2214 2216 # Default --rev value depends on --follow but --follow behavior
2215 2217 # depends on revisions resolved from --rev...
2216 2218 follow = opts.get('follow') or opts.get('follow_first')
2217 2219 if opts.get('rev'):
2218 2220 revs = scmutil.revrange(repo, opts['rev'])
2219 2221 elif follow and repo.dirstate.p1() == nullid:
2220 2222 revs = smartset.baseset()
2221 2223 elif follow:
2222 2224 revs = repo.revs('reverse(:.)')
2223 2225 else:
2224 2226 revs = smartset.spanset(repo)
2225 2227 revs.reverse()
2226 2228 return revs
2227 2229
2228 2230 def getgraphlogrevs(repo, pats, opts):
2229 2231 """Return (revs, expr, filematcher) where revs is an iterable of
2230 2232 revision numbers, expr is a revset string built from log options
2231 2233 and file patterns or None, and used to filter 'revs'. If --stat or
2232 2234 --patch are not passed filematcher is None. Otherwise it is a
2233 2235 callable taking a revision number and returning a match objects
2234 2236 filtering the files to be detailed when displaying the revision.
2235 2237 """
2236 2238 limit = loglimit(opts)
2237 2239 revs = _logrevs(repo, opts)
2238 2240 if not revs:
2239 2241 return smartset.baseset(), None, None
2240 2242 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2241 2243 if opts.get('rev'):
2242 2244 # User-specified revs might be unsorted, but don't sort before
2243 2245 # _makelogrevset because it might depend on the order of revs
2244 2246 if not (revs.isdescending() or revs.istopo()):
2245 2247 revs.sort(reverse=True)
2246 2248 if expr:
2247 2249 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2248 2250 revs = matcher(repo, revs)
2249 2251 if limit is not None:
2250 2252 limitedrevs = []
2251 2253 for idx, rev in enumerate(revs):
2252 2254 if idx >= limit:
2253 2255 break
2254 2256 limitedrevs.append(rev)
2255 2257 revs = smartset.baseset(limitedrevs)
2256 2258
2257 2259 return revs, expr, filematcher
2258 2260
2259 2261 def getlogrevs(repo, pats, opts):
2260 2262 """Return (revs, expr, filematcher) where revs is an iterable of
2261 2263 revision numbers, expr is a revset string built from log options
2262 2264 and file patterns or None, and used to filter 'revs'. If --stat or
2263 2265 --patch are not passed filematcher is None. Otherwise it is a
2264 2266 callable taking a revision number and returning a match objects
2265 2267 filtering the files to be detailed when displaying the revision.
2266 2268 """
2267 2269 limit = loglimit(opts)
2268 2270 revs = _logrevs(repo, opts)
2269 2271 if not revs:
2270 2272 return smartset.baseset([]), None, None
2271 2273 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2272 2274 if expr:
2273 2275 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2274 2276 revs = matcher(repo, revs)
2275 2277 if limit is not None:
2276 2278 limitedrevs = []
2277 2279 for idx, r in enumerate(revs):
2278 2280 if limit <= idx:
2279 2281 break
2280 2282 limitedrevs.append(r)
2281 2283 revs = smartset.baseset(limitedrevs)
2282 2284
2283 2285 return revs, expr, filematcher
2284 2286
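# Illustrative sketch, not part of the original module: a hypothetical,
# stripped-down log loop pairing getlogrevs() with show_changeset(), roughly
# what the non-graph log path does.
def _sketch_simplelog(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        revmatchfn = filematcher and filematcher(rev) or None
        displayer.show(ctx, matchfn=revmatchfn)
        displayer.flush(ctx)
    displayer.close()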
2285 2287 def _graphnodeformatter(ui, displayer):
2286 2288 spec = ui.config('ui', 'graphnodetemplate')
2287 2289 if not spec:
2288 2290 return templatekw.showgraphnode # fast path for "{graphnode}"
2289 2291
2290 2292 spec = templater.unquotestring(spec)
2291 2293 templ = formatter.gettemplater(ui, 'graphnode', spec)
2292 2294 cache = {}
2293 2295 if isinstance(displayer, changeset_templater):
2294 2296 cache = displayer.cache # reuse cache of slow templates
2295 2297 props = templatekw.keywords.copy()
2296 2298 props['templ'] = templ
2297 2299 props['cache'] = cache
2298 2300 def formatnode(repo, ctx):
2299 2301 props['ctx'] = ctx
2300 2302 props['repo'] = repo
2301 2303 props['ui'] = repo.ui
2302 2304 props['revcache'] = {}
2303 2305 return templater.stringify(templ('graphnode', **props))
2304 2306 return formatnode
2305 2307
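# For illustration (not part of the original module): ui.graphnodetemplate is
# read above to customize the node character in graph output.  A hypothetical
# configuration such as
#
#   [ui]
#   graphnodetemplate = {ifeq(phase, 'secret', 's', 'o')}
#
# would, for example, mark secret changesets with 's' instead of the default
# node symbol.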
2306 2308 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2307 2309 filematcher=None):
2308 2310 formatnode = _graphnodeformatter(ui, displayer)
2309 2311 state = graphmod.asciistate()
2310 2312 styles = state['styles']
2311 2313
2312 2314 # only set graph styling if HGPLAIN is not set.
2313 2315 if ui.plain('graph'):
2314 2316 # set all edge styles to |, the default pre-3.8 behaviour
2315 2317 styles.update(dict.fromkeys(styles, '|'))
2316 2318 else:
2317 2319 edgetypes = {
2318 2320 'parent': graphmod.PARENT,
2319 2321 'grandparent': graphmod.GRANDPARENT,
2320 2322 'missing': graphmod.MISSINGPARENT
2321 2323 }
2322 2324 for name, key in edgetypes.items():
2323 2325 # experimental config: experimental.graphstyle.*
2324 2326 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2325 2327 styles[key])
2326 2328 if not styles[key]:
2327 2329 styles[key] = None
2328 2330
2329 2331 # experimental config: experimental.graphshorten
2330 2332 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2331 2333
2332 2334 for rev, type, ctx, parents in dag:
2333 2335 char = formatnode(repo, ctx)
2334 2336 copies = None
2335 2337 if getrenamed and ctx.rev():
2336 2338 copies = []
2337 2339 for fn in ctx.files():
2338 2340 rename = getrenamed(fn, ctx.rev())
2339 2341 if rename:
2340 2342 copies.append((fn, rename[0]))
2341 2343 revmatchfn = None
2342 2344 if filematcher is not None:
2343 2345 revmatchfn = filematcher(ctx.rev())
2344 2346 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2345 2347 lines = displayer.hunk.pop(rev).split('\n')
2346 2348 if not lines[-1]:
2347 2349 del lines[-1]
2348 2350 displayer.flush(ctx)
2349 2351 edges = edgefn(type, char, lines, state, rev, parents)
2350 2352 for type, char, lines, coldata in edges:
2351 2353 graphmod.ascii(ui, state, type, char, lines, coldata)
2352 2354 displayer.close()
2353 2355
2354 2356 def graphlog(ui, repo, pats, opts):
2355 2357 # Parameters are identical to log command ones
2356 2358 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2357 2359 revdag = graphmod.dagwalker(repo, revs)
2358 2360
2359 2361 getrenamed = None
2360 2362 if opts.get('copies'):
2361 2363 endrev = None
2362 2364 if opts.get('rev'):
2363 2365 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2364 2366 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2365 2367
2366 2368 ui.pager('log')
2367 2369 displayer = show_changeset(ui, repo, opts, buffered=True)
2368 2370 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2369 2371 filematcher)
2370 2372
2371 2373 def checkunsupportedgraphflags(pats, opts):
2372 2374 for op in ["newest_first"]:
2373 2375 if op in opts and opts[op]:
2374 2376 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2375 2377 % op.replace("_", "-"))
2376 2378
2377 2379 def graphrevs(repo, nodes, opts):
2378 2380 limit = loglimit(opts)
2379 2381 nodes.reverse()
2380 2382 if limit is not None:
2381 2383 nodes = nodes[:limit]
2382 2384 return graphmod.nodes(repo, nodes)
2383 2385
2384 2386 def add(ui, repo, match, prefix, explicitonly, **opts):
2385 2387 join = lambda f: os.path.join(prefix, f)
2386 2388 bad = []
2387 2389
2388 2390 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2389 2391 names = []
2390 2392 wctx = repo[None]
2391 2393 cca = None
2392 2394 abort, warn = scmutil.checkportabilityalert(ui)
2393 2395 if abort or warn:
2394 2396 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2395 2397
2396 2398 badmatch = matchmod.badmatch(match, badfn)
2397 2399 dirstate = repo.dirstate
2398 2400 # We don't want to just call wctx.walk here, since it would return a lot of
2399 2401 # clean files, which we aren't interested in and which takes time.
2400 2402 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2401 2403 True, False, full=False)):
2402 2404 exact = match.exact(f)
2403 2405 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2404 2406 if cca:
2405 2407 cca(f)
2406 2408 names.append(f)
2407 2409 if ui.verbose or not exact:
2408 2410 ui.status(_('adding %s\n') % match.rel(f))
2409 2411
2410 2412 for subpath in sorted(wctx.substate):
2411 2413 sub = wctx.sub(subpath)
2412 2414 try:
2413 2415 submatch = matchmod.subdirmatcher(subpath, match)
2414 2416 if opts.get(r'subrepos'):
2415 2417 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2416 2418 else:
2417 2419 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2418 2420 except error.LookupError:
2419 2421 ui.status(_("skipping missing subrepository: %s\n")
2420 2422 % join(subpath))
2421 2423
2422 2424 if not opts.get(r'dry_run'):
2423 2425 rejected = wctx.add(names, prefix)
2424 2426 bad.extend(f for f in rejected if f in match.files())
2425 2427 return bad
2426 2428
2427 2429 def addwebdirpath(repo, serverpath, webconf):
2428 2430 webconf[serverpath] = repo.root
2429 2431 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2430 2432
2431 2433 for r in repo.revs('filelog("path:.hgsub")'):
2432 2434 ctx = repo[r]
2433 2435 for subpath in ctx.substate:
2434 2436 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2435 2437
2436 2438 def forget(ui, repo, match, prefix, explicitonly):
2437 2439 join = lambda f: os.path.join(prefix, f)
2438 2440 bad = []
2439 2441 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2440 2442 wctx = repo[None]
2441 2443 forgot = []
2442 2444
2443 2445 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2444 2446 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2445 2447 if explicitonly:
2446 2448 forget = [f for f in forget if match.exact(f)]
2447 2449
2448 2450 for subpath in sorted(wctx.substate):
2449 2451 sub = wctx.sub(subpath)
2450 2452 try:
2451 2453 submatch = matchmod.subdirmatcher(subpath, match)
2452 2454 subbad, subforgot = sub.forget(submatch, prefix)
2453 2455 bad.extend([subpath + '/' + f for f in subbad])
2454 2456 forgot.extend([subpath + '/' + f for f in subforgot])
2455 2457 except error.LookupError:
2456 2458 ui.status(_("skipping missing subrepository: %s\n")
2457 2459 % join(subpath))
2458 2460
2459 2461 if not explicitonly:
2460 2462 for f in match.files():
2461 2463 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2462 2464 if f not in forgot:
2463 2465 if repo.wvfs.exists(f):
2464 2466 # Don't complain if the exact case match wasn't given.
2465 2467 # But don't do this until after checking 'forgot', so
2466 2468 # that subrepo files aren't normalized, and this op is
2467 2469 # purely from data cached by the status walk above.
2468 2470 if repo.dirstate.normalize(f) in repo.dirstate:
2469 2471 continue
2470 2472 ui.warn(_('not removing %s: '
2471 2473 'file is already untracked\n')
2472 2474 % match.rel(f))
2473 2475 bad.append(f)
2474 2476
2475 2477 for f in forget:
2476 2478 if ui.verbose or not match.exact(f):
2477 2479 ui.status(_('removing %s\n') % match.rel(f))
2478 2480
2479 2481 rejected = wctx.forget(forget, prefix)
2480 2482 bad.extend(f for f in rejected if f in match.files())
2481 2483 forgot.extend(f for f in forget if f not in rejected)
2482 2484 return bad, forgot
2483 2485
2484 2486 def files(ui, ctx, m, fm, fmt, subrepos):
2485 2487 rev = ctx.rev()
2486 2488 ret = 1
2487 2489 ds = ctx.repo().dirstate
2488 2490
2489 2491 for f in ctx.matches(m):
2490 2492 if rev is None and ds[f] == 'r':
2491 2493 continue
2492 2494 fm.startitem()
2493 2495 if ui.verbose:
2494 2496 fc = ctx[f]
2495 2497 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2496 2498 fm.data(abspath=f)
2497 2499 fm.write('path', fmt, m.rel(f))
2498 2500 ret = 0
2499 2501
2500 2502 for subpath in sorted(ctx.substate):
2501 2503 submatch = matchmod.subdirmatcher(subpath, m)
2502 2504 if (subrepos or m.exact(subpath) or any(submatch.files())):
2503 2505 sub = ctx.sub(subpath)
2504 2506 try:
2505 2507 recurse = m.exact(subpath) or subrepos
2506 2508 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2507 2509 ret = 0
2508 2510 except error.LookupError:
2509 2511 ui.status(_("skipping missing subrepository: %s\n")
2510 2512 % m.abs(subpath))
2511 2513
2512 2514 return ret
2513 2515
2514 2516 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2515 2517 join = lambda f: os.path.join(prefix, f)
2516 2518 ret = 0
2517 2519 s = repo.status(match=m, clean=True)
2518 2520 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2519 2521
2520 2522 wctx = repo[None]
2521 2523
2522 2524 if warnings is None:
2523 2525 warnings = []
2524 2526 warn = True
2525 2527 else:
2526 2528 warn = False
2527 2529
2528 2530 subs = sorted(wctx.substate)
2529 2531 total = len(subs)
2530 2532 count = 0
2531 2533 for subpath in subs:
2532 2534 count += 1
2533 2535 submatch = matchmod.subdirmatcher(subpath, m)
2534 2536 if subrepos or m.exact(subpath) or any(submatch.files()):
2535 2537 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2536 2538 sub = wctx.sub(subpath)
2537 2539 try:
2538 2540 if sub.removefiles(submatch, prefix, after, force, subrepos,
2539 2541 warnings):
2540 2542 ret = 1
2541 2543 except error.LookupError:
2542 2544 warnings.append(_("skipping missing subrepository: %s\n")
2543 2545 % join(subpath))
2544 2546 ui.progress(_('searching'), None)
2545 2547
2546 2548 # warn about failure to delete explicit files/dirs
2547 2549 deleteddirs = util.dirs(deleted)
2548 2550 files = m.files()
2549 2551 total = len(files)
2550 2552 count = 0
2551 2553 for f in files:
2552 2554 def insubrepo():
2553 2555 for subpath in wctx.substate:
2554 2556 if f.startswith(subpath + '/'):
2555 2557 return True
2556 2558 return False
2557 2559
2558 2560 count += 1
2559 2561 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2560 2562 isdir = f in deleteddirs or wctx.hasdir(f)
2561 2563 if (f in repo.dirstate or isdir or f == '.'
2562 2564 or insubrepo() or f in subs):
2563 2565 continue
2564 2566
2565 2567 if repo.wvfs.exists(f):
2566 2568 if repo.wvfs.isdir(f):
2567 2569 warnings.append(_('not removing %s: no tracked files\n')
2568 2570 % m.rel(f))
2569 2571 else:
2570 2572 warnings.append(_('not removing %s: file is untracked\n')
2571 2573 % m.rel(f))
2572 2574 # missing files will generate a warning elsewhere
2573 2575 ret = 1
2574 2576 ui.progress(_('deleting'), None)
2575 2577
2576 2578 if force:
2577 2579 list = modified + deleted + clean + added
2578 2580 elif after:
2579 2581 list = deleted
2580 2582 remaining = modified + added + clean
2581 2583 total = len(remaining)
2582 2584 count = 0
2583 2585 for f in remaining:
2584 2586 count += 1
2585 2587 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2586 2588 warnings.append(_('not removing %s: file still exists\n')
2587 2589 % m.rel(f))
2588 2590 ret = 1
2589 2591 ui.progress(_('skipping'), None)
2590 2592 else:
2591 2593 list = deleted + clean
2592 2594 total = len(modified) + len(added)
2593 2595 count = 0
2594 2596 for f in modified:
2595 2597 count += 1
2596 2598 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2597 2599 warnings.append(_('not removing %s: file is modified (use -f'
2598 2600 ' to force removal)\n') % m.rel(f))
2599 2601 ret = 1
2600 2602 for f in added:
2601 2603 count += 1
2602 2604 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2603 2605 warnings.append(_("not removing %s: file has been marked for add"
2604 2606 " (use 'hg forget' to undo add)\n") % m.rel(f))
2605 2607 ret = 1
2606 2608 ui.progress(_('skipping'), None)
2607 2609
2608 2610 list = sorted(list)
2609 2611 total = len(list)
2610 2612 count = 0
2611 2613 for f in list:
2612 2614 count += 1
2613 2615 if ui.verbose or not m.exact(f):
2614 2616 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2615 2617 ui.status(_('removing %s\n') % m.rel(f))
2616 2618 ui.progress(_('deleting'), None)
2617 2619
2618 2620 with repo.wlock():
2619 2621 if not after:
2620 2622 for f in list:
2621 2623 if f in added:
2622 2624 continue # we never unlink added files on remove
2623 2625 repo.wvfs.unlinkpath(f, ignoremissing=True)
2624 2626 repo[None].forget(list)
2625 2627
2626 2628 if warn:
2627 2629 for warning in warnings:
2628 2630 ui.warn(warning)
2629 2631
2630 2632 return ret
2631 2633
2632 2634 def cat(ui, repo, ctx, matcher, prefix, **opts):
2633 2635 err = 1
2634 2636
2635 2637 def write(path):
2636 2638 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2637 2639 pathname=os.path.join(prefix, path))
2638 2640 data = ctx[path].data()
2639 2641 if opts.get('decode'):
2640 2642 data = repo.wwritedata(path, data)
2641 2643 fp.write(data)
2642 2644 fp.close()
2643 2645
2644 2646 # Automation often uses hg cat on single files, so special case it
2645 2647 # for performance to avoid the cost of parsing the manifest.
2646 2648 if len(matcher.files()) == 1 and not matcher.anypats():
2647 2649 file = matcher.files()[0]
2648 2650 mfl = repo.manifestlog
2649 2651 mfnode = ctx.manifestnode()
2650 2652 try:
2651 2653 if mfnode and mfl[mfnode].find(file)[0]:
2652 2654 write(file)
2653 2655 return 0
2654 2656 except KeyError:
2655 2657 pass
2656 2658
2657 2659 for abs in ctx.walk(matcher):
2658 2660 write(abs)
2659 2661 err = 0
2660 2662
2661 2663 for subpath in sorted(ctx.substate):
2662 2664 sub = ctx.sub(subpath)
2663 2665 try:
2664 2666 submatch = matchmod.subdirmatcher(subpath, matcher)
2665 2667
2666 2668 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2667 2669 **opts):
2668 2670 err = 0
2669 2671 except error.RepoLookupError:
2670 2672 ui.status(_("skipping missing subrepository: %s\n")
2671 2673 % os.path.join(prefix, subpath))
2672 2674
2673 2675 return err
2674 2676
2675 2677 def commit(ui, repo, commitfunc, pats, opts):
2676 2678 '''commit the specified files or all outstanding changes'''
2677 2679 date = opts.get('date')
2678 2680 if date:
2679 2681 opts['date'] = util.parsedate(date)
2680 2682 message = logmessage(ui, opts)
2681 2683 matcher = scmutil.match(repo[None], pats, opts)
2682 2684
2683 2685 # extract addremove carefully -- this function can be called from a command
2684 2686 # that doesn't support addremove
2685 2687 if opts.get('addremove'):
2686 2688 if scmutil.addremove(repo, matcher, "", opts) != 0:
2687 2689 raise error.Abort(
2688 2690 _("failed to mark all new/missing files as added/removed"))
2689 2691
2690 2692 return commitfunc(ui, repo, message, matcher, opts)
2691 2693
2692 2694 def samefile(f, ctx1, ctx2):
2693 2695 if f in ctx1.manifest():
2694 2696 a = ctx1.filectx(f)
2695 2697 if f in ctx2.manifest():
2696 2698 b = ctx2.filectx(f)
2697 2699 return (not a.cmp(b)
2698 2700 and a.flags() == b.flags())
2699 2701 else:
2700 2702 return False
2701 2703 else:
2702 2704 return f not in ctx2.manifest()
2703 2705
2704 2706 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2705 2707 # avoid cycle context -> subrepo -> cmdutil
2706 2708 from . import context
2707 2709
2708 2710 # amend will reuse the existing user if not specified, but the obsolete
2709 2711 # marker creation requires that the current user's name is specified.
2710 2712 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2711 2713 ui.username() # raise exception if username not set
2712 2714
2713 2715 ui.note(_('amending changeset %s\n') % old)
2714 2716 base = old.p1()
2715 2717 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2716 2718
2717 2719 wlock = lock = newid = None
2718 2720 try:
2719 2721 wlock = repo.wlock()
2720 2722 lock = repo.lock()
2721 2723 with repo.transaction('amend') as tr:
2722 2724 # See if we got a message from -m or -l, if not, open the editor
2723 2725 # with the message of the changeset to amend
2724 2726 message = logmessage(ui, opts)
2725 2727 # ensure logfile does not conflict with later enforcement of the
2726 2728 # message. potential logfile content has been processed by
2727 2729 # `logmessage` anyway.
2728 2730 opts.pop('logfile')
2729 2731 # First, do a regular commit to record all changes in the working
2730 2732 # directory (if there are any)
2731 2733 ui.callhooks = False
2732 2734 activebookmark = repo._bookmarks.active
2733 2735 try:
2734 2736 repo._bookmarks.active = None
2735 2737 opts['message'] = 'temporary amend commit for %s' % old
2736 2738 node = commit(ui, repo, commitfunc, pats, opts)
2737 2739 finally:
2738 2740 repo._bookmarks.active = activebookmark
2739 2741 repo._bookmarks.recordchange(tr)
2740 2742 ui.callhooks = True
2741 2743 ctx = repo[node]
2742 2744
2743 2745 # Participating changesets:
2744 2746 #
2745 2747 # node/ctx o - new (intermediate) commit that contains changes
2746 2748 # | from working dir to go into amending commit
2747 2749 # | (or a workingctx if there were no changes)
2748 2750 # |
2749 2751 # old o - changeset to amend
2750 2752 # |
2751 2753 # base o - parent of amending changeset
2752 2754
2753 2755 # Update extra dict from amended commit (e.g. to preserve graft
2754 2756 # source)
2755 2757 extra.update(old.extra())
2756 2758
2757 2759 # Also update it from the intermediate commit or from the wctx
2758 2760 extra.update(ctx.extra())
2759 2761
2760 2762 if len(old.parents()) > 1:
2761 2763 # ctx.files() isn't reliable for merges, so fall back to the
2762 2764 # slower repo.status() method
2763 2765 files = set([fn for st in repo.status(base, old)[:3]
2764 2766 for fn in st])
2765 2767 else:
2766 2768 files = set(old.files())
2767 2769
2768 2770 # Second, we use either the commit we just did, or if there were no
2769 2771 # changes the parent of the working directory as the version of the
2770 2772 # files in the final amend commit
2771 2773 if node:
2772 2774 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2773 2775
2774 2776 user = ctx.user()
2775 2777 date = ctx.date()
2776 2778 # Recompute copies (avoid recording a -> b -> a)
2777 2779 copied = copies.pathcopies(base, ctx)
2778 2780 if old.p2():
2779 2781 copied.update(copies.pathcopies(old.p2(), ctx))
2780 2782
2781 2783 # Prune files which were reverted by the updates: if old
2782 2784 # introduced file X and our intermediate commit, node,
2783 2785 # renamed that file, then those two files are the same and
2784 2786 # we can discard X from our list of files. Likewise if X
2785 2787 # was deleted, it's no longer relevant
2786 2788 files.update(ctx.files())
2787 2789 files = [f for f in files if not samefile(f, ctx, base)]
2788 2790
2789 2791 def filectxfn(repo, ctx_, path):
2790 2792 try:
2791 2793 fctx = ctx[path]
2792 2794 flags = fctx.flags()
2793 2795 mctx = context.memfilectx(repo,
2794 2796 fctx.path(), fctx.data(),
2795 2797 islink='l' in flags,
2796 2798 isexec='x' in flags,
2797 2799 copied=copied.get(path))
2798 2800 return mctx
2799 2801 except KeyError:
2800 2802 return None
2801 2803 else:
2802 2804 ui.note(_('copying changeset %s to %s\n') % (old, base))
2803 2805
2804 2806 # Use version of files as in the old cset
2805 2807 def filectxfn(repo, ctx_, path):
2806 2808 try:
2807 2809 return old.filectx(path)
2808 2810 except KeyError:
2809 2811 return None
2810 2812
2811 2813 user = opts.get('user') or old.user()
2812 2814 date = opts.get('date') or old.date()
2813 2815 editform = mergeeditform(old, 'commit.amend')
2814 2816 editor = getcommiteditor(editform=editform, **opts)
2815 2817 if not message:
2816 2818 editor = getcommiteditor(edit=True, editform=editform)
2817 2819 message = old.description()
2818 2820
2819 2821 pureextra = extra.copy()
2820 2822 extra['amend_source'] = old.hex()
2821 2823
2822 2824 new = context.memctx(repo,
2823 2825 parents=[base.node(), old.p2().node()],
2824 2826 text=message,
2825 2827 files=files,
2826 2828 filectxfn=filectxfn,
2827 2829 user=user,
2828 2830 date=date,
2829 2831 extra=extra,
2830 2832 editor=editor)
2831 2833
2832 2834 newdesc = changelog.stripdesc(new.description())
2833 2835 if ((not node)
2834 2836 and newdesc == old.description()
2835 2837 and user == old.user()
2836 2838 and date == old.date()
2837 2839 and pureextra == old.extra()):
2838 2840 # nothing changed. continuing here would create a new node
2839 2841 # anyway because of the amend_source noise.
2840 2842 #
2841 2843 # This is not what we expect from amend.
2842 2844 return old.node()
2843 2845
2844 2846 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2845 2847 try:
2846 2848 if opts.get('secret'):
2847 2849 commitphase = 'secret'
2848 2850 else:
2849 2851 commitphase = old.phase()
2850 2852 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2851 2853 newid = repo.commitctx(new)
2852 2854 finally:
2853 2855 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2854 2856 if newid != old.node():
2855 2857 # Reroute the working copy parent to the new changeset
2856 2858 repo.setparents(newid, nullid)
2857 2859
2858 2860 # Move bookmarks from old parent to amend commit
2859 2861 bms = repo.nodebookmarks(old.node())
2860 2862 if bms:
2861 2863 marks = repo._bookmarks
2862 2864 for bm in bms:
2863 2865 ui.debug('moving bookmarks %r from %s to %s\n' %
2864 2866 (marks, old.hex(), hex(newid)))
2865 2867 marks[bm] = newid
2866 2868 marks.recordchange(tr)
2867 2869 # commit the whole amend process
2868 2870 if createmarkers:
2869 2871 # mark the new changeset as successor of the rewritten one
2870 2872 new = repo[newid]
2871 2873 obs = [(old, (new,))]
2872 2874 if node:
2873 2875 obs.append((ctx, ()))
2874 2876
2875 2877 obsolete.createmarkers(repo, obs, operation='amend')
2876 2878 if not createmarkers and newid != old.node():
2877 2879 # Strip the intermediate commit (if there was one) and the amended
2878 2880 # commit
2879 2881 if node:
2880 2882 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2881 2883 ui.note(_('stripping amended changeset %s\n') % old)
2882 2884 repair.strip(ui, repo, old.node(), topic='amend-backup')
2883 2885 finally:
2884 2886 lockmod.release(lock, wlock)
2885 2887 return newid
2886 2888
2887 2889 def commiteditor(repo, ctx, subs, editform=''):
2888 2890 if ctx.description():
2889 2891 return ctx.description()
2890 2892 return commitforceeditor(repo, ctx, subs, editform=editform,
2891 2893 unchangedmessagedetection=True)
2892 2894
2893 2895 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2894 2896 editform='', unchangedmessagedetection=False):
2895 2897 if not extramsg:
2896 2898 extramsg = _("Leave message empty to abort commit.")
2897 2899
2898 2900 forms = [e for e in editform.split('.') if e]
2899 2901 forms.insert(0, 'changeset')
2900 2902 templatetext = None
2901 2903 while forms:
2902 2904 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2903 2905 if tmpl:
2904 2906 tmpl = templater.unquotestring(tmpl)
2905 2907 templatetext = committext = buildcommittemplate(
2906 2908 repo, ctx, subs, extramsg, tmpl)
2907 2909 break
2908 2910 forms.pop()
2909 2911 else:
2910 2912 committext = buildcommittext(repo, ctx, subs, extramsg)
2911 2913
2912 2914 # run editor in the repository root
2913 2915 olddir = pycompat.getcwd()
2914 2916 os.chdir(repo.root)
2915 2917
2916 2918 # make in-memory changes visible to external process
2917 2919 tr = repo.currenttransaction()
2918 2920 repo.dirstate.write(tr)
2919 2921 pending = tr and tr.writepending() and repo.root
2920 2922
2921 2923 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2922 2924 editform=editform, pending=pending,
2923 2925 repopath=repo.path)
2924 2926 text = editortext
2925 2927
2926 2928 # strip away anything below this special string (used for editors that want
2927 2929 # to display the diff)
2928 2930 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2929 2931 if stripbelow:
2930 2932 text = text[:stripbelow.start()]
2931 2933
2932 2934 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2933 2935 os.chdir(olddir)
2934 2936
2935 2937 if finishdesc:
2936 2938 text = finishdesc(text)
2937 2939 if not text.strip():
2938 2940 raise error.Abort(_("empty commit message"))
2939 2941 if unchangedmessagedetection and editortext == templatetext:
2940 2942 raise error.Abort(_("commit message unchanged"))
2941 2943
2942 2944 return text
2943 2945
2944 2946 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2945 2947 ui = repo.ui
2946 2948 tmpl, mapfile = gettemplate(ui, tmpl, None)
2947 2949
2948 2950 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2949 2951
2950 2952 for k, v in repo.ui.configitems('committemplate'):
2951 2953 if k != 'changeset':
2952 2954 t.t.cache[k] = v
2953 2955
2954 2956 if not extramsg:
2955 2957 extramsg = '' # ensure that extramsg is string
2956 2958
2957 2959 ui.pushbuffer()
2958 2960 t.show(ctx, extramsg=extramsg)
2959 2961 return ui.popbuffer()
2960 2962
2961 2963 def hgprefix(msg):
2962 2964 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2963 2965
2964 2966 def buildcommittext(repo, ctx, subs, extramsg):
2965 2967 edittext = []
2966 2968 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2967 2969 if ctx.description():
2968 2970 edittext.append(ctx.description())
2969 2971 edittext.append("")
2970 2972 edittext.append("") # Empty line between message and comments.
2971 2973 edittext.append(hgprefix(_("Enter commit message."
2972 2974 " Lines beginning with 'HG:' are removed.")))
2973 2975 edittext.append(hgprefix(extramsg))
2974 2976 edittext.append("HG: --")
2975 2977 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2976 2978 if ctx.p2():
2977 2979 edittext.append(hgprefix(_("branch merge")))
2978 2980 if ctx.branch():
2979 2981 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2980 2982 if bookmarks.isactivewdirparent(repo):
2981 2983 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2982 2984 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2983 2985 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2984 2986 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2985 2987 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2986 2988 if not added and not modified and not removed:
2987 2989 edittext.append(hgprefix(_("no files changed")))
2988 2990 edittext.append("")
2989 2991
2990 2992 return "\n".join(edittext)
2991 2993
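# For illustration (not part of the original module), buildcommittext() above
# produces an editor buffer roughly along these lines for a commit that
# modifies a single file on the default branch (names are made up):
#
#   <existing description, if any>
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Alice <alice@example.com>
#   HG: branch 'default'
#   HG: changed foo.py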
2992 2994 def commitstatus(repo, node, branch, bheads=None, opts=None):
2993 2995 if opts is None:
2994 2996 opts = {}
2995 2997 ctx = repo[node]
2996 2998 parents = ctx.parents()
2997 2999
2998 3000 if (not opts.get('amend') and bheads and node not in bheads and not
2999 3001 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3000 3002 repo.ui.status(_('created new head\n'))
3001 3003 # The message is not printed for initial roots. For the other
3002 3004 # changesets, it is printed in the following situations:
3003 3005 #
3004 3006 # Par column: for the 2 parents with ...
3005 3007 # N: null or no parent
3006 3008 # B: parent is on another named branch
3007 3009 # C: parent is a regular non head changeset
3008 3010 # H: parent was a branch head of the current branch
3009 3011 # Msg column: whether we print "created new head" message
3010 3012 # In the following, it is assumed that there already exists some
3011 3013 # initial branch heads of the current branch, otherwise nothing is
3012 3014 # printed anyway.
3013 3015 #
3014 3016 # Par Msg Comment
3015 3017 # N N y additional topo root
3016 3018 #
3017 3019 # B N y additional branch root
3018 3020 # C N y additional topo head
3019 3021 # H N n usual case
3020 3022 #
3021 3023 # B B y weird additional branch root
3022 3024 # C B y branch merge
3023 3025 # H B n merge with named branch
3024 3026 #
3025 3027 # C C y additional head from merge
3026 3028 # C H n merge with a head
3027 3029 #
3028 3030 # H H n head merge: head count decreases
3029 3031
3030 3032 if not opts.get('close_branch'):
3031 3033 for r in parents:
3032 3034 if r.closesbranch() and r.branch() == branch:
3033 3035 repo.ui.status(_('reopening closed branch head %d\n') % r)
3034 3036
3035 3037 if repo.ui.debugflag:
3036 3038 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3037 3039 elif repo.ui.verbose:
3038 3040 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3039 3041
3040 3042 def postcommitstatus(repo, pats, opts):
3041 3043 return repo.status(match=scmutil.match(repo[None], pats, opts))
3042 3044
3043 3045 def revert(ui, repo, ctx, parents, *pats, **opts):
3044 3046 parent, p2 = parents
3045 3047 node = ctx.node()
3046 3048
3047 3049 mf = ctx.manifest()
3048 3050 if node == p2:
3049 3051 parent = p2
3050 3052
3051 3053 # need all matching names in dirstate and manifest of target rev,
3052 3054 # so have to walk both. do not print errors if files exist in one
3053 3055 # but not the other. in both cases, filesets should be evaluated against
3054 3056 # workingctx to get consistent result (issue4497). this means 'set:**'
3055 3057 # cannot be used to select missing files from target rev.
3056 3058
3057 3059 # `names` is a mapping for all elements in working copy and target revision
3058 3060 # The mapping is in the form:
3059 3061 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3060 3062 names = {}
3061 3063
3062 3064 with repo.wlock():
3063 3065 ## filling of the `names` mapping
3064 3066 # walk dirstate to fill `names`
3065 3067
3066 3068 interactive = opts.get('interactive', False)
3067 3069 wctx = repo[None]
3068 3070 m = scmutil.match(wctx, pats, opts)
3069 3071
3070 3072 # we'll need this later
3071 3073 targetsubs = sorted(s for s in wctx.substate if m(s))
3072 3074
3073 3075 if not m.always():
3074 3076 matcher = matchmod.badmatch(m, lambda x, y: False)
3075 3077 for abs in wctx.walk(matcher):
3076 3078 names[abs] = m.rel(abs), m.exact(abs)
3077 3079
3078 3080 # walk target manifest to fill `names`
3079 3081
3080 3082 def badfn(path, msg):
3081 3083 if path in names:
3082 3084 return
3083 3085 if path in ctx.substate:
3084 3086 return
3085 3087 path_ = path + '/'
3086 3088 for f in names:
3087 3089 if f.startswith(path_):
3088 3090 return
3089 3091 ui.warn("%s: %s\n" % (m.rel(path), msg))
3090 3092
3091 3093 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3092 3094 if abs not in names:
3093 3095 names[abs] = m.rel(abs), m.exact(abs)
3094 3096
3095 3097 # Find the status of all files in `names`.
3096 3098 m = scmutil.matchfiles(repo, names)
3097 3099
3098 3100 changes = repo.status(node1=node, match=m,
3099 3101 unknown=True, ignored=True, clean=True)
3100 3102 else:
3101 3103 changes = repo.status(node1=node, match=m)
3102 3104 for kind in changes:
3103 3105 for abs in kind:
3104 3106 names[abs] = m.rel(abs), m.exact(abs)
3105 3107
3106 3108 m = scmutil.matchfiles(repo, names)
3107 3109
3108 3110 modified = set(changes.modified)
3109 3111 added = set(changes.added)
3110 3112 removed = set(changes.removed)
3111 3113 _deleted = set(changes.deleted)
3112 3114 unknown = set(changes.unknown)
3113 3115 unknown.update(changes.ignored)
3114 3116 clean = set(changes.clean)
3115 3117 modadded = set()
3116 3118
3117 3119 # We need to account for the state of the file in the dirstate,
3118 3120 # even when we revert against something other than the parent. This
3119 3121 # slightly alters the behavior of revert (backing up or not, deleting
3120 3122 # or just forgetting, etc).
3121 3123 if parent == node:
3122 3124 dsmodified = modified
3123 3125 dsadded = added
3124 3126 dsremoved = removed
3125 3127 # store all local modifications, useful later for rename detection
3126 3128 localchanges = dsmodified | dsadded
3127 3129 modified, added, removed = set(), set(), set()
3128 3130 else:
3129 3131 changes = repo.status(node1=parent, match=m)
3130 3132 dsmodified = set(changes.modified)
3131 3133 dsadded = set(changes.added)
3132 3134 dsremoved = set(changes.removed)
3133 3135 # store all local modifications, useful later for rename detection
3134 3136 localchanges = dsmodified | dsadded
3135 3137
3136 3138 # only take into account removes between wc and target
3137 3139 clean |= dsremoved - removed
3138 3140 dsremoved &= removed
3139 3141 # distinguish between dirstate removes and the others
3140 3142 removed -= dsremoved
3141 3143
3142 3144 modadded = added & dsmodified
3143 3145 added -= modadded
3144 3146
3145 3147 # tell newly modified files apart.
3146 3148 dsmodified &= modified
3147 3149 dsmodified |= modified & dsadded # dirstate added may need backup
3148 3150 modified -= dsmodified
3149 3151
3150 3152 # We need to wait for some post-processing to update this set
3151 3153 # before making the distinction. The dirstate will be used for
3152 3154 # that purpose.
3153 3155 dsadded = added
3154 3156
3155 3157 # in case of a merge, files that are actually added can be reported as
3156 3158 # modified; we need to post-process the result
3157 3159 if p2 != nullid:
3158 3160 mergeadd = set(dsmodified)
3159 3161 for path in dsmodified:
3160 3162 if path in mf:
3161 3163 mergeadd.remove(path)
3162 3164 dsadded |= mergeadd
3163 3165 dsmodified -= mergeadd
3164 3166
3165 3167 # if f is a rename, update `names` to also revert the source
3166 3168 cwd = repo.getcwd()
3167 3169 for f in localchanges:
3168 3170 src = repo.dirstate.copied(f)
3169 3171 # XXX should we check for rename down to target node?
3170 3172 if src and src not in names and repo.dirstate[src] == 'r':
3171 3173 dsremoved.add(src)
3172 3174 names[src] = (repo.pathto(src, cwd), True)
3173 3175
3174 3176 # determine the exact nature of the deleted files
3175 3177 deladded = set(_deleted)
3176 3178 for path in _deleted:
3177 3179 if path in mf:
3178 3180 deladded.remove(path)
3179 3181 deleted = _deleted - deladded
3180 3182
3181 3183 # distinguish between files to forget and the others
3182 3184 added = set()
3183 3185 for abs in dsadded:
3184 3186 if repo.dirstate[abs] != 'a':
3185 3187 added.add(abs)
3186 3188 dsadded -= added
3187 3189
3188 3190 for abs in deladded:
3189 3191 if repo.dirstate[abs] == 'a':
3190 3192 dsadded.add(abs)
3191 3193 deladded -= dsadded
3192 3194
3193 3195 # For files marked as removed, we check if an unknown file is present at
3194 3196 # the same path. If such a file exists, it may need to be backed up.
3195 3197 # Making the distinction at this stage keeps the backup logic
3196 3198 # simpler.
3197 3199 removunk = set()
3198 3200 for abs in removed:
3199 3201 target = repo.wjoin(abs)
3200 3202 if os.path.lexists(target):
3201 3203 removunk.add(abs)
3202 3204 removed -= removunk
3203 3205
3204 3206 dsremovunk = set()
3205 3207 for abs in dsremoved:
3206 3208 target = repo.wjoin(abs)
3207 3209 if os.path.lexists(target):
3208 3210 dsremovunk.add(abs)
3209 3211 dsremoved -= dsremovunk
3210 3212
3211 3213 # actions to be actually performed by revert
3212 3214 # (<list of files>, <message>) tuple
3213 3215 actions = {'revert': ([], _('reverting %s\n')),
3214 3216 'add': ([], _('adding %s\n')),
3215 3217 'remove': ([], _('removing %s\n')),
3216 3218 'drop': ([], _('removing %s\n')),
3217 3219 'forget': ([], _('forgetting %s\n')),
3218 3220 'undelete': ([], _('undeleting %s\n')),
3219 3221 'noop': (None, _('no changes needed to %s\n')),
3220 3222 'unknown': (None, _('file not managed: %s\n')),
3221 3223 }
3222 3224
3223 3225 # "constant" that convey the backup strategy.
3224 3226 # All set to `discard` if `no-backup` is set do avoid checking
3225 3227 # no_backup lower in the code.
3226 3228 # These values are ordered for comparison purposes
3227 3229 backupinteractive = 3 # do backup if interactively modified
3228 3230 backup = 2 # unconditionally do backup
3229 3231 check = 1 # check if the existing file differs from target
3230 3232 discard = 0 # never do backup
3231 3233 if opts.get('no_backup'):
3232 3234 backupinteractive = backup = check = discard
3233 3235 if interactive:
3234 3236 dsmodifiedbackup = backupinteractive
3235 3237 else:
3236 3238 dsmodifiedbackup = backup
3237 3239 tobackup = set()
3238 3240
3239 3241 backupanddel = actions['remove']
3240 3242 if not opts.get('no_backup'):
3241 3243 backupanddel = actions['drop']
3242 3244
3243 3245 disptable = (
3244 3246 # dispatch table:
3245 3247 # file state
3246 3248 # action
3247 3249 # make backup
3248 3250
3249 3251 ## Sets that result in files being changed on disk
3250 3252 # Modified compared to target, no local change
3251 3253 (modified, actions['revert'], discard),
3252 3254 # Modified compared to target, but local file is deleted
3253 3255 (deleted, actions['revert'], discard),
3254 3256 # Modified compared to target, local change
3255 3257 (dsmodified, actions['revert'], dsmodifiedbackup),
3256 3258 # Added since target
3257 3259 (added, actions['remove'], discard),
3258 3260 # Added in working directory
3259 3261 (dsadded, actions['forget'], discard),
3260 3262 # Added since target, have local modification
3261 3263 (modadded, backupanddel, backup),
3262 3264 # Added since target but file is missing in working directory
3263 3265 (deladded, actions['drop'], discard),
3264 3266 # Removed since target, before working copy parent
3265 3267 (removed, actions['add'], discard),
3266 3268 # Same as `removed` but an unknown file exists at the same path
3267 3269 (removunk, actions['add'], check),
3268 3270 # Removed since target, marked as such in working copy parent
3269 3271 (dsremoved, actions['undelete'], discard),
3270 3272 # Same as `dsremoved` but an unknown file exists at the same path
3271 3273 (dsremovunk, actions['undelete'], check),
3272 3274 ## the following sets do not result in any file changes
3273 3275 # File with no modification
3274 3276 (clean, actions['noop'], discard),
3275 3277 # Existing file, not tracked anywhere
3276 3278 (unknown, actions['unknown'], discard),
3277 3279 )
3278 3280
3279 3281 for abs, (rel, exact) in sorted(names.items()):
3280 3282 # target file to be touched on disk (relative to cwd)
3281 3283 target = repo.wjoin(abs)
3282 3284 # search for the entry in the dispatch table.
3283 3285 # if the file is in any of these sets, it was touched in the working
3284 3286 # directory parent and we are sure it needs to be reverted.
3285 3287 for table, (xlist, msg), dobackup in disptable:
3286 3288 if abs not in table:
3287 3289 continue
3288 3290 if xlist is not None:
3289 3291 xlist.append(abs)
3290 3292 if dobackup:
3291 3293 # If in interactive mode, don't automatically create
3292 3294 # .orig files (issue4793)
3293 3295 if dobackup == backupinteractive:
3294 3296 tobackup.add(abs)
3295 3297 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3296 3298 bakname = scmutil.origpath(ui, repo, rel)
3297 3299 ui.note(_('saving current version of %s as %s\n') %
3298 3300 (rel, bakname))
3299 3301 if not opts.get('dry_run'):
3300 3302 if interactive:
3301 3303 util.copyfile(target, bakname)
3302 3304 else:
3303 3305 util.rename(target, bakname)
3304 3306 if ui.verbose or not exact:
3305 3307 if not isinstance(msg, basestring):
3306 3308 msg = msg(abs)
3307 3309 ui.status(msg % rel)
3308 3310 elif exact:
3309 3311 ui.warn(msg % rel)
3310 3312 break
3311 3313
3312 3314 if not opts.get('dry_run'):
3313 3315 needdata = ('revert', 'add', 'undelete')
3314 3316 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3315 3317 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3316 3318
3317 3319 if targetsubs:
3318 3320 # Revert the subrepos on the revert list
3319 3321 for sub in targetsubs:
3320 3322 try:
3321 3323 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3322 3324 except KeyError:
3323 3325 raise error.Abort("subrepository '%s' does not exist in %s!"
3324 3326 % (sub, short(ctx.node())))
3325 3327
3326 3328 def _revertprefetch(repo, ctx, *files):
3327 3329 """Let extension changing the storage layer prefetch content"""
3328 3330 pass
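# A minimal sketch of how an extension that swaps in a different storage layer
# might hook _revertprefetch(); the wrapper name and the debug message are
# assumptions, not part of cmdutil's API.
def _examplerevertprefetch(orig, repo, ctx, *files):
    # each positional argument is a list of repo-relative paths that
    # _performrevert() is about to write out
    for filegroup in files:
        for f in filegroup:
            repo.ui.debug('would prefetch %s from %s\n' % (f, ctx.hex()))
    return orig(repo, ctx, *files)
# An extension would install the wrapper from its extsetup(), e.g.:
#     extensions.wrapfunction(cmdutil, '_revertprefetch', _examplerevertprefetch)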
3329 3331
3330 3332 def _performrevert(repo, parents, ctx, actions, interactive=False,
3331 3333 tobackup=None):
3332 3334 """function that actually perform all the actions computed for revert
3333 3335
3334 3336 This is an independent function to let extensions plug in and react to
3335 3337 the imminent revert.
3336 3338
3337 3339 Make sure you have the working directory locked when calling this function.
3338 3340 """
3339 3341 parent, p2 = parents
3340 3342 node = ctx.node()
3341 3343 excluded_files = []
3342 3344 matcher_opts = {"exclude": excluded_files}
3343 3345
3344 3346 def checkout(f):
3345 3347 fc = ctx[f]
3346 3348 repo.wwrite(f, fc.data(), fc.flags())
3347 3349
3348 3350 def doremove(f):
3349 3351 try:
3350 3352 repo.wvfs.unlinkpath(f)
3351 3353 except OSError:
3352 3354 pass
3353 3355 repo.dirstate.remove(f)
3354 3356
3355 3357 audit_path = pathutil.pathauditor(repo.root)
3356 3358 for f in actions['forget'][0]:
3357 3359 if interactive:
3358 3360 choice = repo.ui.promptchoice(
3359 3361 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3360 3362 if choice == 0:
3361 3363 repo.dirstate.drop(f)
3362 3364 else:
3363 3365 excluded_files.append(repo.wjoin(f))
3364 3366 else:
3365 3367 repo.dirstate.drop(f)
3366 3368 for f in actions['remove'][0]:
3367 3369 audit_path(f)
3368 3370 if interactive:
3369 3371 choice = repo.ui.promptchoice(
3370 3372 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3371 3373 if choice == 0:
3372 3374 doremove(f)
3373 3375 else:
3374 3376 excluded_files.append(repo.wjoin(f))
3375 3377 else:
3376 3378 doremove(f)
3377 3379 for f in actions['drop'][0]:
3378 3380 audit_path(f)
3379 3381 repo.dirstate.remove(f)
3380 3382
3381 3383 normal = None
3382 3384 if node == parent:
3383 3385 # We're reverting to our parent. If possible, we'd like status
3384 3386 # to report the file as clean. We have to use normallookup for
3385 3387 # merges to avoid losing information about merged/dirty files.
3386 3388 if p2 != nullid:
3387 3389 normal = repo.dirstate.normallookup
3388 3390 else:
3389 3391 normal = repo.dirstate.normal
3390 3392
3391 3393 newlyaddedandmodifiedfiles = set()
3392 3394 if interactive:
3393 3395 # Prompt the user for changes to revert
3394 3396 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3395 3397 m = scmutil.match(ctx, torevert, matcher_opts)
3396 3398 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3397 3399 diffopts.nodates = True
3398 3400 diffopts.git = True
3399 3401 operation = 'discard'
3400 3402 reversehunks = True
3401 3403 if node != parent:
3402 3404 operation = 'revert'
3403 3405 reversehunks = repo.ui.configbool('experimental',
3404 3406 'revertalternateinteractivemode',
3405 3407 True)
3406 3408 if reversehunks:
3407 3409 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3408 3410 else:
3409 3411 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3410 3412 originalchunks = patch.parsepatch(diff)
3411 3413
3412 3414 try:
3413 3415
3414 3416 chunks, opts = recordfilter(repo.ui, originalchunks,
3415 3417 operation=operation)
3416 3418 if reversehunks:
3417 3419 chunks = patch.reversehunks(chunks)
3418 3420
3419 3421 except patch.PatchError as err:
3420 3422 raise error.Abort(_('error parsing patch: %s') % err)
3421 3423
3422 3424 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3423 3425 if tobackup is None:
3424 3426 tobackup = set()
3425 3427 # Apply changes
3426 3428 fp = stringio()
3427 3429 for c in chunks:
3428 3430 # Create a backup file only if this hunk should be backed up
3429 3431 if ishunk(c) and c.header.filename() in tobackup:
3430 3432 abs = c.header.filename()
3431 3433 target = repo.wjoin(abs)
3432 3434 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3433 3435 util.copyfile(target, bakname)
3434 3436 tobackup.remove(abs)
3435 3437 c.write(fp)
3436 3438 dopatch = fp.tell()
3437 3439 fp.seek(0)
3438 3440 if dopatch:
3439 3441 try:
3440 3442 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3441 3443 except patch.PatchError as err:
3442 3444 raise error.Abort(str(err))
3443 3445 del fp
3444 3446 else:
3445 3447 for f in actions['revert'][0]:
3446 3448 checkout(f)
3447 3449 if normal:
3448 3450 normal(f)
3449 3451
3450 3452 for f in actions['add'][0]:
3451 3453 # Don't check out modified files; they are already created by the diff
3452 3454 if f not in newlyaddedandmodifiedfiles:
3453 3455 checkout(f)
3454 3456 repo.dirstate.add(f)
3455 3457
3456 3458 normal = repo.dirstate.normallookup
3457 3459 if node == parent and p2 == nullid:
3458 3460 normal = repo.dirstate.normal
3459 3461 for f in actions['undelete'][0]:
3460 3462 checkout(f)
3461 3463 normal(f)
3462 3464
3463 3465 copied = copies.pathcopies(repo[parent], ctx)
3464 3466
3465 3467 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3466 3468 if f in copied:
3467 3469 repo.dirstate.copy(copied[f], f)
3468 3470
3469 3471 class command(registrar.command):
3470 3472 def _doregister(self, func, name, *args, **kwargs):
3471 3473 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3472 3474 return super(command, self)._doregister(func, name, *args, **kwargs)
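# For illustration, a minimal sketch of the traditional extension pattern this
# class exists to support (extension and command names are placeholders):
#
#     cmdtable = {}
#     command = cmdutil.command(cmdtable)
#
#     @command('hello', [], _('hg hello'))
#     def hello(ui, repo, **opts):
#         ui.write('hello\n')
#
# The _deprecatedregistrar flag set above is what lets extensions.py warn such
# extensions to switch to registrar.command directly.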
3473 3475
3474 3476 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3475 3477 # commands.outgoing. "missing" is the "missing" attribute of the result
3476 3478 # of "findcommonoutgoing()"
3477 3479 outgoinghooks = util.hooks()
3478 3480
3479 3481 # a list of (ui, repo) functions called by commands.summary
3480 3482 summaryhooks = util.hooks()
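# A minimal sketch of a summary hook (the hook body is only illustrative):
# each registered callable receives (ui, repo) and may emit extra output when
# 'hg summary' runs.
def _examplesummaryhook(ui, repo):
    # print one additional, purely informational line
    ui.status('note: example summary hook ran\n')
# An extension would register it once, typically from uisetup():
#     cmdutil.summaryhooks.add('myext', _examplesummaryhook)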
3481 3483
3482 3484 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3483 3485 #
3484 3486 # functions should return the tuple of booleans below if 'changes' is None:
3485 3487 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3486 3488 #
3487 3489 # otherwise, 'changes' is a tuple of tuples below:
3488 3490 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3489 3491 # - (desturl, destbranch, destpeer, outgoing)
3490 3492 summaryremotehooks = util.hooks()
3491 3493
3492 3494 # A list of state files kept by multistep operations like graft.
3493 3495 # Since graft cannot be aborted, it is considered 'clearable' by update.
3494 3496 # note: bisect is intentionally excluded
3495 3497 # (state file, clearable, allowcommit, error, hint)
3496 3498 unfinishedstates = [
3497 3499 ('graftstate', True, False, _('graft in progress'),
3498 3500 _("use 'hg graft --continue' or 'hg update' to abort")),
3499 3501 ('updatestate', True, False, _('last update was interrupted'),
3500 3502 _("use 'hg update' to get a consistent checkout"))
3501 3503 ]
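# A sketch of how an extension with its own multistep command would register
# a matching entry (the state file name and messages are placeholders),
# typically from its uisetup():
#
#     cmdutil.unfinishedstates.append(
#         ('examplestate', False, False, _('example operation in progress'),
#          _("use 'hg example --continue' or 'hg example --abort'")))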
3502 3504
3503 3505 def checkunfinished(repo, commit=False):
3504 3506 '''Look for an unfinished multistep operation, like graft, and abort
3505 3507 if found. It's probably good to check this right before
3506 3508 bailifchanged().
3507 3509 '''
3508 3510 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3509 3511 if commit and allowcommit:
3510 3512 continue
3511 3513 if repo.vfs.exists(f):
3512 3514 raise error.Abort(msg, hint=hint)
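# Typical call site, sketched: a command that starts new multistep work guards
# itself before touching the working directory, e.g.
#     cmdutil.checkunfinished(repo)
#     cmdutil.bailifchanged(repo)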
3513 3515
3514 3516 def clearunfinished(repo):
3515 3517 '''Check for unfinished operations (as above), and clear the ones
3516 3518 that are clearable.
3517 3519 '''
3518 3520 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3519 3521 if not clearable and repo.vfs.exists(f):
3520 3522 raise error.Abort(msg, hint=hint)
3521 3523 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3522 3524 if clearable and repo.vfs.exists(f):
3523 3525 util.unlink(repo.vfs.join(f))
3524 3526
3525 3527 afterresolvedstates = [
3526 3528 ('graftstate',
3527 3529 _('hg graft --continue')),
3528 3530 ]
3529 3531
3530 3532 def howtocontinue(repo):
3531 3533 '''Check for an unfinished operation and return the command to finish
3532 3534 it.
3533 3535
3534 3536 afterresolvedstates tuples define a .hg/{file} and the corresponding
3535 3537 command needed to finish it.
3536 3538
3537 3539 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3538 3540 a boolean.
3539 3541 '''
3540 3542 contmsg = _("continue: %s")
3541 3543 for f, msg in afterresolvedstates:
3542 3544 if repo.vfs.exists(f):
3543 3545 return contmsg % msg, True
3544 3546 workingctx = repo[None]
3545 3547 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3546 3548 for s in workingctx.substate)
3547 3549 if dirty:
3548 3550 return contmsg % _("hg commit"), False
3549 3551 return None, None
3550 3552
3551 3553 def checkafterresolved(repo):
3552 3554 '''Inform the user about the next action after completing hg resolve
3553 3555
3554 3556 If there's a matching afterresolvedstates entry, the message from
3555 3557 howtocontinue is reported via repo.ui.warn.
3556 3558
3557 3559 Otherwise, it is reported via repo.ui.note.
3558 3560 '''
3559 3561 msg, warning = howtocontinue(repo)
3560 3562 if msg is not None:
3561 3563 if warning:
3562 3564 repo.ui.warn("%s\n" % msg)
3563 3565 else:
3564 3566 repo.ui.note("%s\n" % msg)
3565 3567
3566 3568 def wrongtooltocontinue(repo, task):
3567 3569 '''Raise an abort suggesting how to properly continue if there is an
3568 3570 active task.
3569 3571
3570 3572 Uses howtocontinue() to find the active task.
3571 3573
3572 3574 If there's no task (only the non-warning 'hg commit' suggestion from
3573 3575 howtocontinue, or nothing at all), it does not offer a hint.
3574 3576 '''
3575 3577 after = howtocontinue(repo)
3576 3578 hint = None
3577 3579 if after[1]:
3578 3580 hint = after[0]
3579 3581 raise error.Abort(_('no %s in progress') % task, hint=hint)
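# A sketch of a typical call site: a command's --continue handler can use this
# to abort and point at whatever operation actually is in progress, e.g.
#     if not repo.vfs.exists('graftstate'):
#         cmdutil.wrongtooltocontinue(repo, _('graft'))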